diff --git a/.github/settings.yml b/.github/settings.yml
new file mode 100644
index 0000000000000..4aaa0dd57e4ad
--- /dev/null
+++ b/.github/settings.yml
@@ -0,0 +1,2 @@
+---
+_extends: 'open-source-project-boilerplate'
diff --git a/.github/workflows/benchmark-comment.yml b/.github/workflows/benchmark-comment.yml
new file mode 100644
index 0000000000000..de86c3c132bb7
--- /dev/null
+++ b/.github/workflows/benchmark-comment.yml
@@ -0,0 +1,92 @@
+---
+name: Benchmark CLI - Comment
+
+on:
+  issue_comment:
+    types: [created, edited]
+
+jobs:
+  comment-handler:
+    name: Trigger Benchmarks
+
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Handle Incoming Comment
+        env:
+          DISPATCH_REPO: "benchmarks"
+          DISPATCH_OWNER: "npm"
+          EVENT_NAME: ${{ github.event_name }}
+          EVENT_ACTION: ${{ github.event.action }}
+          OWNER: ${{ github.event.repository.owner.login }}
+          REPO: ${{ github.event.repository.name }}
+          ISSUE_NUMBER: ${{ github.event.issue.number }}
+          COMMENT_BODY: ${{ github.event.comment.body }}
+          COMMENT_ID: ${{ github.event.comment.id }}
+          COMMENT_NODE_ID: ${{ github.event.comment.node_id }}
+          COMMENT_ACTIONABLE: ${{ startsWith(github.event.comment.body, 'test this please ✅') }}
+          AUTH_TOKEN: ${{ secrets.NPM_DEPLOY_USER_PAT }}
+        run: |
+          # Comment Handler
+
+          # Creates an exit early condition if there are errors
+          # Exit early if `jq` is not present
+          set -e
+          jq --version
+
+          # Figure out if comment came from pull-request or issue
+          IS_PR=$(curl -s https://api.github.com/repos/${OWNER}/${REPO}/issues/${ISSUE_NUMBER} | jq -cr '.pull_request.url')
+
+          if [ "${IS_PR}" != "null" ]; then
+            echo "Comment from pull/${ISSUE_NUMBER}."
+
+            # It is a pull-request; check comment body for correct phrase
+            if [ "${COMMENT_ACTIONABLE}" == "true" ]; then
+              # Fetch pull-request information
+              PR_DATA=$(curl -s "${IS_PR}")
+              PR_OWNER=$(echo "${PR_DATA}" | jq '.head.repo.owner.login')
+              PR_REPO=$(echo "${PR_DATA}" | jq '.head.repo.name')
+              PR_COMMIT_SHA=$(curl -s "${IS_PR}/commits" | jq -r '.[0].sha')
+
+              # dispatch request for benchmarks
+              echo "Dispatching request..."
+              curl \
+                -s \
+                -X POST https://api.github.com/repos/${DISPATCH_OWNER}/${DISPATCH_REPO}/dispatches \
+                -H "Accept: application/vnd.github.everest-preview+json" \
+                -H "Authorization: token ${AUTH_TOKEN}" \
+                -d \
+                '
+                {
+                  "event_type": "'"${EVENT_NAME}"'",
+                  "client_payload": {
+                    "pr_id": "'"${ISSUE_NUMBER}"'",
+                    "repo": "'"${PR_REPO}"'",
+                    "owner": "'"${PR_OWNER}"'",
+                    "commit_sha": "'"${PR_COMMIT_SHA}"'"
+                  }
+                }'
+
+              # Create reaction on comment to confirm dispatch was sent
+              curl \
+                -s \
+                -X POST https://api.github.com/graphql \
+                -H "Content-Type: application/json" \
+                -H "Authorization: token ${AUTH_TOKEN}" \
+                -d \
+                '
+                {
+                  "query": "mutation($inputData:AddReactionInput!) { addReaction(input:$inputData) { reaction { content } } }",
+                  "variables": {
+                    "inputData": {
+                      "subjectId": "'"${COMMENT_NODE_ID}"'",
+                      "content": "ROCKET"
+                    }
+                  }
+                }'
+            else
+              echo "Comment not actionable."
+            fi
+          else
+            echo "Comment not from pull-request."
+          fi
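Both this workflow and the `benchmark.yml` workflow below hand their payload off as a `repository_dispatch` event to the npm/benchmarks repository; the workflow that receives it lives in that repository and is not part of this diff. As a rough, hypothetical sketch only (assuming the consumer needs just the four `client_payload` fields sent here, and that the forwarded `event_type` values are the `issue_comment`/`push`/`pull_request` event names used above), a receiver could look something like:

```yaml
# Hypothetical receiver in npm/benchmarks -- illustration only, not part of this change.
name: Run CLI Benchmarks

on:
  repository_dispatch:
    # Matches the event_type values forwarded by the dispatching workflows.
    types: [issue_comment, push, pull_request]

jobs:
  benchmark:
    runs-on: ubuntu-latest
    steps:
      - name: Show what was dispatched
        run: |
          # For repository_dispatch, github.event.action carries the event_type string
          # and github.event.client_payload carries the JSON body sent via the API.
          echo "event_type:   ${{ github.event.action }}"
          echo "pull request: #${{ github.event.client_payload.pr_id }}"
          echo "source repo:  ${{ github.event.client_payload.owner }}/${{ github.event.client_payload.repo }}"
          echo "commit:       ${{ github.event.client_payload.commit_sha }}"
```

On the sending side, the `addReaction` GraphQL mutation with `ROCKET` only acknowledges on the comment thread that the dispatch was sent; it carries no benchmark data itself.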
diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml
new file mode 100644
index 0000000000000..0920f711c5aff
--- /dev/null
+++ b/.github/workflows/benchmark.yml
@@ -0,0 +1,68 @@
+---
+name: Benchmark Suite
+
+on:
+  push:
+    branches:
+      - "latest"
+  pull_request:
+    branches:
+      - "**"
+
+jobs:
+  build:
+    name: Trigger Benchmarks
+
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Handle Incoming Pull-Request
+        env:
+          DISPATCH_REPO: "benchmarks"
+          DISPATCH_OWNER: "npm"
+          EVENT_NAME: ${{ github.event_name }}
+          EVENT_ACTION: ${{ github.event.action }}
+          REPO: ${{ github.event.repository.name }}
+          PR_NUMBER: ${{ github.event.pull_request.number }}
+          PR_OWNER: ${{ github.event.pull_request.head.repo.owner.login }}
+          PR_COMMITS_URL: ${{ github.event.pull_request.commits_url }}
+          PR_COMMIT_SHA: ${{ github.event.pull_request.head.sha }}
+          AUTH_TOKEN: ${{ secrets.NPM_DEPLOY_USER_PAT }}
+        run: |
+          # Dispatch Handler
+
+          dispatch_request () {
+            echo "Dispatching request..."
+            REF_SHA=$1
+            curl \
+              -s \
+              -X POST https://api.github.com/repos/${DISPATCH_OWNER}/${DISPATCH_REPO}/dispatches \
+              -H "Accept: application/vnd.github.everest-preview+json" \
+              -H "Authorization: token ${AUTH_TOKEN}" \
+              -d \
+              '
+              {
+                "event_type": "'"${EVENT_NAME}"'",
+                "client_payload": {
+                  "pr_id": "'"${PR_NUMBER}"'",
+                  "repo": "'"${REPO}"'",
+                  "owner": "'"${PR_OWNER}"'",
+                  "commit_sha": "'"${REF_SHA}"'"
+                }
+              }'
+          }
+
+          if [ "${AUTH_TOKEN}" != "" ]; then
+            if [ "${EVENT_ACTION}" == "opened" ]; then
+              # Fetch the head commit sha, since it doesn't exist in the body of this event
+              COMMIT_SHA=$(curl -s "${PR_COMMITS_URL}" | jq -r '.[0].sha')
+
+              # Dispatch request for benchmarks
+              dispatch_request "${COMMIT_SHA}"
+            else
+              # Dispatch request for benchmarks
+              dispatch_request "${PR_COMMIT_SHA}"
+            fi
+          else
+            echo "NO AUTH - FORK PULL REQUEST"
+          fi
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000000000..499ad3842d31f
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,51 @@
+name: Node CI
+
+on: [push]
+
+jobs:
+  build:
+    strategy:
+      fail-fast: false
+      matrix:
+        node-version: [6.x, 8.x, 10.x, 12.x]
+        os: [ubuntu-latest]
+
+    runs-on: ${{ matrix.os }}
+
+    steps:
+      # Checkout the npm/cli repo
+      - uses: actions/checkout@v2
+
+      # Installs the specific version of Node.js
+      - name: Use Node.js ${{ matrix.node-version }}
+        uses: actions/setup-node@v1
+        with:
+          node-version: ${{ matrix.node-version }}
+
+      # Run the installer script
+      - name: Install dependencies
+        run: node . install
+
+      # Run the tests
+      - name: Run Tap tests
+        run: node . run tap -- "test/tap/*.js" -t600 -Rclassic -c
+        env:
+          DEPLOY_VERSION: testing
+
+      # Run coverage check
+      - name: Run coverage report
+        if: matrix.os == 'ubuntu-latest' && matrix.node-version == '12.x'
+        run: node . run tap -- "test/tap/*.js" -t600 -Rclassic -c
+        env:
+          DEPLOY_VERSION: testing
+          COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_OPTIONAL_TOKEN }}
+
+      # - name: Run sudo tests on Linux
+      #   if: matrix.os == 'ubuntu-latest'
+      #   run: sudo PATH=$PATH $(which node) . run tap -- "test/tap/*.js" --coverage --timeout 600
+
+      - name: Lint
+        run: node . run lint
+
+      - name: Validate licenses
+        run: node . 
run licenses diff --git a/.gitignore b/.gitignore index 065c0438da279..763bb30e749f8 100644 --- a/.gitignore +++ b/.gitignore @@ -8,9 +8,10 @@ npm-debug.log /test/packages/test-package/random-data.txt /test/root /node_modules/.bin -/html/doc/ +/docs/public/ +/docs/.cache/ +/docs/node_modules/ /man/ -/doc/*/npm-index.md /npmrc /release/ /npm-*.tgz @@ -21,3 +22,6 @@ npm-debug.log .jshintrc .eslintrc .nyc_output +/test/npm_cache* +/node_modules/.cache +.DS_Store \ No newline at end of file diff --git a/.licensee.json b/.licensee.json index 820425a9ce80a..5b815d601ee77 100644 --- a/.licensee.json +++ b/.licensee.json @@ -1,6 +1,12 @@ { - "license": "(MIT OR BSD-2-Clause OR BSD-3-Clause OR Apache-2.0 OR ISC OR Unlicense OR CC-BY-3.0 OR CC0-1.0 OR Artistic-2.0)", - "whitelist": { + "licenses": { + "spdx": [ + "CC-BY-3.0" + ], + "blueOak": "bronze" + }, + "corrections": true, + "packages": { "config-chain": "1.1.12", "cyclist": "0.2.2", "json-schema": "0.2.3", diff --git a/.mailmap b/.mailmap index 42c32d0ab4419..725a59da65590 100644 --- a/.mailmap +++ b/.mailmap @@ -2,13 +2,14 @@ Alex K. Wolfe Andrew Bradley Andrew Lunny Arlo Breault -Ashley Williams Ashley Williams +Ashley Williams Benjamin Coe Benjamin Coe Brian White Cedric Nelson Charlie Robbins +Claudia Hernández Dalmais Maxence Danila Gerasimov Dave Galbraith @@ -22,21 +23,23 @@ Evan Lucas Evan Lucas Faiq Raza Forbes Lindesay -Forrest L Norvell +Forrest L Norvell +Forrest L Norvell Gabriel Barros Geoff Flarity Gregers Gram Rygg Ifeanyi Oraelosi -Isaac Z. Schlueter -Isaac Z. Schlueter isaacs +isaacs +isaacs +isaacs Jake Verbaten James Sanders James Treworgy Jason Smith Jed Fox +Jonas Weber Joshua Bennett Joshua Bennett -Jonas Weber Julien Meddah Kat Marchán Kevin Lorenz @@ -50,12 +53,12 @@ Max Goodman Maxim Bogushevich Maximilian Antoni Michael Hayes -Nicolas Morel Misha Kaletsky +Nicolas Morel Olivier Melcher Ra'Shaun Stovall -Rebecca Turner Rebecca Turner +Rebecca Turner Ryan Emery Sam Mikes Sreenivas Alapati diff --git a/.npmignore b/.npmignore index 1b32b033e23a7..c42aaf956257d 100644 --- a/.npmignore +++ b/.npmignore @@ -1,11 +1,13 @@ *.swp .*.swp +netlify.toml npm-debug.log /.github /test node_modules/marked -node_modules/ronn +node_modules/marked-man node_modules/tap +tap-snapshots node_modules/.bin node_modules/npm-registry-mock /npmrc diff --git a/.travis.yml b/.travis.yml index 72b0f9a52db3d..cec3aac226b1c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,38 +1,22 @@ -sudo: false # need to declare the language as well as the matrix below language: node_js -# having top-level `env:` adds a phantom build -# https://github.com/travis-ci/travis-ci/issues/4681 -#env: DEPLOY_VERSION=testing -matrix: - include: - # LTS is our most important target - - node_js: "10" - # DEPLOY_VERSION is used to set the couchapp setup mode for test/tap/registry.js - # only gather coverage info for LTS - env: DEPLOY_VERSION=testing COVERALLS_REPO_TOKEN="$COVERALLS_OPTIONAL_TOKEN" - script: - - "node . run tap-cover -- \"test/tap/*.js\"" - - "unset COVERALLS_REPO_TOKEN ; node . run tap -- \"test/broken-under-*/*.js\"" - # previous LTS is next most important - - node_js: "6" - env: DEPLOY_VERSION=testing - - node_js: "8" - env: DEPLOY_VERSION=testing - - node_js: "9" - env: DEPLOY_VERSION=testing - - node_js: "11" - env: DEPLOY_VERSION=testing - script: - - "npx standard" - - "node . run licenses" - - "node . 
run tap -- \"test/tap/*.js\" \"test/broken-under-nyc/*.js\"" + +os: + - windows + +node_js: + - 12 + - 10 + - 8 + - 6 + +env: "DEPLOY_VERSION=testing" + notifications: slack: npm-inc:kRqQjto7YbINqHPb1X6nS3g8 -cache: - directories: - - node_modules/.cache + install: - "node . install" + script: - - "node . run tap -- \"test/tap/*.js\" \"test/broken-under-nyc/*.js\"" + - "node . run tap -- \"test/tap/*.js\" -t600 -Rclassic -c" diff --git a/AUTHORS b/AUTHORS index f66afe80f01a1..89270e3c127b4 100644 --- a/AUTHORS +++ b/AUTHORS @@ -1,5 +1,5 @@ # Authors sorted by whether or not they're me -Isaac Z. Schlueter +isaacs Steve Steiner Mikeal Rogers Aaron Blohowiak @@ -612,3 +612,79 @@ Joe Bottigliero Nikolai Vavilov Kelvin Jin 乱序 +Audrey Eschright +Xu Meng +George +Beni von Cheni +Frédéric Harper +Johannes Würbach +ƇʘƁ̆ąƇ́ +Eli Doran +Tobias Koppers +Grey Baker +JT Turner +Audrey Eschright +Alexander Gudulin +Philipp Hagemeister +Amadou Sall +Chris Manson +vlasy +Emilis Dambauskas (Tokenmill) +George Czabania +Jonathan Underwood +Nick Graef +James George +John O'Sullivan +ossdev +Raphael Goulais +COURIER, CALEB [AG/1000] +CalebCourier +Florian Keller +Sreeram Jayan +--get +raywu0123 +Iván Reinoso García +Roy Marples +Robert James Gabriel +John Firebaugh +Kitten King +Claudia Hernández +Artem Sapegin +Márton Salomváry +gall0ws +Olivier Chevet +Maël Nison +Sara Ahbabou +Gareth Jones +Ruy Adorno +Michael Perrotte +Ben Blank +Christian Clauss +Nikita Lebedev +Henrik Gemal +Philip Harrison +Jason Etcovitch +Darcy Clarke +orion +NoDocCat +joan xie +Amal Hussein +Brett Zamir +Menelaos Kotsollaris +Mehdi Hasan Khan +Craig Loewen +Fighting-Jack <574637316@qq.com> +Bakel, Roel van +Charlie West-Toebe <38671683+Hoidberg@users.noreply.github.com> +Richard Lau +Felix Yan +Zhenya Vinogradov +Rafael Hengles +Jan-Philip Gehrcke +Caleb Sacks <16855387+clabe45@users.noreply.github.com> +Kyle Getz +Sean Healy +Netanel Gilad +Dave Nicolson +Ajay Narain Mathur +Vitaliy Markitanov <9357021+vit100@users.noreply.github.com> diff --git a/CHANGELOG.md b/CHANGELOG.md index b575dbeef55da..b25d1532eb651 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,1459 @@ +## 6.14.2 (2020-03-03) + +### DOCUMENTATION +* [`f9248c0be`](https://github.com/npm/cli/commit/f9248c0be63fba37a30098dc9215c752474380e3) [#730](https://github.com/npm/cli/pull/730) chore(docs): update unpublish docs & policy reference ([@nomadtechie](https://github.com/nomadtechie), [@mikemimik](https://github.com/mikemimik)) + +### DEPENDENCIES + +* [`909cc3918`](https://github.com/npm/cli/commit/909cc39180a352f206898481add5772206c8b65f) `hosted-git-info@2.8.8` ([@darcyclarke](https://github.com/darcyclarke)) + * [`5038b18`](https://github.com/npm/hosted-git-info/commit/5038b1891a61ca3cd7453acbf85d7011fe0086bb) fix: regression in old node versions w/ respect to url.URL implmentation +* [`9204ffa58`](https://github.com/npm/cli/commit/9204ffa584c140c5e22b1ee37f6df2c98f5dc70b) `npm-profile@4.0.4` ([@isaacs](https://github.com/isaacs)) + * [`6bcf086`](https://github.com/npm/npm-profile/commit/6bcf0860a3841865099d0115dbcbde8b78109bd9) fix: treat non-http/https login urls as invalid +* [`0365d39bd`](https://github.com/npm/cli/commit/0365d39bdc74960a18caac674f51d0e2a98b31e6) `glob@7.1.6` ([@isaacs](https://github.com/isaacs)) + +## 6.14.1 (2020-02-26) + +* [`303e5c11e`](https://github.com/npm/cli/commit/303e5c11e7db34cf014107aecd2e81c821bfde8d) + `hosted-git-info@2.8.7` + Fixes a regression where scp-style git urls are passed to the WhatWG URL + parser, which does not 
handle them properly. + ([@isaacs](https://github.com/isaacs)) + +## 6.14.0 (2020-02-25) + +### FEATURES +* [`30f170877`](https://github.com/npm/cli/commit/30f170877954acd036cb234a581e4eb155049b82) [#731](https://github.com/npm/cli/pull/731) add support for multiple funding sources ([@ljharb](https://github.com/ljharb) & [@ruyadorno](hhttps://github.com/ruyadorno/)) + +### BUG FIXES +* [`55916b130`](https://github.com/npm/cli/commit/55916b130ef52984584678f2cc17c15c1f031cb5) [#508](https://github.com/npm/cli/pull/508) fix: check `npm.config` before accessing its members ([@kaiyoma](https://github.com/kaiyoma)) +* [`7d0cd65b2`](https://github.com/npm/cli/commit/7d0cd65b23c0986b631b9b54d87bbe74902cc023) [#733](https://github.com/npm/cli/pull/733) fix: access grant with unscoped packages ([@netanelgilad](https://github.com/netanelgilad)) +* [`28c3d40d6`](https://github.com/npm/cli/commit/28c3d40d65eef63f9d6ccb60b99ac57f5057a46e), [`0769c5b20`](https://github.com/npm/cli/commit/30f170877954acd036cb234a581e4eb155049b82) [#945](https://github.com/npm/cli/pull/945), [#697](https://github.com/npm/cli/pull/697) fix: allow new major versions of node to be automatically considered "supported" ([@isaacs](https://github.com/isaacs), [@ljharb](https://github.com/ljharb)) + +### DEPENDENCIES +* [`6f39e93`](https://github.com/npm/hosted-git-info/commit/6f39e93bae9162663af6f15a9d10bce675dd5de3) `hosted-git-info@2.8.6` ([@darcyclarke](https://github.com/darcyclarke)) + * fix: passwords & usernames are escaped properly in git deps ([@stevenhilder](https://github.com/stevenhilder)) +* [`f14b594ee`](https://github.com/npm/cli/commit/f14b594ee9dbfc98ed0b65c65d904782db4f31ad) `chownr@1.1.4` ([@isaacs](https://github.com/isaacs)) +* [`77044150b`](https://github.com/npm/cli/commit/77044150b763d67d997f9ff108219132ea922678) `npm-packlist@1.4.8` ([@isaacs](https://github.com/isaacs)) +* [`1d112461a`](https://github.com/npm/cli/commit/1d112461ad8dc99e5ff7fabb5177e8c2f89a9755) `npm-registry-fetch@4.0.3` ([@isaacs](https://github.com/isaacs)) + * [`ba8b4fe`](https://github.com/npm/npm-registry-fetch/commit/ba8b4fe60eb6cdf9b39012560aec596eda8ce924) fix: always bypass cache when ?write=true +* [`a47fed760`](https://github.com/npm/cli/commit/a47fed7603a6ed31dcc314c0c573805f05a96830) `readable-stream@3.6.0` + * [`3bbf2d6`](https://github.com/nodejs/readable-stream/commit/3bbf2d6feb45b03f4e46a2ae8251601ad2262121) fix: babel's "loose mode" class transform enbrittles BufferList ([@ljharb](https://github.com/ljharb)) + +### DOCUMENTATION +* [`284c1c055`](https://github.com/npm/cli/commit/284c1c055a28c4b334496101799acefe3c54ceb3), [`fbb5f0e50`](https://github.com/npm/cli/commit/fbb5f0e50e54425119fa3f03c5de93e4cb6bfda7) [#729](https://github.com/npm/cli/pull/729) update lifecycle hooks docs + ([@seanhealy](https://github.com/seanhealy), [@mikemimik](https://github.com/mikemimik)) +* [`1c272832d`](https://github.com/npm/cli/commit/1c272832d048300e409882313305c416dc6f21a2) [#787](https://github.com/npm/cli/pull/787) fix: trademarks typo ([@dnicolson](https://github.com/dnicolson)) +* [`f6ff41776`](https://github.com/npm/cli/commit/f6ff417767d52418cc8c9e7b9731ede2c3916d2e) [#936](https://github.com/npm/cli/pull/936) fix: postinstall example ([@ajaymathur](https://github.com/ajaymathur)) +* [`373224b16`](https://github.com/npm/cli/commit/373224b16e019b7b63d8f0b4c5d4adb7e5cb80dd) [#939](https://github.com/npm/cli/pull/939) fix: bad links in publish docs ([@vit100](https://github.com/vit100)) + +### MISCELLANEOUS +* 
[`85c79636d`](https://github.com/npm/cli/commit/85c79636df31bac586c0e380c4852ee155a7723c) [#736](https://github.com/npm/cli/pull/736) add script to update dist-tags ([@mikemimik](https://github.com/mikemimik)) + +## 6.13.7 (2020-01-28) + +### BUG FIXES +* [`7dbb91438`](https://github.com/npm/cli/commit/7dbb914382ecd2074fffb7eba81d93262e2d23c6) + [#655](https://github.com/npm/cli/pull/655) + Update CI detection cases + ([@isaacs](https://github.com/isaacs)) + +### DEPENDENCIES +* [`0fb1296c7`](https://github.com/npm/cli/commit/0fb1296c7d6d4bb9e78c96978c433cd65e55c0ea) + `libnpx@10.2.2` + ([@mikemimik](https://github.com/mikemimik)) +* [`c9b69d569`](https://github.com/npm/cli/commit/c9b69d569fec7944375a746e9c08a6fa9bec96ff) + `node-gyp@5.0.7` + ([@mikemimik](https://github.com/mikemimik)) +* [`e8dbaf452`](https://github.com/npm/cli/commit/e8dbaf452a1f6c5350bb0c37059b89a7448e7986) + `bin-links@1.1.7` + ([@mikemimik](https://github.com/mikemimik)) + * [#613](https://github.com/npm/cli/issues/613) Fixes bin entry for package + +## 6.13.6 (2020-01-09) + +### DEPENDENCIES + +* [`6dba897a1`](https://github.com/npm/cli/commit/6dba897a1e2d56388fb6df0c814b0bb85af366b4) + `pacote@9.5.12`: + * [`d2f4176`](https://github.com/npm/pacote/commit/d2f4176b6af393d7e29de27e9b638dbcbab9a0c7) + fix(git): Do not drop uid/gid when executing in root-owned directory + ([@isaacs](https://github.com/isaacs)) + +## 6.13.5 (2020-01-09) + +### BUG FIXES + +* [`fd0a802ec`](https://github.com/npm/cli/commit/fd0a802ec468ec7b98d6c15934c355fef0e7ff60) [#550](https://github.com/npm/cli/pull/550) Fix cache location for `npm ci` ([@zhenyavinogradov](https://github.com/zhenyavinogradov)) +* [`4b30f3cca`](https://github.com/npm/cli/commit/4b30f3ccaebf50d6ab3bad130ff94827c017cc16) [#648](https://github.com/npm/cli/pull/648) fix(version): using 'allow-same-version', git commit --allow-empty and git tag -f ([@rhengles](https://github.com/rhengles)) + +### TESTING + +* [`e16f68d30`](https://github.com/npm/cli/commit/e16f68d30d59ce1ddde9fe62f7681b2c07fce84d) test(ci): add failing cache config test ([@ruyadorno](https://github.com/ruyadorno)) +* [`3f009fbf2`](https://github.com/npm/cli/commit/3f009fbf2c42f68c5127efecc6e22db105a74fe0) [#659](https://github.com/npm/cli/pull/659) test: fix bin-overwriting test on Windows ([@isaacs](https://github.com/isaacs)) +* [`43ae0791f`](https://github.com/npm/cli/commit/43ae0791f74f68e02850201a64a6af693657b241) [#601](https://github.com/npm/cli/pull/601) ci: Allow builds to run even if one fails ([@XhmikosR](https://github.com/XhmikosR)) +* [`4a669bee4`](https://github.com/npm/cli/commit/4a669bee4ac54c70adc6979d45cd0605b6dc33fd) [#603](https://github.com/npm/cli/pull/603) Remove the unused appveyor.yml ([@XhmikosR](https://github.com/XhmikosR)) +* [`9295046ac`](https://github.com/npm/cli/commit/9295046ac92bbe82f4d84e1ec90cc81d3b80bfc7) [#600](https://github.com/npm/cli/pull/600) ci: switch to `actions/checkout@v2` ([@XhmikosR](https://github.com/XhmikosR)) + +### DOCUMENTATION + +* [`f2d770ac7`](https://github.com/npm/cli/commit/f2d770ac768ea84867772b90a3c9acbdd0c1cb6a) [#569](https://github.com/npm/cli/pull/569) fix netlify publish path config ([@claudiahdz](https://github.com/claudiahdz)) +* [`462cf0983`](https://github.com/npm/cli/commit/462cf0983dbc18a3d93f77212ca69f878060b2ec) [#627](https://github.com/npm/cli/pull/627) update gatsby dependencies ([@felixonmars](https://github.com/felixonmars)) +* [`6fb5dbb72`](https://github.com/npm/cli/commit/6fb5dbb7213c4c050c9a47a7d5131447b8b7dcc8) + 
[#532](https://github.com/npm/cli/pull/532) docs: clarify usage of global prefix ([@jgehrcke](https://github.com/jgehrcke)) + +## 6.13.4 (2019-12-11) + +## BUGFIXES + +* [`320ac9aee`](https://github.com/npm/cli/commit/320ac9aeeafd11bb693c53b31148b8d10c4165e8) + [npm/bin-links#12](https://github.com/npm/bin-links/pull/12) + [npm/gentle-fs#7](https://github.com/npm/gentle-fs/pull/7) + Do not remove global bin/man links inappropriately + ([@isaacs](https://github.com/isaacs)) + +## DEPENDENCIES + +* [`52fd21061`](https://github.com/npm/cli/commit/52fd21061ff8b1a73429294620ffe5ebaaa60d3e) + `gentle-fs@2.3.0` + ([@isaacs](https://github.com/isaacs)) +* [`d06f5c0b0`](https://github.com/npm/cli/commit/d06f5c0b0611c43b6e70ded92af24fa5d83a0f48) + `bin-links@1.1.6` + ([@isaacs](https://github.com/isaacs)) + +## 6.13.3 (2019-12-09) + +### DEPENDENCIES + +* [`19ce061a2`](https://github.com/npm/cli/commit/19ce061a2ee165d8de862c8f0f733c222846b9e1) + `bin-links@1.1.5` Properly normalize, sanitize, and verify `bin` entries + in `package.json`. +* [`59c836aae`](https://github.com/npm/cli/commit/59c836aae8d0104a767e80c540b963c91774012a) + `npm-packlist@1.4.7` +* [`fb4ecd7d2`](https://github.com/npm/cli/commit/fb4ecd7d2810b0b4897daaf081a5e2f3f483b310) + `pacote@9.5.11` + * [`5f33040`](https://github.com/npm/pacote/commit/5f3304028b6985fd380fc77c4840ff12a4898301) + [#476](https://github.com/npm/cli/issues/476) + [npm/pacote#22](https://github.com/npm/pacote/issues/22) + [npm/pacote#14](https://github.com/npm/pacote/issues/14) fix: Do not + drop perms in git when not root ([isaacs](https://github.com/isaacs), + [@darcyclarke](https://github.com/darcyclarke)) + * [`6f229f7`](https://github.com/npm/pacote/6f229f78d9911b4734f0a19c6afdc5454034c759) + sanitize and normalize package bin field + ([isaacs](https://github.com/isaacs)) +* [`1743cb339`](https://github.com/npm/cli/commit/1743cb339767e86431dcd565c7bdb0aed67b293d) + `read-package-json@2.1.1` + + +## 6.13.2 (2019-12-03) + +### BUG FIXES + +* [`4429645b3`](https://github.com/npm/cli/commit/4429645b3538e1cda54d8d1b7ecb3da7a88fdd3c) + [#546](https://github.com/npm/cli/pull/546) + fix docs target typo + ([@richardlau](https://github.com/richardlau)) +* [`867642942`](https://github.com/npm/cli/commit/867642942bec69bb9ab71cff1914fb6a9fe67de8) + [#142](https://github.com/npm/cli/pull/142) + fix(packageRelativePath): fix 'where' for file deps + ([@larsgw](https://github.com/larsgw)) +* [`d480f2c17`](https://github.com/npm/cli/commit/d480f2c176e6976b3cca3565e4c108b599b0379b) + [#527](https://github.com/npm/cli/pull/527) + Revert "windows: Add preliminary WSL support for npm and npx" + ([@craigloewen-msft](https://github.com/craigloewen-msft)) +* [`e4b97962e`](https://github.com/npm/cli/commit/e4b97962e5fce0d49beb541ce5a0f96aee0525de) + [#504](https://github.com/npm/cli/pull/504) + remove unnecessary package.json read when reading shrinkwrap + ([@Lighting-Jack](https://github.com/Lighting-Jack)) +* [`1c65d26ac`](https://github.com/npm/cli/commit/1c65d26ac9f10ac0037094c207d216fbf0e969bf) + [#501](https://github.com/npm/cli/pull/501) + fix(fund): open url for string shorthand + ([@ruyadorno](https://github.com/ruyadorno)) +* [`ae7afe565`](https://github.com/npm/cli/commit/ae7afe56504dbffabf9f73d55b6dac1e3e9fed4a) + [#263](https://github.com/npm/cli/pull/263) + Don't log error message if git tagging is disabled + ([@woppa684](https://github.com/woppa684)) +* [`4c1b16f6a`](https://github.com/npm/cli/commit/4c1b16f6aecaf78956b9335734cfde2ac076ee11) + 
[#182](https://github.com/npm/cli/pull/182) + Warn the user that it is uninstalling npm-install + ([@Hoidberg](https://github.com/Hoidberg)) + +## 6.13.1 (2019-11-18) + +### BUG FIXES + +* [`938d6124d`](https://github.com/npm/cli/commit/938d6124d6d15d96b5a69d0ae32ef59fceb8ceab) + [#472](https://github.com/npm/cli/pull/472) + fix(fund): support funding string shorthand + ([@ruyadorno](https://github.com/ruyadorno)) +* [`b49c5535b`](https://github.com/npm/cli/commit/b49c5535b7c41729a8d167b035924c3c66b36de0) + [#471](https://github.com/npm/cli/pull/471) + should not publish tap-snapshot folder + ([@ruyadorno](https://github.com/ruyadorno)) +* [`3471d5200`](https://github.com/npm/cli/commit/3471d5200217bfa612b1a262e36c9c043a52eb09) + [#253](https://github.com/npm/cli/pull/253) + Add preliminary WSL support for npm and npx + ([@infinnie](https://github.com/infinnie)) +* [`3ef295f23`](https://github.com/npm/cli/commit/3ef295f23ee1b2300abf13ec19e935c47a455179) + [#486](https://github.com/npm/cli/pull/486) + print quick audit report for human output + ([@isaacs](https://github.com/isaacs)) + +### TESTING + +* [`dbbf977ac`](https://github.com/npm/cli/commit/dbbf977acd1e74bcdec859c562ea4a2bc0536442) + [#278](https://github.com/npm/cli/pull/278) + added workflow to trigger and run benchmarks + ([@mikemimik](https://github.com/mikemimik)) +* [`b4f5e3825`](https://github.com/npm/cli/commit/b4f5e3825535256aaada09c5e8f104570a3d96a4) + [#457](https://github.com/npm/cli/pull/457) + feat(docs): adding tests and updating docs to reflect changes in registry teams API. + ([@nomadtechie](https://github.com/nomadtechie)) +* [`454c7dd60`](https://github.com/npm/cli/commit/454c7dd60c78371bf606f11a17ed0299025bc37c) + [#456](https://github.com/npm/cli/pull/456) + fix git configs for git 2.23 and above + ([@isaacs](https://github.com/isaacs)) + +### DOCUMENTATION + +* [`b8c1576a4`](https://github.com/npm/cli/commit/b8c1576a448566397c721655b95fc90bf202b35a) [`30b013ae8`](https://github.com/npm/cli/commit/30b013ae8eacd04b1b8a41ce2ed0dd50c8ebae25) [`26c1b2ef6`](https://github.com/npm/cli/commit/26c1b2ef6be1595d28d935d35faa8ec72daae544) [`9f943a765`](https://github.com/npm/cli/commit/9f943a765faf6ebb8a442e862b808dbb630e018d) [`c0346b158`](https://github.com/npm/cli/commit/c0346b158fc25ab6ca9954d4dd78d9e62f573a41) [`8e09d5ad6`](https://github.com/npm/cli/commit/8e09d5ad67d4f142241193cecbce61c659389be3) [`4a2f551ee`](https://github.com/npm/cli/commit/4a2f551eeb3285f6f200534da33644789715a41a) [`87d67258c`](https://github.com/npm/cli/commit/87d67258c213d9ea9a49ce1804294a718f08ff13) [`5c3b32722`](https://github.com/npm/cli/commit/5c3b3272234764c8b4d2d798b69af077b5a529c7) [`b150eaeff`](https://github.com/npm/cli/commit/b150eaeff428180bfa03be53fd741d5625897758) [`7555a743c`](https://github.com/npm/cli/commit/7555a743ce4c3146d6245dd63f91503c7f439a6c) [`b89423e2f`](https://github.com/npm/cli/commit/b89423e2f6a09b290b15254e7ff7e8033b434d83) + [#463](https://github.com/npm/cli/pull/463) + [#285](https://github.com/npm/cli/pull/285) + [#268](https://github.com/npm/cli/pull/268) + [#232](https://github.com/npm/cli/pull/232) + [#485](https://github.com/npm/cli/pull/485) + [#453](https://github.com/npm/cli/pull/453) + docs cleanup: typos, styling and content + ([@claudiahdz](https://github.com/claudiahdz)) + ([@XhmikosR](https://github.com/XhmikosR)) + ([@mugli](https://github.com/mugli)) + ([@brettz9](https://github.com/brettz9)) + ([@mkotsollaris](https://github.com/mkotsollaris)) + +### DEPENDENCIES + +* 
[`661d86cd2`](https://github.com/npm/cli/commit/661d86cd229b14ddf687b7f25a66941a79d233e7) + `make-fetch-happen@5.0.2` + ([@claudiahdz](https://github.com/claudiahdz)) + +## 6.13.0 (2019-11-05) + +### NEW FEATURES + +* [`4414b06d9`](https://github.com/npm/cli/commit/4414b06d944c56bee05ccfb85260055a767ee334) + [#273](https://github.com/npm/cli/pull/273) + add fund command + ([@ruyadorno](https://github.com/ruyadorno)) + +### DOCUMENTATION + +* [`ae4c74d04`](https://github.com/npm/cli/commit/ae4c74d04f820a0255a92bdfe77ecf97af134fae) + [#274](https://github.com/npm/cli/pull/274) + migrate existing docs to gatsby + ([@claudiahdz](https://github.com/claudiahdz)) +* [`4ff1bb180`](https://github.com/npm/cli/commit/4ff1bb180b1db8c72e51b3d57bd4e268b738e049) + [#277](https://github.com/npm/cli/pull/277) + updated documentation copy + ([@oletizi](https://github.com/oletizi)) + +### BUG FIXES + +* [`e4455409f`](https://github.com/npm/cli/commit/e4455409fe6fe9c198b250b488129171f0b4624a) + [#281](https://github.com/npm/cli/pull/281) + delete ps1 files on package removal + ([@NoDocCat](https://github.com/NoDocCat)) +* [`cd14d4701`](https://github.com/npm/cli/commit/cd14d47014e8c96ffd6a18791e8752028b19d637) + [#279](https://github.com/npm/cli/pull/279) + update supported node list to remove v6.0, v6.1, v9.0 - v9.2 + ([@ljharb](https://github.com/ljharb)) + +### DEPENDENCIES + +* [`a37296b20`](https://github.com/npm/cli/commit/a37296b20ca3e19c2bbfa78fedcfe695e03fda69) + `pacote@9.5.9` +* [`d3cb3abe8`](https://github.com/npm/cli/commit/d3cb3abe8cee54bd2624acdcf8043932ef0d660a) + `read-cmd-shim@1.0.5` + +### TESTING + +* [`688cd97be`](https://github.com/npm/cli/commit/688cd97be94ca949719424ff69ff515a68c5caba) + [#272](https://github.com/npm/cli/pull/272) + use github actions for CI + ([@JasonEtco](https://github.com/JasonEtco)) +* [`9a2d8af84`](https://github.com/npm/cli/commit/9a2d8af84f7328f13d8f578cf4b150b9d5f09517) + [#240](https://github.com/npm/cli/pull/240) + Clean up some flakiness and inconsistency + ([@isaacs](https://github.com/isaacs)) + +## 6.12.1 (2019-10-29) + +### BUG FIXES + +* [`6508e833d`](https://github.com/npm/cli/commit/6508e833df35a3caeb2b496f120ce67feff306b6) + [#269](https://github.com/npm/cli/pull/269) + add node v13 as a supported version + ([@ljharb](https://github.com/ljharb)) +* [`b6588a8f7`](https://github.com/npm/cli/commit/b6588a8f74fb8b1ad103060b73c4fd5174b1d1f6) + [#265](https://github.com/npm/cli/pull/265) + Fix regression in lockfile repair for sub-deps + ([@feelepxyz](https://github.com/feelepxyz)) +* [`d5dfe57a1`](https://github.com/npm/cli/commit/d5dfe57a1d810fe7fd64edefc976633ee3a4da53) + [#266](https://github.com/npm/cli/pull/266) + resolve circular dependency in pack.js + ([@addaleax](https://github.com/addaleax)) + +### DEPENDENCIES + +* [`73678bb59`](https://github.com/npm/cli/commit/73678bb590a8633c3bdbf72e08f1279f9e17fd28) + `chownr@1.1.3` +* [`4b76926e2`](https://github.com/npm/cli/commit/4b76926e2058ef30ab1d5e2541bb96d847653417) + `graceful-fs@4.2.3` +* [`c691f36a9`](https://github.com/npm/cli/commit/c691f36a9c108b6267859fe61e4a38228b190c17) + `libcipm@4.0.7` +* [`5e1a14975`](https://github.com/npm/cli/commit/5e1a14975311bfdc43df8e1eb317ae5690ee580c) + `npm-packlist@1.4.6` +* [`c194482d6`](https://github.com/npm/cli/commit/c194482d65ee81a5a0a6281c7a9f984462286c56) + `npm-registry-fetch@4.0.2` +* [`bc6a8e0ec`](https://github.com/npm/cli/commit/bc6a8e0ec966281e49b1dc66f9c641ea661ab7a6) + `tar@4.4.1` +* 
[`4dcca3cbb`](https://github.com/npm/cli/commit/4dcca3cbb161da1f261095d9cdd26e1fbb536a8d) + `uuid@3.3.3` + +## 6.12.0 (2019-10-08): + +Now `npm ci` runs prepare scripts for git dependencies, and respects the +`--no-optional` argument. Warnings for `engine` mismatches are printed +again. Various other fixes and cleanups. + +### BUG FIXES + +* [`890b245dc`](https://github.com/npm/cli/commit/890b245dc1f609590d8ab993fac7cf5a37ed46a5) + [#252](https://github.com/npm/cli/pull/252) ci: add dirPacker to options + ([@claudiahdz](https://github.com/claudiahdz)) +* [`f3299acd0`](https://github.com/npm/cli/commit/f3299acd0b4249500e940776aca77cc6c0977263) + [#257](https://github.com/npm/cli/pull/257) + [npm.community#4792](https://npm.community/t/engines-and-engines-strict-ignored/4792) + warn message on engine mismatch + ([@ruyadorno](https://github.com/ruyadorno)) +* [`bbc92fb8f`](https://github.com/npm/cli/commit/bbc92fb8f3478ff67071ebaff551f01c1ea42ced) + [#259](https://github.com/npm/cli/pull/259) + [npm.community#10288](https://npm.community/t/npm-token-err-figgypudding-options-cannot-be-modified-use-concat-instead/10288) + Fix figgyPudding error in `npm token` + ([@benblank](https://github.com/benblank)) +* [`70f54dcb5`](https://github.com/npm/cli/commit/70f54dcb5693b301c6b357922b7e8d16b57d8b00) + [#241](https://github.com/npm/cli/pull/241) doctor: Make OK more + consistent ([@gemal](https://github.com/gemal)) + +### FEATURES + +* [`ed993a29c`](https://github.com/npm/cli/commit/ed993a29ccf923425317c433844d55dbea2f23ee) + [#249](https://github.com/npm/cli/pull/249) Add CI environment variables + to user-agent ([@isaacs](https://github.com/isaacs)) +* [`f6b0459a4`](https://github.com/npm/cli/commit/f6b0459a466a2c663dbd549cdc331e7732552dca) + [#248](https://github.com/npm/cli/pull/248) Add option to save + package-lock without formatting Adds a new config + `--format-package-lock`, which defaults to true. + ([@bl00mber](https://github.com/bl00mber)) + +### DEPENDENCIES + +* [`0ca063c5d`](https://github.com/npm/cli/commit/0ca063c5dc961c4aa17373f4b33fb54c51c8c8d6) + `npm-lifecycle@3.1.4`: + - fix: filter functions and undefined out of makeEnv + ([@isaacs](https://github.com/isaacs)) +* [`5df6b0ea2`](https://github.com/npm/cli/commit/5df6b0ea2e3106ba65bba649cc8d7f02f4738236) + `libcipm@4.0.4`: + - fix: pack git directories properly + ([@claudiahdz](https://github.com/claudiahdz)) + - respect no-optional argument + ([@cruzdanilo](https://github.com/cruzdanilo)) +* [`7e04f728c`](https://github.com/npm/cli/commit/7e04f728cc4cd4853a8fc99e2df0a12988897589) + `tar@4.4.12` +* [`5c380e5a3`](https://github.com/npm/cli/commit/5c380e5a33d760bb66a4285b032ae5f50af27199) + `stringify-package@1.0.1` ([@isaacs](https://github.com/isaacs)) +* [`62f2ca692`](https://github.com/npm/cli/commit/62f2ca692ac0c0467ef4cf74f91777a5175258c4) + `node-gyp@5.0.5` ([@isaacs](https://github.com/isaacs)) +* [`0ff0ea47a`](https://github.com/npm/cli/commit/0ff0ea47a8840dd7d952bde7f7983a5016cda8ea) + `npm-install-checks@3.0.2` ([@isaacs](https://github.com/isaacs)) +* [`f46edae94`](https://github.com/npm/cli/commit/f46edae9450b707650a0efab09aa1e9295a18070) + `hosted-git-info@2.8.5` ([@isaacs](https://github.com/isaacs)) + +### TESTING + +* [`44a2b036b`](https://github.com/npm/cli/commit/44a2b036b34324ec85943908264b2e36de5a9435) + [#262](https://github.com/npm/cli/pull/262) fix root-ownership race + conditions in meta-test ([@isaacs](https://github.com/isaacs)) + +## 6.11.3 (2019-09-03): + +Fix npm ci regressions and npm outdated depth. 
+ +### BUG FIXES + +* [`235ed1d28`](https://github.com/npm/cli/commit/235ed1d2838ef302bb995e183980209d16c51b9b) + [#239](https://github.com/npm/cli/pull/239) + Don't override user specified depth in outdated + Restores ability to update packages using `--depth` as suggested by `npm audit`. + ([@G-Rath](https://github.com/G-Rath)) +* [`1fafb5151`](https://github.com/npm/cli/commit/1fafb51513466cd793866b576dfea9a8963a3335) + [#242](https://github.com/npm/cli/pull/242) + [npm.community#9586](https://npm.community/t/6-11-1-some-dependencies-are-no-longer-being-installed/9586/4) + Revert "install: do not descend into directory deps' child modules" + ([@isaacs](https://github.com/isaacs)) +* [`cebf542e6`](https://github.com/npm/cli/commit/cebf542e61dcabdd2bd3b876272bf8eebf7d01cc) + [#243](https://github.com/npm/cli/pull/243) + [npm.community#9720](https://npm.community/t/6-11-2-npm-ci-installs-package-with-wrong-permissions/9720) + ci: pass appropriate configs for file/dir modes + ([@isaacs](https://github.com/isaacs)) + +### DEPENDENCIES + +* [`e5fbb7ed1`](https://github.com/npm/cli/commit/e5fbb7ed1fc7ef5c6ca4790e2d0dc441e0ac1596) + `read-cmd-shim@1.0.4` + ([@claudiahdz](https://github.com/claudiahdz)) +* [`23ce65616`](https://github.com/npm/cli/commit/23ce65616c550647c586f7babc3c2f60115af2aa) + `npm-pick-manifest@3.0.2` + ([@claudiahdz](https://github.com/claudiahdz)) + +## 6.11.2 (2019-08-22): + +Fix a recent Windows regression, and two long-standing Windows bugs. Also, +get CI running on Windows, so these things are less likely in the future. + +### DEPENDENCIES + +* [`9778a1b87`](https://github.com/npm/cli/commit/9778a1b878aaa817af6e99385e7683c2a389570d) + `cmd-shim@3.0.3`: Fix regression where shims fail to preserve exit code + ([@isaacs](https://github.com/isaacs)) +* [`bf93e91d8`](https://github.com/npm/cli/commit/bf93e91d879c816a055d5913e6e4210d7299f299) + `npm-package-arg@6.1.1`: Properly handle git+file: urls on Windows when a + drive letter is included. ([@isaacs](https://github.com/isaacs)) + +### BUGFIXES + +* [`6cc4cc66f`](https://github.com/npm/cli/commit/6cc4cc66f1fb050dc4113e35cab59197fd48e04a) + escape args properly on Windows Bash Despite being bash, Node.js running + on windows git mingw bash still executes child processes using cmd.exe. + As a result, arguments in this environment need to be escaped in the + style of cmd.exe, not bash. ([@isaacs](https://github.com/isaacs)) + +### TESTS + +* [`291aba7b8`](https://github.com/npm/cli/commit/291aba7b821e247b96240b1ec037310ead69a594) + make tests pass on Windows ([@isaacs](https://github.com/isaacs)) +* [`fea3a023a`](https://github.com/npm/cli/commit/fea3a023a80863f32a5f97f5132401b1a16161b8) + travis: run tests on Windows as well + ([@isaacs](https://github.com/isaacs)) + +## 6.11.1 (2019-08-20): + +Fix a regression for windows command shim syntax. + +* [`37db29647`](https://github.com/npm/cli/commit/37db2964710c80003604b7e3c1527d17be7ed3d0) + `cmd-shim@3.0.2` ([@isaacs](https://github.com/isaacs)) + +## v6.11.0 (2019-08-20): + +A few meaty bugfixes, and introducing `peerDependenciesMeta`. 
+ +### FEATURES + +* [`a12341088`](https://github.com/npm/cli/commit/a12341088820c0e7ef6c1c0db3c657f0c2b3943e) + [#224](https://github.com/npm/cli/pull/224) Implements + peerDependenciesMeta ([@arcanis](https://github.com/arcanis)) +* [`2f3b79bba`](https://github.com/npm/cli/commit/2f3b79bbad820fd4a398aa494b19f79b7fd520a1) + [#234](https://github.com/npm/cli/pull/234) add new forbidden 403 error + code ([@claudiahdz](https://github.com/claudiahdz)) + +### BUGFIXES + +* [`24acc9fc8`](https://github.com/npm/cli/commit/24acc9fc89d99d87cc66206c6c6f7cdc82fbf763) + and + [`45772af0d`](https://github.com/npm/cli/commit/45772af0ddca54b658cb2ba2182eec26d0a4729d) + [#217](https://github.com/npm/cli/pull/217) + [npm.community#8863](https://npm.community/t/installing-the-same-module-under-multiple-relative-paths-fails-on-linux/8863) + [npm.community#9327](https://npm.community/t/reinstall-breaks-after-npm-update-to-6-10-2/9327,) + do not descend into directory deps' child modules, fix shrinkwrap files + that inappropriately list child nodes of symlink packages + ([@isaacs](https://github.com/isaacs) and + [@salomvary](https://github.com/salomvary)) +* [`50cfe113d`](https://github.com/npm/cli/commit/50cfe113da5fcc59c1d99b0dcf1050ace45803c7) + [#229](https://github.com/npm/cli/pull/229) fixed typo in semver doc + ([@gall0ws](https://github.com/gall0ws)) +* [`e8fb2a1bd`](https://github.com/npm/cli/commit/e8fb2a1bd9785e0092e9926f4fd65ad431e38452) + [#231](https://github.com/npm/cli/pull/231) Fix spelling mistakes in + CHANGELOG-3.md ([@XhmikosR](https://github.com/XhmikosR)) +* [`769d2e057`](https://github.com/npm/cli/commit/769d2e057daf5a2cbfe0ce86f02550e59825a691) + [npm/uid-number#7](https://github.com/npm/uid-number/issues/7) Better + error on invalid `--user`/`--group` configs. This addresses the issue + when people fail to install binary packages on Docker and other + environments where there is no 'nobody' user. + ([@isaacs](https://github.com/isaacs)) +* [`8b43c9624`](https://github.com/npm/cli/commit/8b43c962498c8e2707527e4fca442d7a4fa51595) + [nodejs/node#28987](https://github.com/nodejs/node/issues/28987) + [npm.community#6032](https://npm.community/t/npm-ci-doesnt-respect-npmrc-variables/6032) + [npm.community#6658](https://npm.community/t/npm-ci-doesnt-fill-anymore-the-process-env-npm-config-cache-variable-on-post-install-scripts/6658) + [npm.community#6069](https://npm.community/t/npm-ci-does-not-compile-native-dependencies-according-to-npmrc-configuration/6069) + [npm.community#9323](https://npm.community/t/npm-6-9-x-not-passing-environment-to-node-gyp-regression-from-6-4-x/9323/2) + Fix the regression where random config values in a .npmrc file are not + passed to lifecycle scripts, breaking build processes which rely on them. + ([@isaacs](https://github.com/isaacs)) +* [`8b85eaa47`](https://github.com/npm/cli/commit/8b85eaa47da3abaacc90fe23162a68cc6e1f0404) + save files with inferred ownership rather than relying on `SUDO_UID` and + `SUDO_GID`. 
([@isaacs](https://github.com/isaacs)) +* [`b7f6e5f02`](https://github.com/npm/cli/commit/b7f6e5f0285515087b4614d81db17206524c0fdb) + Infer ownership of shrinkwrap files + ([@isaacs](https://github.com/isaacs)) +* [`54b095d77`](https://github.com/npm/cli/commit/54b095d77b3b131622b3cf4cb5c689aa2dd10b6b) + [#235](https://github.com/npm/cli/pull/235) Add spec to dist-tag remove + function ([@theberbie](https://github.com/theberbie)) + +### DEPENDENCIES + +* [`dc8f9e52f`](https://github.com/npm/cli/commit/dc8f9e52f0bb107c0a6b20cc0c97cbc3b056c1b3) + `pacote@9.5.7`: Infer the ownership of all unpacked files in + `node_modules`, so that we never have user-owned files in root-owned + folders, or root-owned files in user-owned folders. + ([@isaacs](https://github.com/isaacs)) +* [`bb33940c3`](https://github.com/npm/cli/commit/bb33940c32aad61704084e61ebd1bd8e7cacccc8) + `cmd-shim@3.0.0`: + * [`9c93ac3`](https://github.com/npm/cmd-shim/commit/9c93ac39e95b0d6ae852e842e4c5dba5e19687c2) + [#2](https://github.com/npm/cmd-shim/pull/2) + [npm#3380](https://github.com/npm/npm/issues/3380) Handle environment + variables properly ([@basbossink](https://github.com/basbossink)) + * [`2d277f8`](https://github.com/npm/cmd-shim/commit/2d277f8e84d45401747b0b9470058f168b974ad5) + [#25](https://github.com/npm/cmd-shim/pull/25) + [#36](https://github.com/npm/cmd-shim/pull/36) + [#35](https://github.com/npm/cmd-shim/pull/35) Fix 'no shebang' case by + always providing `$basedir` in shell script + ([@igorklopov](https://github.com/igorklopov)) + * [`adaf20b`](https://github.com/npm/cmd-shim/commit/adaf20b7fa2c09c2111a2506c6a3e53ed0831f88) + [#26](https://github.com/npm/cmd-shim/pull/26) Fix `$*` causing an + error when arguments contain parentheses + ([@satazor](https://github.com/satazor)) + * [`49f0c13`](https://github.com/npm/cmd-shim/commit/49f0c1318fd384e0031c3fd43801f0e22e1e555f) + [#30](https://github.com/npm/cmd-shim/pull/30) Fix paths for MSYS/MINGW + bash ([@dscho](https://github.com/dscho)) + * [`51a8af3`](https://github.com/npm/cmd-shim/commit/51a8af30990cb072cb30d67fc1b564b14746bba9) + [#34](https://github.com/npm/cmd-shim/pull/34) Add proper support for + PowerShell ([@ExE-Boss](https://github.com/ExE-Boss)) + * [`4c37e04`](https://github.com/npm/cmd-shim/commit/4c37e048dee672237e8962fdffca28e20e9f976d) + [#10](https://github.com/npm/cmd-shim/issues/10) Work around quoted + batch file names ([@isaacs](https://github.com/isaacs)) +* [`a4e279544`](https://github.com/npm/cli/commit/a4e279544f7983e0adff1e475e3760f1ea85825a) + `npm-lifecycle@3.1.3` ([@isaacs](https://github.com/isaacs)): + * fail properly if `uid-number` raises an error +* [`7086a1809`](https://github.com/npm/cli/commit/7086a1809bbfda9be81344b3949c7d3ac687ffc4) + `libcipm@4.0.3` ([@isaacs](https://github.com/isaacs)) +* [`8845141f9`](https://github.com/npm/cli/commit/8845141f9d7827dae572c8cf26f2c775db905bd3) + `read-package-json@2.1.0` ([@isaacs](https://github.com/isaacs)) +* [`51c028215`](https://github.com/npm/cli/commit/51c02821575d80035ebe853492d110db11a7d1b9) + `bin-links@1.1.3` ([@isaacs](https://github.com/isaacs)) +* [`534a5548c`](https://github.com/npm/cli/commit/534a5548c9ebd59f0dd90e9ccca148ed8946efa6) + `read-cmd-shim@1.0.3` ([@isaacs](https://github.com/isaacs)) +* [`3038f2fd5`](https://github.com/npm/cli/commit/3038f2fd5b1d7dd886ee72798241d8943690f508) + `gentle-fs@2.2.1` ([@isaacs](https://github.com/isaacs)) +* [`a609a1648`](https://github.com/npm/cli/commit/a609a16489f76791697d270b499fd4949ab1f8c3) + `graceful-fs@4.2.2` 
([@isaacs](https://github.com/isaacs)) +* [`f0346f754`](https://github.com/npm/cli/commit/f0346f75490619a81b310bfc18646ae5ae2e0ea4) + `cacache@12.0.3` ([@isaacs](https://github.com/isaacs)) +* [`ca9c615c8`](https://github.com/npm/cli/commit/ca9c615c8cff5c7db125735eb09f84d912d18694) + `npm-pick-manifest@3.0.0` ([@isaacs](https://github.com/isaacs)) +* [`b417affbf`](https://github.com/npm/cli/commit/b417affbf7133dc7687fd809e4956a43eae3438a) + `pacote@9.5.8` ([@isaacs](https://github.com/isaacs)) + +### TESTS + +* [`b6df0913c`](https://github.com/npm/cli/commit/b6df0913ca73246f1fa6cfa0e81e34ba5f2b6204) + [#228](https://github.com/npm/cli/pull/228) Proper handing of + /usr/bin/node lifecycle-path test + ([@olivr70](https://github.com/olivr70)) +* [`aaf98e88c`](https://github.com/npm/cli/commit/aaf98e88c78fd6c850d0a3d3ee2f61c02f63bc8c) + `npm-registry-mock@1.3.0` ([@isaacs](https://github.com/isaacs)) + +## v6.10.3 (2019-08-06): + +### BUGFIXES + +* [`27cccfbda`](https://github.com/npm/cli/commit/27cccfbdac8526cc807b07f416355949b1372a9b) + [#223](https://github.com/npm/cli/pull/223) vulns → vulnerabilities in + npm audit output ([@sapegin](https://github.com/sapegin)) +* [`d5e865eb7`](https://github.com/npm/cli/commit/d5e865eb79329665a927cc2767b4395c03045dbb) + [#222](https://github.com/npm/cli/pull/222) + [#226](https://github.com/npm/cli/pull/226) install, doctor: don't crash + if registry unset ([@dmitrydvorkin](https://github.com/dmitrydvorkin), + [@isaacs](https://github.com/isaacs)) +* [`5b3890226`](https://github.com/npm/cli/commit/5b389022652abeb0e1c278a152550eb95bc6c452) + [#227](https://github.com/npm/cli/pull/227) + [npm.community#9167](https://npm.community/t/npm-err-cb-never-called-permission-denied/9167/5) + Handle unhandledRejections, tell user what to do when encountering an + `EACCES` error in the cache. 
([@isaacs](https://github.com/isaacs)) + +### DEPENDENCIES + +* [`77516df6e`](https://github.com/npm/cli/commit/77516df6eac94a6d7acb5e9ca06feaa0868d779b) + `licensee@7.0.3` ([@isaacs](https://github.com/isaacs)) +* [`ceb993590`](https://github.com/npm/cli/commit/ceb993590e4e376a9a78264ce7bb4327fbbb37fe) + `query-string@6.8.2` ([@isaacs](https://github.com/isaacs)) +* [`4050b9189`](https://github.com/npm/cli/commit/4050b91898c60e9b22998cf82b70b9b822de592a) + `hosted-git-info@2.8.2` + * [#46](https://github.com/npm/hosted-git-info/issues/46) + [#43](https://github.com/npm/hosted-git-info/issues/43) + [#47](https://github.com/npm/hosted-git-info/pull/47) + [#44](https://github.com/npm/hosted-git-info/pull/44) Add support for + GitLab subgroups ([@mterrel](https://github.com/mterrel), + [@isaacs](https://github.com/isaacs), + [@ybiquitous](https://github.com/ybiquitous)) + * [`3b1d629`](https://github.com/npm/hosted-git-info/commit/3b1d629) + [#48](https://github.com/npm/hosted-git-info/issues/48) fix http + protocol using sshurl by default + ([@fengmk2](https://github.com/fengmk2)) + * [`5d4a8d7`](https://github.com/npm/hosted-git-info/commit/5d4a8d7) + ignore noCommittish on tarball url generation + ([@isaacs](https://github.com/isaacs)) + * [`1692435`](https://github.com/npm/hosted-git-info/commit/1692435) + use gist tarball url that works for anonymous gists + ([@isaacs](https://github.com/isaacs)) + * [`d5cf830`](https://github.com/npm/hosted-git-info/commit/d5cf8309be7af884032616c63ea302ce49dd321c) + Do not allow invalid gist urls ([@isaacs](https://github.com/isaacs)) + * [`e518222`](https://github.com/npm/hosted-git-info/commit/e5182224351183ce619dd5ef00019ae700ed37b7) + Use LRU cache to prevent unbounded memory consumption + ([@iarna](https://github.com/iarna)) + +## v6.10.2 (2019-07-23): + +tl;dr - Fixes several issues with the cache when npm is run as `sudo` on +Unix systems. + +### TESTING + +* [`2a78b96f8`](https://github.com/npm/cli/commit/2a78b96f830bbd834720ccc9eacccc54915ae6f7) + check test cache for root-owned files + ([@isaacs](https://github.com/isaacs)) +* [`108646ebc`](https://github.com/npm/cli/commit/108646ebc12f3eeebaa0a45884c45991a45e57e4) + run sudo tests on Travis-CI ([@isaacs](https://github.com/isaacs)) +* [`cf984e946`](https://github.com/npm/cli/commit/cf984e946f453cbea2fcc7a59608de3f24ab74c3) + set --no-esm tap flag ([@isaacs](https://github.com/isaacs)) +* [`8e0a3100d`](https://github.com/npm/cli/commit/8e0a3100dffb3965bb3dc4240e82980dfadf2f3c) + add script to run tests and leave fixtures for inspection and debugging + ([@isaacs](https://github.com/isaacs)) + +### BUGFIXES + +* [`25f4f73f6`](https://github.com/npm/cli/commit/25f4f73f6dc9744757787c82351120cd1baee5f8) + add a util for writing arbitrary files to cache This prevents metrics + timing and debug logs from becoming root-owned. 
+ ([@isaacs](https://github.com/isaacs)) +* [`2c61ce65d`](https://github.com/npm/cli/commit/2c61ce65d6b67100fdf3fcb9729055b669cb1a1d) + infer cache owner from parent dir in `correct-mkdir` util + ([@isaacs](https://github.com/isaacs)) +* [`235e5d6df`](https://github.com/npm/cli/commit/235e5d6df6f427585ec58425f1f3339d08f39d8a) + ensure correct owner on cached all-packages metadata + ([@isaacs](https://github.com/isaacs)) +* [`e2d377bb6`](https://github.com/npm/cli/commit/e2d377bb6419d8a3c1d80a73dba46062b4dad336) + [npm.community#8540](https://npm.community/t/npm-audit-fails-with-child-requires-fails-because-requires-must-be-an-object/8540) + audit: report server error on failure + ([@isaacs](https://github.com/isaacs)) +* [`52576a39e`](https://github.com/npm/cli/commit/52576a39ed75d94c46bb2c482fd38d2c6ea61c56) + [#216](https://github.com/npm/cli/pull/216) + [npm.community#5385](https://npm.community/t/6-8-0-npm-ci-fails-with-local-dependency/5385) + [npm.community#6076](https://npm.community/t/npm-ci-fail-to-local-packages/6076) + Fix `npm ci` with `file:` dependencies. Partially reverts + [#40](https://github.com/npm/cli/pull/40)/[#86](https://github.com/npm/cli/pull/86), + recording dependencies of linked deps in order for `npm ci` to work. + ([@jfirebaugh](https://github.com/jfirebaugh)) + +### DEPENDENCIES + +* [`0fefdee13`](https://github.com/npm/cli/commit/0fefdee130fd7d0dbb240fb9ecb50a793fbf3d29) + `cacache@12.0.2` ([@isaacs](https://github.com/isaacs)) + * infer uid/gid instead of accepting as options, preventing the + overwhelming majority of cases where root-owned files end up in the + cache folder. + ([ac84d14](https://github.com/npm/cacache/commit/ac84d14)) + ([@isaacs](https://github.com/isaacs)) + ([#1](https://github.com/npm/cacache/pull/1)) + * **i18n:** add another error message + ([676cb32](https://github.com/npm/cacache/commit/676cb32)) + ([@zkat](https://github.com/zkat)) +* [`e1d87a392`](https://github.com/npm/cli/commit/e1d87a392371a070b0788ab7bfc62be18b21e9ad) + `pacote@9.5.4` ([@isaacs](https://github.com/isaacs)) + * git: ensure stream failures are reported + ([7f07b5d](https://github.com/npm/pacote/commit/7f07b5d)) + [#1](https://github.com/npm/pacote/issues/1) + ([@lddubeau](https://github.com/lddubeau)) +* [`3f035bf09`](https://github.com/npm/cli/commit/3f035bf098e2feea76574cec18b04812659aa16d) + `infer-owner@1.0.4` ([@isaacs](https://github.com/isaacs)) +* [`ba3283112`](https://github.com/npm/cli/commit/ba32831126591d2f6f48e31a4a2329b533b1ff19) + `npm-registry-fetch@4.0.0` ([@isaacs](https://github.com/isaacs)) +* [`ee90c334d`](https://github.com/npm/cli/commit/ee90c334d271383d0325af42f20f80f34cb61f07) + `libnpm@3.0.1` ([@isaacs](https://github.com/isaacs)) +* [`1e480c384`](https://github.com/npm/cli/commit/1e480c38416982ae28b5cdd48c698ca59d3c0395) + `libnpmaccess@3.0.2` ([@isaacs](https://github.com/isaacs)) +* [`7662ee850`](https://github.com/npm/cli/commit/7662ee850220c71ecaec639adbc7715286f0d28b) + `libnpmhook@5.0.3` ([@isaacs](https://github.com/isaacs)) +* [`1357fadc6`](https://github.com/npm/cli/commit/1357fadc613d0bfeb40f9a8f3ecace2face2fe2c) + `libnpmorg@1.0.1` ([@isaacs](https://github.com/isaacs)) +* [`a621b5cb6`](https://github.com/npm/cli/commit/a621b5cb6c881f95a11af86a8051754a67ae017c) + `libnpmsearch@2.0.2` ([@isaacs](https://github.com/isaacs)) +* [`560cd31dd`](https://github.com/npm/cli/commit/560cd31dd51b6aa2e396ccdd7289fab0a50b5608) + `libnpmteam@1.0.2` ([@isaacs](https://github.com/isaacs)) +* 
[`de7ae0867`](https://github.com/npm/cli/commit/de7ae0867d4c0180edc283457ce0b4e8e5eee554) + `npm-profile@4.0.2` ([@isaacs](https://github.com/isaacs)) +* [`e95da463c`](https://github.com/npm/cli/commit/e95da463cb7a325457ef411a569d7ef4bf76901d) + `libnpm@3.0.1` ([@isaacs](https://github.com/isaacs)) +* [`554b641d4`](https://github.com/npm/cli/commit/554b641d49d135ae8d137e83aa288897c32dacc6) + `npm-registry-fetch@4.0.0` ([@isaacs](https://github.com/isaacs)) +* [`06772f34a`](https://github.com/npm/cli/commit/06772f34ab851440dcd78574736936c674a84aed) + `node-gyp@5.0.3` ([@isaacs](https://github.com/isaacs)) +* [`85358db80`](https://github.com/npm/cli/commit/85358db80d6ccb5f7bc9a0b4d558ac6dd2468394) + `npm-lifecycle@3.1.2` ([@isaacs](https://github.com/isaacs)) + * [`051cf20`](https://github.com/npm/npm-lifecycle/commit/051cf20072a01839c17920d2e841756251c4f924) + [#26](https://github.com/npm/npm-lifecycle/pull/26) fix switches for + alternative shells on Windows + ([@gucong3000](https://github.com/gucong3000)) + * [`3aaf954`](https://github.com/npm/npm-lifecycle/commit/3aaf95435965e8f7acfd955582cf85237afd2c9b) + [#25](https://github.com/npm/npm-lifecycle/pull/25) set only one PATH + env variable for child process on Windows + ([@zkochan](https://github.com/zkochan)) + * [`ea18ed2`](https://github.com/npm/npm-lifecycle/commit/ea18ed2b754ca7f11998cad70d88e9004c5bef4a) + [#36](https://github.com/npm/npm-lifecycle/pull/36) + [#11](https://github.com/npm/npm-lifecycle/issue/11) + [#18](https://github.com/npm/npm-lifecycle/issue/18) remove + procInterrupt listener on SIGINT in procError + ([@mattshin](https://github.com/mattshin)) + * [`5523951`](https://github.com/npm/npm-lifecycle/commit/55239519c57b82521605622e6c71640a31ed4586) + [#29](https://github.com/npm/npm-lifecycle/issue/29) + [#30](https://github.com/npm/npm-lifecycle/pull/30) Use platform + specific path casing if present + ([@mattezell](https://github.com/mattezell)) + +## v6.10.1 (2019-07-11): + +### BUGFIXES + +* [`3cbd57712`](https://github.com/npm/cli/commit/3cbd577120a9da6e51bb8b13534d1bf71ea5712c) + fix(git): strip GIT environs when running git + ([@isaacs](https://github.com/isaacs)) +* [`a81a8c4c4`](https://github.com/npm/cli/commit/a81a8c4c466f510215a51cef1bb08544d11844fe) + [#206](https://github.com/npm/cli/pull/206) improve isOnly(Dev,Optional) + ([@larsgw](https://github.com/larsgw)) +* [`172f9aca6`](https://github.com/npm/cli/commit/172f9aca67a66ee303c17f90a994cd52fc66552a) + [#179](https://github.com/npm/cli/pull/179) fix-xmas-underline + ([@raywu0123](https://github.com/raywu0123)) +* [`f52673fc7`](https://github.com/npm/cli/commit/f52673fc7284e58af8c04533e82b76bf7add72cf) + [#212](https://github.com/npm/cli/pull/212) build: use `/usr/bin/env` to + load bash ([@rsmarples](https://github.com/rsmarples)) + +### DEPENDENCIES + +* [`ef4445ad3`](https://github.com/npm/cli/commit/ef4445ad34a53b5639499c8e3c9752f62ee6f37c) + [#208](https://github.com/npm/cli/pull/208) `node-gyp@5.0.2` + ([@irega](https://github.com/irega)) +* [`c0d611356`](https://github.com/npm/cli/commit/c0d611356f7b23077e97574b01c8886e544db425) + `npm-lifecycle@3.0.0` ([@isaacs](https://github.com/isaacs)) +* [`7716ba972`](https://github.com/npm/cli/commit/7716ba9720270d5b780755a5bb1ce79702067f1f) + `libcipm@4.0.0` ([@isaacs](https://github.com/isaacs)) +* [`42d22e837`](https://github.com/npm/cli/commit/42d22e8374c7d303d94e405d7385d94dd2558814) + `libnpm@3.0.0` ([@isaacs](https://github.com/isaacs)) +* 
[`a2ea7f9ff`](https://github.com/npm/cli/commit/a2ea7f9ff64ae743d05fdbf7d46fb9afafa8aa6f) + `semver@5.7.0` ([@isaacs](https://github.com/isaacs)) +* [`429226a5e`](https://github.com/npm/cli/commit/429226a5e992cd907d4f19bd738037007cf78c1f) + `lru-cache@5.1.1` ([@isaacs](https://github.com/isaacs)) +* [`175670ea6`](https://github.com/npm/cli/commit/175670ea65cca03f8b2e957df3dd4b8b0efd0e1f) + `npm-registry-fetch@3.9.1`: ([@isaacs](https://github.com/isaacs)) +* [`0d0517f7f`](https://github.com/npm/cli/commit/0d0517f7f8c902b5064ac18fb4015b31750ad2b2) + `call-limit@1.1.1` ([@isaacs](https://github.com/isaacs)) +* [`741400429`](https://github.com/npm/cli/commit/74140042917ea241062a812ceb65c5423c2bafa9) + `glob@7.1.4` ([@isaacs](https://github.com/isaacs)) +* [`bddd60e30`](https://github.com/npm/cli/commit/bddd60e302283a4a70d35f8f742e42bd13f4dabf) + `inherits@2.0.4` ([@isaacs](https://github.com/isaacs)) +* [`4acf03fd1`](https://github.com/npm/cli/commit/4acf03fd140ed3ddb2dcf3fdc9756bc3f5a8bcbb) + `libnpmsearch@2.0.1` ([@isaacs](https://github.com/isaacs)) +* [`c2bd17291`](https://github.com/npm/cli/commit/c2bd17291a86bea7ced2fbd07d66d013bd7a7560) + `marked@0.6.3` ([@isaacs](https://github.com/isaacs)) +* [`7f0221bb1`](https://github.com/npm/cli/commit/7f0221bb1bb41ffc933c785940e227feae38c80c) + `marked-man@0.6.0` ([@isaacs](https://github.com/isaacs)) +* [`f458fe7dd`](https://github.com/npm/cli/commit/f458fe7dd3bebddf603aaae183a424ea8aaa018f) + `npm-lifecycle@2.1.1` ([@isaacs](https://github.com/isaacs)) +* [`009752978`](https://github.com/npm/cli/commit/0097529780269c28444f1efa0d7c131d47a933eb) + `node-gyp@4.0.0` ([@isaacs](https://github.com/isaacs)) +* [`0fa2bb438`](https://github.com/npm/cli/commit/0fa2bb4386379d6e9d8c95db08662ec0529964f9) + `query-string@6.8.1` ([@isaacs](https://github.com/isaacs)) +* [`b86450929`](https://github.com/npm/cli/commit/b86450929796950a1fe4b1f9b02b1634c812f3bb) + `tar-stream@2.1.0` ([@isaacs](https://github.com/isaacs)) +* [`25db00fe9`](https://github.com/npm/cli/commit/25db00fe953453198adb9e1bd71d1bc2a9f04aaa) + `worker-farm@1.7.0` ([@isaacs](https://github.com/isaacs)) +* [`8dfbe8610`](https://github.com/npm/cli/commit/8dfbe861085dfa8fa56bb504b4a00fba04c34f9d) + `readable-stream@3.4.0` ([@isaacs](https://github.com/isaacs)) +* [`f6164d5dd`](https://github.com/npm/cli/commit/f6164d5ddd072eabdf2237f1694a31efd746eb1d) + [isaacs/chownr#21](https://github.com/isaacs/chownr/pull/21) + [isaacs/chownr#20](https://github.com/isaacs/chownr/issues/20) + [npm.community#7901](https://npm.community/t/7901/) + [npm.community#8203](https://npm.community/t/8203) `chownr@1.1.2` This + fixes an EISDIR error from cacache on Darwin in Node versions prior to + 10.6. 
([@isaacs](https://github.com/isaacs)) + +## v6.10.0 (2019-07-03): + +### FEATURES + +* [`87fef4e35`](https://github.com/npm/cli/commit/87fef4e35) + [#176](https://github.com/npm/cli/pull/176) fix: Always return JSON for + outdated --json ([@sreeramjayan](https://github.com/sreeramjayan)) +* [`f101d44fc`](https://github.com/npm/cli/commit/f101d44fc) + [#203](https://github.com/npm/cli/pull/203) fix(unpublish): add space + after hyphen ([@ffflorian](https://github.com/ffflorian)) +* [`a4475de4c`](https://github.com/npm/cli/commit/a4475de4c) + [#202](https://github.com/npm/cli/pull/202) enable production flag for + npm audit ([@CalebCourier](https://github.com/CalebCourier)) +* [`d192904d0`](https://github.com/npm/cli/commit/d192904d0) + [#178](https://github.com/npm/cli/pull/178) fix: Return a value for + `view` when in silent mode + ([@stayradiated](https://github.com/stayradiated)) +* [`39d473adf`](https://github.com/npm/cli/commit/39d473adf) + [#185](https://github.com/npm/cli/pull/185) Allow git to follow global + tagsign config ([@junderw](https://github.com/junderw)) + +### BUGFIXES + +* [`d9238af0b`](https://github.com/npm/cli/commit/d9238af0b) + [#201](https://github.com/npm/cli/pull/163) + [npm/npm#17858](https://github.com/npm/npm/issues/17858) + [npm/npm#18042](https://github.com/npm/npm/issues/18042) + [npm.community#644](https://npm.community/t/644) do not crash when + removing nameless packages + ([@SteveVanOpstal](https://github.com/SteveVanOpstal) and + [@isaacs](https://github.com/isaacs)) +* [`4bec4f111`](https://github.com/npm/cli/commit/4bec4f111) + [#200](https://github.com/npm/cli/pull/200) Check for `node` (as well as + `node.exe`) in npm's local dir on Windows + ([@rgoulais](https://github.com/rgoulais)) +* [`ce93dab2d`](https://github.com/npm/cli/commit/ce93dab2db423ef23b3e08a0612dafbeb2d25789) + [#180](https://github.com/npm/cli/pull/180) + [npm.community#6187](https://npm.community/t/6187) Fix handling of + `remote` deps in `npm outdated` ([@larsgw](https://github.com/larsgw)) + +### TESTING + +* [`a823f3084`](https://github.com/npm/cli/commit/a823f3084) travis: Update + to include new v12 LTS ([@isaacs](https://github.com/isaacs)) +* [`33e2d1dac`](https://github.com/npm/cli/commit/33e2d1dac) fix flaky + debug-logs test ([@isaacs](https://github.com/isaacs)) +* [`e9411c6cd`](https://github.com/npm/cli/commit/e9411c6cd) Don't time out + waiting for gpg user input ([@isaacs](https://github.com/isaacs)) +* [`d2d301704`](https://github.com/npm/cli/commit/d2d301704) + [#195](https://github.com/npm/cli/pull/195) Add the arm64 check for + legacy-platform-all.js test case. 
+ ([@ossdev07](https://github.com/ossdev07)) +* [`a4dc34243`](https://github.com/npm/cli/commit/a4dc34243) parallel tests + ([@isaacs](https://github.com/isaacs)) + +### DOCUMENTATION + +* [`f5857e263`](https://github.com/npm/cli/commit/f5857e263) + [#192](https://github.com/npm/cli/pull/192) Clarify usage of + bundledDependencies + ([@john-osullivan](https://github.com/john-osullivan)) +* [`747fdaf66`](https://github.com/npm/cli/commit/747fdaf66) + [#159](https://github.com/npm/cli/pull/159) doc: add --audit-level param + ([@ngraef](https://github.com/ngraef)) + +### DEPENDENCIES + +* [`e36b3c320`](https://github.com/npm/cli/commit/e36b3c320) + graceful-fs@4.2.0 ([@isaacs](https://github.com/isaacs)) +* [`6bb935c09`](https://github.com/npm/cli/commit/6bb935c09) + read-package-tree@5.3.1 ([@isaacs](https://github.com/isaacs)) + * [`e9cd536`](https://github.com/npm/read-package-tree/commit/e9cd536) + Use custom caching `realpath` implementation, dramatically reducing + `lstat` calls when reading the package tree + ([@isaacs](https://github.com/isaacs)) +* [`39538b460`](https://github.com/npm/cli/commit/39538b460) + write-file-atomic@2.4.3 ([@isaacs](https://github.com/isaacs)) + * [`f8b1552`](https://github.com/npm/write-file-atomic/commit/f8b1552) + [#38](https://github.com/npm/write-file-atomic/pull/38) Ignore errors + raised by `fs.closeSync` ([@lukeapage](https://github.com/lukeapage)) +* [`042193069`](https://github.com/npm/cli/commit/042193069) pacote@9.5.1 + ([@isaacs](https://github.com/isaacs)) + * [`8bbd051`](https://github.com/npm/pacote/commit/8bbd051) + [#172](https://github.com/zkat/pacote/pull/172) limit git retry + times, avoid unlimited retries ([小秦](https://github.com/xqin)) + * [`92f5e4c`](https://github.com/npm/pacote/commit/92f5e4c) + [#170](https://github.com/zkat/pacote/pull/170) fix(errors): Fix + "TypeError: err.code.match is not a function" error + ([@jviotti](https://github.com/jviotti)) +* [`8bd8e909f`](https://github.com/npm/cli/commit/8bd8e909f) cacache@11.3.3 + ([@isaacs](https://github.com/isaacs)) + * [`47de8f5`](https://github.com/npm/cacache/commit/47de8f5) + [#146](https://github.com/zkat/cacache/pull/146) + [npm.community#2395](https://npm.community/t/2395) fix(config): Add + ssri config 'error' option ([@larsgw](https://github.com/larsgw)) + * [`5156561`](https://github.com/npm/cacache/commit/5156561) + fix(write): avoid a `cb never called` situation + ([@zkat](https://github.com/zkat)) + * [`90f40f0`](https://github.com/npm/cacache/commit/90f40f0) + [#166](https://github.com/zkat/cacache/pull/166) + [#165](https://github.com/zkat/cacache/issues/165) docs: Fix docs for + `path` property in get.info + ([@hdgarrood](https://github.com/hdgarrood)) +* [`bf61c45c6`](https://github.com/npm/cli/commit/bf61c45c6) bluebird@3.5.5 + ([@isaacs](https://github.com/isaacs)) +* [`f75d46a9d`](https://github.com/npm/cli/commit/f75d46a9d) tar@4.4.10 + ([@isaacs](https://github.com/isaacs)) + * [`c80341a`](https://github.com/npm/node-tar/commit/c80341a) + [#215](https://github.com/npm/node-tar/pull/215) Fix + encoding/decoding of base-256 numbers + ([@justfalter](https://github.com/justfalter)) + * [`77522f0`](https://github.com/npm/node-tar/commit/77522f0) + [#204](https://github.com/npm/node-tar/issues/204) + [#214](https://github.com/npm/node-tar/issues/214) Use `stat` instead + of `lstat` when checking CWD ([@stkb](https://github.com/stkb)) +* [`ec6236210`](https://github.com/npm/cli/commit/ec6236210) + npm-packlist@1.4.4 ([@isaacs](https://github.com/isaacs)) + * 
[`63d1e3e`](https://github.com/npm/npm-packlist/commit/63d1e3e) + [#30](https://github.com/npm/npm-packlist/issues/30) Sort package + tarball entries by file type for compression benefits + ([@isaacs](https://github.com/isaacs)) + * [`7fcd045`](https://github.com/npm/npm-packlist/commit/7fcd045) + Ignore `.DS_Store` files as well as folders + ([@isaacs](https://github.com/isaacs)) + * [`68b7c96`](https://github.com/npm/npm-packlist/commit/68b7c96) Never + include .git folders in package root. (Note: this prevents the issue + that broke the v6.9.1 release.) + ([@isaacs](https://github.com/isaacs)) +* [`57bef61bc`](https://github.com/npm/cli/commit/57bef61bc) update fstream + in node-gyp ([@isaacs](https://github.com/isaacs)) + * Addresses [security advisory + #886](https://www.npmjs.com/advisories/886) +* [`acbbf7eee`](https://github.com/npm/cli/commit/acbbf7eee) + [#183](https://github.com/npm/cli/pull/183) licensee@7.0.2 + ([@kemitchell](https://github.com/kemitchell)) +* [`011ae67f0`](https://github.com/npm/cli/commit/011ae67f0) + readable-stream@3.3.0 ([@isaacs](https://github.com/isaacs)) +* [`f5e884909`](https://github.com/npm/cli/commit/f5e884909) + npm-registry-mock@1.2.1 ([@isaacs](https://github.com/isaacs)) +* [`b57d07e35`](https://github.com/npm/cli/commit/b57d07e35) + npm-registry-couchapp@2.7.2 ([@isaacs](https://github.com/isaacs)) + +## v6.9.2 (2019-06-27): + +This release is identical to v6.9.1, but we had to publish a new version +due to [a .git directory in the release](https://npm.community/t/8454). + +## v6.9.1 (2019-06-26): + +### BUGFIXES + +* [`6b1a9da0e`](https://github.com/npm/cli/commit/6b1a9da0e0f5c295cdaf4dea4b73bd221d778611) + [#165](https://github.com/npm/cli/pull/165) + Update `knownBroken` version. + ([@ljharb](https://github.com/ljharb)) +* [`d07547154`](https://github.com/npm/cli/commit/d07547154eb8a88aa4fde8a37e128e1e3272adc1) + [npm.community#5929](https://npm.community/t/npm-outdated-throw-an-error-cannot-read-property-length-of-undefined/5929) + Fix `outdated` rendering for global dependencies. + ([@zkat](https://github.com/zkat)) +* [`e4a1f1745`](https://github.com/npm/cli/commit/e4a1f174514a57580fd5e0fa33eee0f42bba77fc) + [npm.community#6259](https://npm.community/t/npm-token-create-doesnt-work-in-6-6-0-6-9-0/6259) + Fix OTP for token create and remove. 
+ ([@zkat](https://github.com/zkat)) + +### DEPENDENCIES + +* [`a163a9c35`](https://github.com/npm/cli/commit/a163a9c35f6f341de343562368056258bba5d7dc) + `sha@3.0.0` + ([@aeschright](https://github.com/aeschright)) +* [`47b08b3b9`](https://github.com/npm/cli/commit/47b08b3b9860438b416efb438e975a628ec2eed5) + `query-string@6.4.0` + ([@aeschright](https://github.com/aeschright)) +* [`d6a956cff`](https://github.com/npm/cli/commit/d6a956cff6357e6de431848e578c391768685a64) + `readable-stream@3.2.0` + ([@aeschright](https://github.com/aeschright)) +* [`10b8bed2b`](https://github.com/npm/cli/commit/10b8bed2bb0afac5451164e87f25924cc1ac6f2e) + `tacks@1.3.0` + ([@aeschright](https://github.com/aeschright)) +* [`e7483704d`](https://github.com/npm/cli/commit/e7483704dda1acffc8c6b8c165c14c8a7512f3c8) + `tap@12.6.0` + ([@aeschright](https://github.com/aeschright)) +* [`3242fe698`](https://github.com/npm/cli/commit/3242fe698ead46a9cda94e1a4d489cd84a85d7e3) + `tar-stream@2.0.1` + ([@aeschright](https://github.com/aeschright)) + +## v6.9.0 (2019-02-20): + +### FEATURES + +* [`2ba3a0f67`](https://github.com/npm/cli/commit/2ba3a0f6721f6d5a16775aebce6012965634fc7c) + [#90](https://github.com/npm/cli/pull/90) + Time traveling installs using the `--before` flag. + ([@zkat](https://github.com/zkat)) +* [`b7b54f2d1`](https://github.com/npm/cli/commit/b7b54f2d18e2d8d65ec67c850b21ae9f01c60e7e) + [#3](https://github.com/npm/cli/pull/3) + Add support for package aliases. This allows packages to be installed under a + different directory than the package name listed in `package.json`, and adds a + new dependency type to allow this to be done for registry dependencies. + ([@zkat](https://github.com/zkat)) +* [`684bccf06`](https://github.com/npm/cli/commit/684bccf061dfc97bb759121bc0ad635e01c65868) + [#146](https://github.com/npm/cli/pull/146) + Always save `package-lock.json` when using `--package-lock-only`. + ([@aeschright](https://github.com/aeschright)) +* [`b8b8afd40`](https://github.com/npm/cli/commit/b8b8afd4048b4ba1181e00ba2ac49ced43936ce0) + [#139](https://github.com/npm/cli/pull/139) + Make empty-string run-scripts run successfully as a no-op. + ([@vlasy](https://github.com/vlasy)) +* [`8047b19b1`](https://github.com/npm/cli/commit/8047b19b1b994fd4b4e7b5c91d7cc4e0384bd5e4) + [npm.community#3784](https://npm.community/t/3784) + Match git semver ranges when flattening the tree. + ([@larsgw](https://github.com/larsgw)) +* [`e135c2bb3`](https://github.com/npm/cli/commit/e135c2bb360dcf00ecee34a95985afec21ba3655) + [npm.community#1725](https://npm.community/t/1725?u=larsgw) + Re-enable updating local packages. + ([@larsgw](https://github.com/larsgw)) + +### BUGFIXES + +* [`cf09fbaed`](https://github.com/npm/cli/commit/cf09fbaed489d908e9b551382cc5f61bdabe99a9) + [#153](https://github.com/npm/cli/pull/153) + Set modified to undefined in `npm view` when `time` is not available. This + fixes a bug where `npm view` would crash on certain third-party registries. + ([@simonua](https://github.com/simonua)) +* [`774fc26ee`](https://github.com/npm/cli/commit/774fc26eeb01345c11bd8c97e2c4f328d419d9b5) + [#154](https://github.com/npm/cli/pull/154) + Print out tar version in `install.sh` only when the flag is supported not all + the tar implementations support --version flag. This allows the install script + to work in OpenBSD, for example. 
+ ([@agudulin](https://github.com/agudulin)) +* [`863baff11`](https://github.com/npm/cli/commit/863baff11d8c870f1a0d9619bb5133c67d71e407) + [#158](https://github.com/npm/cli/pull/158) + Fix typo in error message for `npm stars`. + ([@phihag](https://github.com/phihag)) +* [`a805a95ad`](https://github.com/npm/cli/commit/a805a95ad8832ef5008671f4bd4c11b83e32e0f2) + [npm.community#4227](https://npm.community/t/4227) + Strip version info from pkg on E404. This improves the error messaging format. + ([@larsgw](https://github.com/larsgw)) + +### DOCS + +* [`5d7633833`](https://github.com/npm/cli/commit/5d76338338621fd0b3d4f7914a51726d27569ee1) + [#160](https://github.com/npm/cli/pull/160) + Add `npm add` as alias to npm install in docs. + ([@ahasall](https://github.com/ahasall)) +* [`489c2211c`](https://github.com/npm/cli/commit/489c2211c96a01d65df50fd57346c785bcc3efe6) + [#162](https://github.com/npm/cli/pull/162) + Fix link to RFC #10 in the changelog. + ([@mansona](https://github.com/mansona)) +* [`433020ead`](https://github.com/npm/cli/commit/433020ead5251b562bc3b0f5f55341a5b8cc9023) + [#135](https://github.com/npm/cli/pull/135) + Describe exit codes in npm-audit docs. + ([@emilis-tm](https://github.com/emilis-tm)) + +### DEPENDENCIES + +* [`ee6b6746b`](https://github.com/npm/cli/commit/ee6b6746b04f145dfe489af2d26667ac32ba0cef) + [zkat/make-fetch-happen#29](https://github.com/zkat/make-fetch-happen/issues/29) + `agent-base@4.2.1` + ([@TooTallNate](https://github.com/TooTallNate)) +* [`2ce23baf5`](https://github.com/npm/cli/commit/2ce23baf53b1ce7d11b8efb80c598ddaf9cef9e7) + `lock-verify@2.1.0`: + Adds support for package aliases + ([@zkat](https://github.com/zkat)) +* [`baaedbc6e`](https://github.com/npm/cli/commit/baaedbc6e2fc370d73b35e7721794719115507cc) + `pacote@9.5.0`: + Adds opts.before support + ([@zkat](https://github.com/zkat)) +* [`57e771a03`](https://github.com/npm/cli/commit/57e771a032165d1e31e71d0ff7530442139c21a6) + [#164](https://github.com/npm/cli/pull/164) + `licensee@6.1.0` + ([@kemitchell](https://github.com/kemitchell)) +* [`2b78288d4`](https://github.com/npm/cli/commit/2b78288d4accd10c1b7cc6c36bc28045f5634d91) + add core to default inclusion tests in pack + ([@Kat Marchán](https://github.com/Kat Marchán)) +* [`9b8b6513f`](https://github.com/npm/cli/commit/9b8b6513fbce92764b32a067322984985ff683fe) + [npm.community#5382](https://npm.community/t/npm-pack-leaving-out-files-6-8-0-only/5382) + `npm-packlist@1.4.1`: Fixes bug where `core/` directories were being suddenly excluded. + ([@zkat](https://github.com/zkat)) + +## v6.8.0 (2019-02-07): + +This release includes an implementation of [RFC #10](https://github.com/npm/rfcs/blob/latest/implemented/0010-monorepo-subdirectory-declaration.md), documenting an optional field that can be used to specify +the directory path for a package within a monorepo. + +### NEW FEATURES + +* [`3663cdef2`](https://github.com/npm/cli/commit/3663cdef205fa9ba2c2830e5ef7ceeb31c30298c) + [#140](https://github.com/npm/cli/pull/140) + Update package.json docs to include repository.directory details. + ([@greysteil](https://github.com/greysteil)) + +### BUGFIXES + +* [`550bf703a`](https://github.com/npm/cli/commit/550bf703ae3e31ba6a300658ae95b6937f67b68f) + Add @types to ignore list to fix git clean -fd. + ([@zkat](https://github.com/zkat)) +* [`cdb059293`](https://github.com/npm/cli/commit/cdb0592939d6256c80f7ec5a2b6251131a512a2a) + [#144](https://github.com/npm/cli/pull/144) + Fix common.npm callback arguments. 
+ ([@larsgw](https://github.com/larsgw)) +* [`25573e9b9`](https://github.com/npm/cli/commit/25573e9b9d5d26261c68d453f06db5b3b1cd6789) + [npm.community#4770](https://npm.community/t/https://npm.community/t/4770) + Show installed but unmet peer deps. + ([@larsgw](https://github.com/larsgw)) +* [`ce2c4bd1a`](https://github.com/npm/cli/commit/ce2c4bd1a2ce7ac1727a4ca9a350b743a2e27b2a) + [#149](https://github.com/npm/cli/pull/149) + Use figgy-config to make sure extra opts are there. + ([@zkat](https://github.com/zkat)) +* [`3c22d1a35`](https://github.com/npm/cli/commit/3c22d1a35878f73c0af8ea5968b962a85a1a9b84) + [npm.community#5101](https://npm.community/t/npm-6-6-0-breaks-access-to-ls-collaborators/5101) + Fix `ls-collaborators` access error for non-scoped case. + ([@zkat](https://github.com/zkat)) +* [`d5137091d`](https://github.com/npm/cli/commit/d5137091dd695a2980f7ade85fdc56b2421ff677) + [npm.community#754](https://npm.community/t/npm-install-for-package-with-local-dependency-fails/754) + Fix issue with sub-folder local references. + ([@iarna](https://github.com/iarna)) + ([@jhecking](https://github.com/jhecking)) + +### DEPENDENCY BUMPS + +* [`d72141080`](https://github.com/npm/cli/commit/d72141080ec8fcf35bcc5650245efbe649de053e) + `npm-registry-couchapp@2.7.1` + ([@zkat](https://github.com/zkat)) +* [`671cad1b1`](https://github.com/npm/cli/commit/671cad1b18239d540da246d6f78de45d9f784396) + `npm-registry-fetch@3.9.0`: + Make sure publishing with legacy username:password `_auth` works again. + ([@zkat](https://github.com/zkat)) +* [`95ca1aef4`](https://github.com/npm/cli/commit/95ca1aef4077c8e68d9f4dce37f6ba49b591c4ca) + `pacote@9.4.1` + ([@aeschright](https://github.com/aeschright)) +* [`322fef403`](https://github.com/npm/cli/commit/322fef40376e71cd100159dc914e7ca89faae327) + `normalize-package-data@2.5.0` + ([@aeschright](https://github.com/aeschright)) +* [`32d34c0da`](https://github.com/npm/cli/commit/32d34c0da4f393a74697297667eb9226155ecc6b) + `npm-packlist@1.3.0` + ([@aeschright](https://github.com/aeschright)) +* [`338571cf0`](https://github.com/npm/cli/commit/338571cf0bd3a1e2ea800464d57581932ff0fb11) + `read-package-tree@5.2.2` + ([@zkat](https://github.com/zkat)) + +### MISC + +* [`89b23a5f7`](https://github.com/npm/cli/commit/89b23a5f7b0ccdcdda1d7d4d3eafb6903156d186) + [#120](https://github.com/npm/cli/pull/120) + Use `const` in lib/fetch-package-metadata.md. + ([@watilde](https://github.com/watilde)) +* [`4970d553c`](https://github.com/npm/cli/commit/4970d553c0ea66128931d118469fd31c87cc7986) + [#126](https://github.com/npm/cli/pull/126) + Replace ronn with marked-man in `.npmignore`. + ([@watilde](https://github.com/watilde)) +* [`d9b6090dc`](https://github.com/npm/cli/commit/d9b6090dc26cd0fded18b4f80248cff3e51bb185) + [#138](https://github.com/npm/cli/pull/138) + Reduce work to test if executable ends with a 'g'. + ([@elidoran](https://github.com/elidoran)) + ([@larsgw](https://github.com/larsgw)) + +## v6.7.0 (2019-01-23): + +Hey y'all! This is a quick hotfix release that includes some important fixes to +`npm@6.6.0` related to the large rewrite/refactor. We're tagging it as a feature +release because the changes involve some minor new features, and semver is +semver, but there's nothing major here. + +### NEW FEATURES + +* [`50463f58b`](https://github.com/npm/cli/commit/50463f58b4b70180a85d6d8c10fcf50d8970ef5e) + Improve usage errors to `npm org` commands and add optional filtering to `npm + org ls` subcommand. 
+ ([@zkat](https://github.com/zkat)) + +### BUGFIXES + +* [`4027070b0`](https://github.com/npm/cli/commit/4027070b03be3bdae2515f2291de89b91f901df9) + Fix default usage printout for `npm org` so you actually see how it's supposed + to be used. + ([@zkat](https://github.com/zkat)) +* [`cfea6ea5b`](https://github.com/npm/cli/commit/cfea6ea5b67ec5e4ec57e3a9cb8c82d018cb5476) + fix default usage message for npm hook + ([@zkat](https://github.com/zkat)) + +### DOCS + +* [`e959e1421`](https://github.com/npm/cli/commit/e959e14217d751ddb295565fd75cc81de1ee0d5b) + Add manpage for `npm org` command. + ([@zkat](https://github.com/zkat)) + +### DEPENDENCY BUMPS + +* [`8543fc357`](https://github.com/npm/cli/commit/8543fc3576f64e91f7946d4c56a5ffb045b55156) + `pacote@9.4.0`: Fall back to "fullfat" packuments on ETARGET errors. This will + make it so that, when a package is published but the corgi follower hasn't + caught up, users can still install a freshly-published package. + ([@zkat](https://github.com/zkat)) +* [`75475043b`](https://github.com/npm/cli/commit/75475043b03a254b2e7db2c04c3f0baea31d8dc5) + [npm.community#4752](https://npm.community/t/npm-6-6-0-broke-authentication-with-npm-registry-couchapp/4752) + `libnpmpublish@1.1.1`: Fixes auth error for username/password legacy authentication. + ([@sreeramjayan](https://github.com/sreeramjayan)) +* [`0af8c00ac`](https://github.com/npm/cli/commit/0af8c00acb01849362ffca25b567cc62447c7175) + [npm.community#4746](https://npm.community/t/npm-6-6-0-release-breaking-docker-npm-ci-commands/4746) + `libcipm@3.0.3`: Fixes issue with "cannot run in wd" errors for run-scripts. + ([@zkat](https://github.com/zkat)) +* [`5a7962e46`](https://github.com/npm/cli/commit/5a7962e46f582c6bd91784b0ddc941ed45e9f802) + `write-file-atomic@2.4.2`: + Fixes issues with leaking `signal-exit` instances and file descriptors. + ([@iarna](https://github.com/iarna)) + +## v6.6.0 (2019-01-17): + +### REFACTORING OUT npm-REGISTRY-CLIENT + +Today is an auspicious day! This release marks the end of a massive internal +refactor to npm that means we finally got rid of the legacy +[`npm-registry-client`](https://npm.im/npm-registry-client) in favor of the +shiny, new, `window.fetch`-like +[`npm-registry-fetch`](https://npm.im/npm-registry-fetch). + +Now, the installer had already done most of this work with the release of +`npm@5`, but it turns out _every other command_ still used the legacy client. +This release updates all of those commands to use the new client, and while +we're at it, adds a few extra goodies: + +* All OTP-requiring commands will now **prompt**. `--otp` is no longer required for `dist-tag`, `access`, et al. +* We're starting to integrate a new config system which will eventually get extracted into a standalone package. +* We now use [`libnpm`](https://npm.im/libnpm) for the API functionality of a lot of our commands! That means you can install a library if you want to write your own tooling around them. +* There's now an `npm org` command for managing users in your org. +* [`pacote`](https://npm.im/pacote) now consumes npm-style configurations, instead of its own naming for various config vars. This will make it easier to load npm configs using `libnpm.config` and hand them directly to `pacote`. 
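Since these notes point readers at [`libnpm`](https://npm.im/libnpm) for building their own tooling, a minimal sketch of what that can look like may help. This is illustrative only (it is not part of the release changes) and it assumes `libnpm@2`'s re-exports of `libnpmconfig` (as `config`) and of pacote's `manifest()`; check the libnpm README for the exact surface of the version you install.

```js
// Illustrative sketch only; not part of the changes in this release.
// Assumes libnpm@2, which re-exports libnpmconfig as `config` and
// pacote's manifest() as `manifest`.
const libnpm = require('libnpm')

async function main () {
  // Read the usual .npmrc-based configuration (a figgy-pudding options object).
  const opts = libnpm.config.read()
  // pacote-backed helpers accept those npm-style options directly.
  const pkg = await libnpm.manifest('libnpm@latest', opts)
  console.log(`${pkg.name}@${pkg.version}`)
}

main().catch(err => {
  console.error(err)
  process.exit(1)
})
```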
+ +There's too many commits to list all of them here, so check out the PR if you're +curious about details: + +* [`c5af34c05`](https://github.com/npm/cli/commit/c5af34c05fd569aecd11f18d6d0ddeac3970b253) + [npm-registry-client@REMOVED](https://www.youtube.com/watch\?v\=kPIdRJlzERo) + ([@zkat](https://github.com/zkat)) +* [`4cca9cb90`](https://github.com/npm/cli/commit/4cca9cb9042c0eeb743377e8f1ae1c07733df43f) + [`ad67461dc`](https://github.com/npm/cli/commit/ad67461dc3a73d5ae6569fdbee44c67e1daf86e7) + [`77625f9e2`](https://github.com/npm/cli/commit/77625f9e20d4285b7726b3bf3ebc10cb21c638f0) + [`6e922aefb`](https://github.com/npm/cli/commit/6e922aefbb4634bbd77ed3b143e0765d63afc7f9) + [`584613ea8`](https://github.com/npm/cli/commit/584613ea8ff94b927db4957e5647504b30ca2b1f) + [`64de4ebf0`](https://github.com/npm/cli/commit/64de4ebf019b217179039124c6621e74651e4d27) + [`6cd87d1a9`](https://github.com/npm/cli/commit/6cd87d1a9bb90e795f9891ea4db384435f4a8930) + [`2786834c0`](https://github.com/npm/cli/commit/2786834c0257b8bb1bbb115f1ce7060abaab2e17) + [`514558e09`](https://github.com/npm/cli/commit/514558e094460fd0284a759c13965b685133b3fe) + [`dec07ebe3`](https://github.com/npm/cli/commit/dec07ebe3312245f6421c6e523660be4973ae8c2) + [`084741913`](https://github.com/npm/cli/commit/084741913c4fdb396e589abf3440b4be3aa0b67e) + [`45aff0e02`](https://github.com/npm/cli/commit/45aff0e02251785a85e56eafacf9efaeac6f92ae) + [`846ddcc44`](https://github.com/npm/cli/commit/846ddcc44538f2d9a51ac79405010dfe97fdcdeb) + [`8971ba1b9`](https://github.com/npm/cli/commit/8971ba1b953d4f05ff5094f1822b91526282edd8) + [`99156e081`](https://github.com/npm/cli/commit/99156e081a07516d6c970685bc3d858f89dc4f9c) + [`ab2155306`](https://github.com/npm/cli/commit/ab215530674d7f6123c9572d0ad4ca9e9b5fb184) + [`b37a66542`](https://github.com/npm/cli/commit/b37a66542ca2879069b2acd338b1904de71b7f40) + [`d2af0777a`](https://github.com/npm/cli/commit/d2af0777ac179ff5009dbbf0354a4a84f151b60f) + [`e0b4c6880`](https://github.com/npm/cli/commit/e0b4c6880504fa2e8491c2fbd098efcb2e496849) + [`ff72350b4`](https://github.com/npm/cli/commit/ff72350b4c56d65e4a92671d86a33080bf3c2ea5) + [`6ed943303`](https://github.com/npm/cli/commit/6ed943303ce7a267ddb26aa25caa035f832895dd) + [`90a069e7d`](https://github.com/npm/cli/commit/90a069e7d4646682211f4cabe289c306ee1d5397) + [`b24ed5fdc`](https://github.com/npm/cli/commit/b24ed5fdc3a4395628465ae5273bad54eea274c8) + [`ec9fcc14f`](https://github.com/npm/cli/commit/ec9fcc14f4e0e2f3967e2fd6ad8b8433076393cb) + [`8a56fa39e`](https://github.com/npm/cli/commit/8a56fa39e61136da45565447fe201a57f04ad4cd) + [`41d19e18f`](https://github.com/npm/cli/commit/41d19e18f769c6f0acfdffbdb01d12bf332908ce) + [`125ff9551`](https://github.com/npm/cli/commit/125ff9551595dda9dab2edaef10f4c73ae8e1433) + [`1c3b226ff`](https://github.com/npm/cli/commit/1c3b226ff37159c426e855e83c8f6c361603901d) + [`3c0a7b06b`](https://github.com/npm/cli/commit/3c0a7b06b6473fe068fc8ae8466c07a177975b87) + [`08fcb3f0f`](https://github.com/npm/cli/commit/08fcb3f0f26e025702b35253ed70a527ab69977f) + [`c8135d97a`](https://github.com/npm/cli/commit/c8135d97a424b38363dc4530c45e4583471e9849) + [`ae936f22c`](https://github.com/npm/cli/commit/ae936f22ce80614287f2769e9aaa9a155f03cc15) + [#2](https://github.com/npm/cli/pull/2) + Move rest of commands to `npm-registry-fetch` and use [`figgy-pudding`](https://npm.im/figgy-pudding) for configs. 
+ ([@zkat](https://github.com/zkat)) + +### NEW FEATURES + +* [`02c837e01`](https://github.com/npm/cli/commit/02c837e01a71a26f37cbd5a09be89df8a9ce01da) + [#106](https://github.com/npm/cli/pull/106) + Make `npm dist-tags` the same as `npm dist-tag ls`. + ([@isaacs](https://github.com/isaacs)) +* [`1065a7809`](https://github.com/npm/cli/commit/1065a7809161fd4dc23e96b642019fc842fdacf2) + [#65](https://github.com/npm/cli/pull/65) + Add support for `IBM i`. + ([@dmabupt](https://github.com/dmabupt)) +* [`a22e6f5fc`](https://github.com/npm/cli/commit/a22e6f5fc3e91350d3c64dcc88eabbe0efbca759) + [#131](https://github.com/npm/cli/pull/131) + Update profile to support new npm-profile API. + ([@zkat](https://github.com/zkat)) + +### BUGFIXES + +* [`890a74458`](https://github.com/npm/cli/commit/890a74458dd4a55e2d85f3eba9dbf125affa4206) + [npm.community#3278](https://npm.community/t/3278) + Fix support for passing git binary path config with `--git`. + ([@larsgw](https://github.com/larsgw)) +* [`90e55a143`](https://github.com/npm/cli/commit/90e55a143ed1de8678d65c17bc3c2b103a15ddac) + [npm.community#2713](https://npm.community/t/npx-envinfo-preset-jest-fails-on-windows-with-a-stack-trace/2713) + Check for `npm.config`'s existence in `error-handler.js` to prevent weird + errors when failures happen before config object is loaded. + ([@BeniCheni](https://github.com/BeniCheni)) +* [`134207174`](https://github.com/npm/cli/commit/134207174652e1eb6d7b0f44fd9858a0b6a0cd6c) + [npm.community#2569](https://npm.community/t/2569) + Fix checking for optional dependencies. + ([@larsgw](https://github.com/larsgw)) +* [`7a2f6b05d`](https://github.com/npm/cli/commit/7a2f6b05d27f3bcf47a48230db62e86afa41c9d3) + [npm.community#4172](https://npm.community/t/4172) + Remove tink experiments. + ([@larsgw](https://github.com/larsgw)) +* [`c5b6056b6`](https://github.com/npm/cli/commit/c5b6056b6b35eefb81ae5fb00a5c7681c5318c22) + [#123](https://github.com/npm/cli/pull/123) + Handle git branch references correctly. + ([@johanneswuerbach](https://github.com/johanneswuerbach)) +* [`f58b43ef2`](https://github.com/npm/cli/commit/f58b43ef2c5e3dea2094340a0cf264b2d11a5da4) + [npm.community#3983](https://npm.community/t/npm-audit-error-messaging-update-for-401s/3983) + Report any errors above 400 as potentially not supporting audit. + ([@zkat](https://github.com/zkat)) +* [`a5c9e6f35`](https://github.com/npm/cli/commit/a5c9e6f35a591a6e2d4b6ace5c01bc03f2b75fdc) + [#124](https://github.com/npm/cli/pull/124) + Set default homepage to an empty string. + ([@anchnk](https://github.com/anchnk)) +* [`5d076351d`](https://github.com/npm/cli/commit/5d076351d7ec1d3585942a9289548166a7fbbd4c) + [npm.community#4054](https://npm.community/t/4054) + Fix npm-prefix description. + ([@larsgw](https://github.com/larsgw)) + +### DOCS + +* [`31a7274b7`](https://github.com/npm/cli/commit/31a7274b70de18b24e7bee51daa22cc7cbb6141c) + [#71](https://github.com/npm/cli/pull/71) + Fix typo in npm-token documentation. + ([@GeorgeTaveras1231](https://github.com/GeorgeTaveras1231)) +* [`2401b7592`](https://github.com/npm/cli/commit/2401b7592c6ee114e6db7077ebf8c072b7bfe427) + Correct docs for fake-registry interface. 
+ ([@iarna](https://github.com/iarna)) + +### DEPENDENCIES + +* [`9cefcdc1d`](https://github.com/npm/cli/commit/9cefcdc1d2289b56f9164d14d7e499e115cfeaee) + `npm-registry-fetch@3.8.0` + ([@zkat](https://github.com/zkat)) +* [`1c769c9b3`](https://github.com/npm/cli/commit/1c769c9b3e431d324c1a5b6dd10e1fddb5cb88c7) + `pacote@9.1.0` + ([@zkat](https://github.com/zkat)) +* [`f3bc5539b`](https://github.com/npm/cli/commit/f3bc5539b30446500abcc3873781b2c717f8e22c) + `figgy-pudding@3.5.1` + ([@zkat](https://github.com/zkat)) +* [`bf7199d3c`](https://github.com/npm/cli/commit/bf7199d3cbf50545da1ebd30d28f0a6ed5444a00) + `npm-profile@4.0.1` + ([@zkat](https://github.com/zkat)) +* [`118c50496`](https://github.com/npm/cli/commit/118c50496c01231cab3821ae623be6df89cb0a32) + `semver@5.5.1` + ([@isaacs](https://github.com/isaacs)) +* [`eab4df925`](https://github.com/npm/cli/commit/eab4df9250e9169c694b3f6c287d2932bf5e08fb) + `libcipm@3.0.2` + ([@zkat](https://github.com/zkat)) +* [`b86e51573`](https://github.com/npm/cli/commit/b86e515734faf433dc6c457c36c1de52795aa870) + `libnpm@1.4.0` + ([@zkat](https://github.com/zkat)) +* [`56fffbff2`](https://github.com/npm/cli/commit/56fffbff27fe2fae8bef27d946755789ef0d89bd) + `get-stream@4.1.0` + ([@zkat](https://github.com/zkat)) +* [`df972e948`](https://github.com/npm/cli/commit/df972e94868050b5aa42ac18b527fd929e1de9e4) + npm-profile@REMOVED + ([@zkat](https://github.com/zkat)) +* [`32c73bf0e`](https://github.com/npm/cli/commit/32c73bf0e3f0441d0c7c940292235d4b93aa87e2) + `libnpm@2.0.1` + ([@zkat](https://github.com/zkat)) +* [`569491b80`](https://github.com/npm/cli/commit/569491b8042f939dc13986b6adb2a0a260f95b63) + `licensee@5.0.0` + ([@zkat](https://github.com/zkat)) +* [`a3ba0ccf1`](https://github.com/npm/cli/commit/a3ba0ccf1fa86aec56b1ad49883abf28c1f56b3c) + move rimraf to prod deps + ([@zkat](https://github.com/zkat)) +* [`f63a0d6cf`](https://github.com/npm/cli/commit/f63a0d6cf0b7db3dcc80e72e1383c3df723c8119) + `spdx-license-ids@3.0.3`: + Ref: https://github.com/npm/cli/pull/121 + ([@zkat](https://github.com/zkat)) +* [`f350e714f`](https://github.com/npm/cli/commit/f350e714f66a77f71a7ebe17daeea2ea98179a1a) + `aproba@2.0.0` + ([@aeschright](https://github.com/aeschright)) +* [`a67e4d8b2`](https://github.com/npm/cli/commit/a67e4d8b214e58ede037c3854961acb33fd889da) + `byte-size@5.0.1` + ([@aeschright](https://github.com/aeschright)) +* [`8bea4efa3`](https://github.com/npm/cli/commit/8bea4efa34857c4e547904b3630dd442def241de) + `cacache@11.3.2` + ([@aeschright](https://github.com/aeschright)) +* [`9d4776836`](https://github.com/npm/cli/commit/9d4776836a4eaa4b19701b4e4f00cd64578bf078) + `chownr@1.1.1` + ([@aeschright](https://github.com/aeschright)) +* [`70da139e9`](https://github.com/npm/cli/commit/70da139e97ed1660c216e2d9b3f9cfb986bfd4a4) + `ci-info@2.0.0` + ([@aeschright](https://github.com/aeschright)) +* [`bcdeddcc3`](https://github.com/npm/cli/commit/bcdeddcc3d4dc242f42404223dafe4afdc753b32) + `cli-table3@0.5.1` + ([@aeschright](https://github.com/aeschright)) +* [`63aab82c7`](https://github.com/npm/cli/commit/63aab82c7bfca4f16987cf4156ddebf8d150747c) + `is-cidr@3.0.0` + ([@aeschright](https://github.com/aeschright)) +* [`d522bd90c`](https://github.com/npm/cli/commit/d522bd90c3b0cb08518f249ae5b90bd609fff165) + `JSONStream@1.3.5` + ([@aeschright](https://github.com/aeschright)) +* [`2a59bfc79`](https://github.com/npm/cli/commit/2a59bfc7989bd5575d8cbba912977c6d1ba92567) + `libnpmhook@5.0.2` + ([@aeschright](https://github.com/aeschright)) +* 
[`66d60e394`](https://github.com/npm/cli/commit/66d60e394e5a96330f90e230505758f19a3643ac) + `marked@0.6.0` + ([@aeschright](https://github.com/aeschright)) +* [`8213def9a`](https://github.com/npm/cli/commit/8213def9aa9b6e702887e4f2ed7654943e1e4154) + `npm-packlist@1.2.0` + ([@aeschright](https://github.com/aeschright)) +* [`e4ffc6a2b`](https://github.com/npm/cli/commit/e4ffc6a2bfb8d0b7047cb6692030484760fc8c91) + `unique-filename@1.1.1` + ([@aeschright](https://github.com/aeschright)) +* [`09a5c2fab`](https://github.com/npm/cli/commit/09a5c2fabe0d1c00ec8c99f328f6d28a3495eb0b) + `semver@5.6.0` + ([@aeschright](https://github.com/aeschright)) +* [`740e79e17`](https://github.com/npm/cli/commit/740e79e17a78247f73349525043c9388ce94459a) + `rimraf@2.6.3` + ([@aeschright](https://github.com/aeschright)) +* [`455476c8d`](https://github.com/npm/cli/commit/455476c8d148ca83a4e030e96e93dcf1c7f0ff5f) + `require-inject@1.4.4` + ([@aeschright](https://github.com/aeschright)) +* [`3f40251c5`](https://github.com/npm/cli/commit/3f40251c5868feaacbcdbcb1360877ce76998f5e) + `npm-pick-manifest@2.2.3` + ([@aeschright](https://github.com/aeschright)) +* [`4ffa8a8e9`](https://github.com/npm/cli/commit/4ffa8a8e9e80e5562898dd76fe5a49f5694f38c8) + `query-string@6.2.0` + ([@aeschright](https://github.com/aeschright)) +* [`a0a0ca9ec`](https://github.com/npm/cli/commit/a0a0ca9ec2a962183d420fa751f4139969760f18) + `pacote@9.3.0` + ([@aeschright](https://github.com/aeschright)) +* [`5777ea8ad`](https://github.com/npm/cli/commit/5777ea8ad2058be3166a6dad2d31d2d393c9f778) + `readable-stream@3.1.1` + ([@aeschright](https://github.com/aeschright)) +* [`887e94386`](https://github.com/npm/cli/commit/887e94386f42cb59a5628e7762b3662d084b23c8) + `lru-cache@4.1.5` + ([@aeschright](https://github.com/aeschright)) +* [`41f15524c`](https://github.com/npm/cli/commit/41f15524c58c59d206c4b1d25ae9e0f22745213b) + Updating semver docs. + ([@aeschright](https://github.com/aeschright)) +* [`fb3bbb72d`](https://github.com/npm/cli/commit/fb3bbb72d448ac37a465b31233b21381917422f3) + `npm-audit-report@1.3.2`: + ([@melkikh](https://github.com/melkikh)) + +### TESTING + +* [`f1edffba9`](https://github.com/npm/cli/commit/f1edffba90ebd96cf88675d2e18ebc48954ba50e) + Modernize maketest script. + ([@iarna](https://github.com/iarna)) +* [`ae263473d`](https://github.com/npm/cli/commit/ae263473d92a896b482830d4019a04b5dbd1e9d7) + maketest: Use promise based example common.npm call. + ([@iarna](https://github.com/iarna)) +* [`d9970da5e`](https://github.com/npm/cli/commit/d9970da5ee97a354eab01cbf16f9101693a15d2d) + maketest: Use newEnv for env production. + ([@iarna](https://github.com/iarna)) + +### MISCELLANEOUS + +* [`c665f35aa`](https://github.com/npm/cli/commit/c665f35aacdb8afdbe35f3dd7ccb62f55ff6b896) + [#119](https://github.com/npm/cli/pull/119) + Replace var with const/let in lib/repo.js. + ([@watilde](https://github.com/watilde)) +* [`46639ba9f`](https://github.com/npm/cli/commit/46639ba9f04ea729502f1af28b02eb67fb6dcb66) + Update package-lock.json for https tarball URLs + ([@aeschright](https://github.com/aeschright)) + ## v6.5.0 (2018-11-28): ### NEW FEATURES @@ -13,7 +1469,7 @@ ### BUGFIXES * [`89652cb9b`](https://github.com/npm/cli/commit/89652cb9b810f929f5586fc90cc6794d076603fb) - [npm.community#1661](https://npm.community/t/https://npm.community/t/1661) + [npm.community#1661](https://npm.community/t/1661) Fix sign-git-commit options. They were previously totally wrong. 
([@zkat](https://github.com/zkat)) * [`414f2d1a1`](https://github.com/npm/cli/commit/414f2d1a1bdffc02ed31ebb48a43216f284c21d4) @@ -167,7 +1623,7 @@ circular dependency fix, as well as adding a proper LICENSE file. ([@isaacs](https://github.com/isaacs)) * [`e8d5f4418`](https://github.com/npm/cli/commit/e8d5f441821553a31fc8cd751670663699d2c8ce) - [npm.community#632](https://npm.community/t/https://npm.community/t/using-npm-ci-does-not-run-prepare-script-for-git-modules/632) + [npm.community#632](https://npm.community/t/using-npm-ci-does-not-run-prepare-script-for-git-modules/632) `libcipm@2.0.2`: Fixes issue where `npm ci` wasn't running the `prepare` lifecycle script when installing git dependencies @@ -304,7 +1760,7 @@ Thanks, [@rvagg](https://github.com/rvagg)! * [`af98e76ed`](https://github.com/npm/cli/commit/af98e76ed96af780b544962aa575585b3fa17b9a) [npm/cli#34](https://github.com/npm/cli/pull/34) - Remove `npm publish` from list of commands not affected by `--dry-run`. + Remove `npm publish` from list of commands not affected by `--dry-run`. ([@joebowbeer](https://github.com/joebowbeer)) * [`e2b0f0921`](https://github.com/npm/cli/commit/e2b0f092193c08c00f12a6168ad2bd9d6e16f8ce) [npm/cli#36](https://github.com/npm/cli/pull/36) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3fc4ff5dc7a82..981f0457d5c6b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -3,80 +3,190 @@ ## Table of Contents * [Introduction](#introduction) -* [Roles](#roles) - * [Community Members](#community-members) - * [Collaborators](#collaborators) - * [npm, Inc Employees](#npm-inc-employees) - +* [Code Structure](#code-structure) +* [Running Tests](#running-tests) +* [Debugging](#debugging) +* [Coverage](#coverage) +* [Benchmarking](#benchmarking) +* [Types of Contributions](#types-of-contributions) + * [Contributing an Issue?](#contributing-an-issue) + * [Contributing a Question?](#contributing-a-question) + * [Contributing a Bug Fix?](#contributing-a-bug-fix) + * [Contributing a Feature?](#contributing-a-bug-feature) +* [Development Dependencies](#development-dependencies) +* [Dependencies](#dependencies) ## Introduction -Welcome to the npm CLI Contributor Guide! This document outlines the npm CLI repository's process for community interaction and contribution. This includes the issue tracker, pull requests, wiki pages, and, to a certain extent, outside communication in the context of the npm CLI. It defines roles, responsibilities, and procedures, and is an entry point for anyone wishing to contribute their time and effort to making npm a better tool for the JavaScript community! +Welcome to the npm CLI Contributor Guide! This document outlines the npm CLI repository's process for community interaction and contribution. This includes the issue tracker, pull requests, wiki pages, and, to a certain extent, outside communication in the context of the npm CLI. This is an entry point for anyone wishing to contribute their time and effort to making npm a better tool for the JavaScript community! All interactions in the npm repository are covered by the [npm Code of Conduct](https://www.npmjs.com/policies/conduct) -## Roles -There are three main roles for people participating in the npm issue tracker. Each has a specific set of abilities and responsibilities: [Community members](#community-members), [Collaborators](#collaborators), and [npm, Inc employees](#npm-inc-employees). +## Code Structure +``` +/ +├── bin/ +│ │ # Directory for executable files. 
It's very rare that you +│ │ # will need to update a file in this directory. +│ │ +│ ├── npm # npm-cli entrypoint for bourne shell +│ ├── npm-cli.js # npm-cli entrypoint for node +│ ├── npm.cmd # npm-cli entrypoint for windows +│ ├── npx # npx entrypoint for bourne shell +│ ├── npx-cli.js # npx entrypoint for node +│ └── npx.cmd # npx entrypoint for windows +│ +├── docs/ 📖 +│ │ # Directory that contains the documentation website for +│ │ # the npm-cli. You can run this website locally, and have +│ │ # offline docs! 🔥📖🤓 +│ │ +│ ├── content/ # Markdown files for site content +│ ├── src/ # Source files for the website; gatsby related +│ └── package.json # Site manifest; scripts and dependencies +│ +├── lib/ 📦 +│ # All the Good Bits(tm) of the CLI project live here +│ +├── node_modules/ 🔋 +│ # Vendored dependencies for the CLI project (See the +│ # dependencies section below for more details). +│ +├── scripts/ 📜 +│ # We've created some helper scripts for working with the +│ # CLI project, specifically around managing our vendored +│ # dependencies, merging in pull-requests, and publishing +│ # releases. +│ +├── test/ 🧪 +│ # All the tests for the CLI live in this folder. We've +│ # got a lot of tests 🤓🧪🩺 +│ +├── CONTRIBUTING.md # This file! 🎉 +└── package.json # The project's main manifest file 📃 +``` + +## Running Tests + +``` +# Make sure you install the dependencies first before running tests. +$ npm install + +# Run tests for the CLI (it could take a while). +$ npm run test +``` + +## Debugging + +It can be tricky to track down issues in the CLI. It's a large code base that has been evolving for over a decade. There is a handy `make` command that will connect the **cloned repository** you have on your machine with the global command, so you can add `console.log` statements or debug any other way you feel most comfortable with. + +``` +# Clone the repository to start with +$ git clone git@github.com:npm/cli.git + +# Change working directories into the repository +$ cd cli + +# Make sure you have the latest code (if that's what you're trying to debug) +$ git fetch origin latest + +# Connect repository to the global namespace +$ make link + +################# +# ALTERNATIVELY +################# +# If you're working on a feature or bug, you can run the same command on your +# working branch and link that code. + +# Create new branch to work from (there are many ways) +$ git checkout -b feature/awesome-feature + +# Connect repository to global namespace +$ make link +``` + +## Coverage + +We try to make sure that each new feature or bug fix has tests to go along with it in order to keep code coverage consistent and increasing. We are actively striving for 100% code coverage! + +``` +# You can run the following command to find out coverage +$ npm run test-coverage +``` + +## Benchmarking + +We often want to know if the bug we've fixed or the feature we've added has any sort of performance impact. We've created a [benchmark suite](https://github.com/npm/benchmarks) to run against the CLI project from pull-requests. If you would like to know if there are any performance impacts to the work you're contributing, simply do the following: + +1. Make a pull-request against this repository +2. Add the following comment to the pull-request: "`test this please ✅`" + +This will trigger the [benchmark suite](https://github.com/npm/benchmarks) to run against your pull-request, and when it's finished running it will post a comment on your pull-request just like below.
You'll be able to see the results from the suite inline in your pull-request. + +> You'll notice that the bot-user will also add a 🚀 reaction to your comment to +let you know that it's sent the request to start the benchmark suite. + +![image](https://user-images.githubusercontent.com/2818462/72312698-e2e57f80-3656-11ea-9fcf-4a8f6b97b0d1.png) + +If you've updated your pull-request and you'd like to run the benchmark suite again, simply update your original comment by adding `test this please ✅` again, or just adding another emoji to the **end**. _(The trigger is the phrase "test this please ✅" at the beginning of a comment. Updates will trigger as well, so long as the phrase stays at the beginning.)_ + +![image](https://user-images.githubusercontent.com/2818462/72313006-ec231c00-3657-11ea-9bd9-227634d67362.png) + +## Types of Contributions + +### Contributing an Issue? -Failure to comply with the expected responsibilities of each role, or violating the Code of Conduct will result in punitive action relative to the transgression, ranging from a warning to full removal from the project, at the discretion of npm employees. +Great!! Is your [new issue](https://github.com/npm/cli/issues/new/choose) a [bug](https://github.com/npm/cli/issues/new?template=bug.md&title=%5BBUG%5D+%3Ctitle%3E), a [feature](https://github.com/npm/cli/issues/new?template=feature.md&title=%5BFEATURE%5D+%3Ctitle%3E), or a [question](https://github.com/npm/cli/issues/new?template=question.md&title=%5BQUESTION%5D+%3Ctitle%3E)? -### Community Members +### Contributing a Question? -This includes anyone who may show up to the npm/npm repo with issues, PRs, comments etc. They may not have any other involvement with npm. +Huh? 🤔 Got a situation you're not sure about?! Perfect! We've got some resources you can use. -#### Abilities +* Our [documentation site](https://docs.npmjs.com/) +* The local docs that come with the CLI project -* Open issues and PRs -* Comment on issues and PRs + > **Example**: `npm help install --viewer browser` -#### Responsibilities +* The man pages that are built and shipped with the CLI -* Comment on issues when they have a reference to the answer. -* If community members aren't sure they are correct and don't have a reference to the answer, please leave the issue and try another one. -* Defer to collaborators and npm employees for answers. -* Make sure to search for [the troubleshooting posts on npm.community](https://npm.community/c/support/troubleshooting) and search on the issue tracker for similar issues before opening a new one. -* Any users with urgent support needs are welcome to email support@npmjs.com, and our dedicated support team will be happy to help. + > **Example**: `man npm-install` (only on linux/macOS) -PLEASE don't @ collaborators or npm employees on issues. The CLI team is small, and has many outstanding commitments to fulfill. +* Search the [current issues](https://github.com/npm/cli/issues) -### Collaborators +### Contributing a Bug Fix? -These are folks who have the ability to label and close issues. The role of collaborators may expand over time, but for now it is a limited (& important) role. This is an excellent way to contribute to npm without writing code. +We'd be happy to triage and help! Head over to the issues and [create a new one](https://github.com/npm/cli/issues/new?template=bug.md&title=%5BBUG%5D+%3Ctitle%3E)!
-Community members may become collaborators by showing consistent, proven track record of quality contributions to the project, a reasonable level of proficiency with the CLI, and regular participation through the tracker and other related mediums, including regular contact with the CLI team itself. This role entails a higher level of responsibility than community member, so we ask for a higher level of understanding and commitment. +> We'll need a little bit of information about what happened, rather than "it broke". Such as: +* When did/does this bug happen? +* Can you reproduce it? _(Can you make it happen more than once?)_ +* What version of `node`/`npm` are you running on your computer? +* What did you expect it to do? +* What did it _actually_ do? +* etc... -Collaborators who become inactive for 3 months or longer may have their collaborator privileges removed until they are ready to return. +### Contributing a Feature? -#### Abilities +Snazzy, we're always up for fancy new things! If the feature is fairly minor, the team can triage it and prioritize it into our backlog. However, if the feature is a little more complex, then it's best to create an [RFC](https://en.wikipedia.org/wiki/Request_for_Comments) in our [RFC repository](https://github.com/npm/rfcs). Exactly how to do that is outlined in that repository. If you're not sure _exactly_ how to implement your idea, or don't want to make a document about your idea, then please create an issue on that repository. We consider these RRFCs, or a "Requesting Request For Comment". -* Label/triage new issues -* Respond to ongoing issues -* Close resolved issues. +## Development Dependencies -#### Responsibilities +You'll need a few things installed in order to update and test the CLI project during development: -* Only answer questions when they know the answer, and provide a reference to the answer. -* If collaborators aren't totally confident about their answer, please leave the issue and try another one. -* If they've responded to an issue, it becomes their responsibility to see it to resolution. -* Defer to fellow Collaborators & npm employees for answers (Again, please don't @ collaborators or npm employees, thank you!) -* Make sure to search [the troubleshooting posts on npm.community](https://npm.community/c/support/troubleshooting) and search the rest of the forum for similar topics. +* [node](https://nodejs.org/) v8 or greater -### npm, Inc Employees +> We recommend that you have a [node version manager](https://github.com/nvm-sh/nvm) installed if you plan on fixing bugs that might be present in a specific version of node. With a version manager you can easily switch versions of node and test if your changes to the CLI project are working. -Folks who work at npm, Inc, who have a responsibility to ensure the stability and functionality of the tools npm offers. +* [git](https://git-scm.com/) v2.11+ -#### Abilities -* Label/triage new issues -* Respond to ongoing issues -* Close resolved issues -* Land PRs +## Dependencies -Please note that this is a living document, and the CLI team will put up PRs to it as needed. +> Package vendoring is commonly referred to as the case where dependent packages are stored in the same place as your project. That usually means your dependencies are checked into your source management system, such as Git. -#### Responsibilities +The CLI project vendors its dependencies in the `node_modules/` folder, meaning all the dependencies that the CLI project uses are contained within the project itself.
This is represented by the `bundledDependencies` section in the root level `package.json` file. The main reason for this is because the `npm` CLI project is distributed with the NodeJS runtime and needs to work out of the box, which means all dependencies need to be available after the runtime is installed. -* Preserve and promote the health of the CLI, the registry, the website, etc. +There are a couple scripts created to help manage this process in the `scripts/` folder. -In special cases, [Collaborators](#collaborators) may request time to speak with an npm employee directly, by contacting them and coordinating a time/place. diff --git a/Makefile b/Makefile index 4e00647a19cf0..dde925f1c438c 100644 --- a/Makefile +++ b/Makefile @@ -4,51 +4,25 @@ SHELL = bash PUBLISHTAG = $(shell node scripts/publish-tag.js) BRANCH = $(shell git rev-parse --abbrev-ref HEAD) -markdowns = $(shell find doc -name '*.md' | grep -v 'index') README.md +markdowns = $(shell find docs -name '*.md' | grep -v 'index') README.md -html_docdeps = html/dochead.html \ - html/docfoot.html \ - scripts/doc-build.sh \ - package.json - -cli_mandocs = $(shell find doc/cli -name '*.md' \ +cli_mandocs = $(shell find docs/content/cli-commands -name '*.md' \ |sed 's|.md|.1|g' \ - |sed 's|doc/cli/|man/man1/|g' ) \ + |sed 's|docs/content/cli-commands/|man/man1/|g' ) \ man/man1/npm-README.1 \ man/man1/npx.1 -files_mandocs = $(shell find doc/files -name '*.md' \ +files_mandocs = $(shell find docs/content/configuring-npm -name '*.md' \ |sed 's|.md|.5|g' \ - |sed 's|doc/files/|man/man5/|g' ) \ - man/man5/npm-json.5 \ - man/man5/npm-global.5 + |sed 's|docs/content/configuring-npm/|man/man5/|g' ) \ -misc_mandocs = $(shell find doc/misc -name '*.md' \ +misc_mandocs = $(shell find docs/content/using-npm -name '*.md' \ |sed 's|.md|.7|g' \ - |sed 's|doc/misc/|man/man7/|g' ) \ - man/man7/npm-index.7 - -cli_htmldocs = $(shell find doc/cli -name '*.md' \ - |sed 's|.md|.html|g' \ - |sed 's|doc/cli/|html/doc/cli/|g' ) \ - html/doc/README.html - -files_htmldocs = $(shell find doc/files -name '*.md' \ - |sed 's|.md|.html|g' \ - |sed 's|doc/files/|html/doc/files/|g' ) \ - html/doc/files/npm-json.html \ - html/doc/files/npm-global.html - -misc_htmldocs = $(shell find doc/misc -name '*.md' \ - |sed 's|.md|.html|g' \ - |sed 's|doc/misc/|html/doc/misc/|g' ) \ - html/doc/index.html + |sed 's|docs/content/using-npm/|man/man7/|g' ) \ mandocs = $(cli_mandocs) $(files_mandocs) $(misc_mandocs) -htmldocs = $(cli_htmldocs) $(files_htmldocs) $(misc_htmldocs) - -all: doc +all: docs latest: @echo "Installing latest published npm" @@ -65,14 +39,21 @@ dev: install link: uninstall node bin/npm-cli.js link -f -clean: markedclean marked-manclean doc-clean +clean: markedclean marked-manclean docs-clean rm -rf npmrc node bin/npm-cli.js cache clean --force uninstall: node bin/npm-cli.js rm npm -g -f -doc: $(mandocs) $(htmldocs) +mandocs: $(mandocs) + +htmldocs: + cd docs && node ../bin/npm-cli.js install && \ + node ../bin/npm-cli.js run build:static echo>&2 && \ + rm -rf node_modules .cache public/*js public/*json public/404* public/page-data public/manifest* + +docs: mandocs htmldocs markedclean: rm -rf node_modules/marked node_modules/.bin/marked .building_marked @@ -80,26 +61,28 @@ markedclean: marked-manclean: rm -rf node_modules/marked-man node_modules/.bin/marked-man .building_marked-man -docclean: doc-clean -doc-clean: +docsclean: docs-clean +docs-clean: rm -rf \ .building_marked \ .building_marked-man \ - html/doc \ - man + man \ + docs/node_modules \ + 
docs/public \ + docs/.cache ## build-time tools for the documentation build-doc-tools := node_modules/.bin/marked \ node_modules/.bin/marked-man # use `npm install marked-man` for this to work. -man/man1/npm-README.1: README.md scripts/doc-build.sh package.json $(build-doc-tools) +man/man1/npm-README.1: README.md scripts/docs-build.js package.json $(build-doc-tools) @[ -d man/man1 ] || mkdir -p man/man1 - scripts/doc-build.sh $< $@ + node scripts/docs-build.js $< $@ -man/man1/%.1: doc/cli/%.md scripts/doc-build.sh package.json $(build-doc-tools) +man/man1/%.1: docs/content/cli-commands/%.md scripts/docs-build.js package.json $(build-doc-tools) @[ -d man/man1 ] || mkdir -p man/man1 - scripts/doc-build.sh $< $@ + node scripts/docs-build.js $< $@ man/man1/npx.1: node_modules/libnpx/libnpx.1 cat $< | sed s/libnpx/npx/ > $@ @@ -107,46 +90,16 @@ man/man1/npx.1: node_modules/libnpx/libnpx.1 man/man5/npm-json.5: man/man5/package.json.5 cp $< $@ -man/man5/npm-global.5: man/man5/npm-folders.5 +man/man5/npm-global.5: man/man5/folders.5 cp $< $@ -man/man5/%.5: doc/files/%.md scripts/doc-build.sh package.json $(build-doc-tools) +man/man5/%.5: docs/content/configuring-npm/%.md scripts/docs-build.js package.json $(build-doc-tools) @[ -d man/man5 ] || mkdir -p man/man5 - scripts/doc-build.sh $< $@ - -doc/misc/npm-index.md: scripts/index-build.js package.json $(build-doc-tools) - node scripts/index-build.js > $@ + node scripts/docs-build.js $< $@ -html/doc/index.html: doc/misc/npm-index.md $(html_docdeps) $(build-doc-tools) - @[ -d html/doc ] || mkdir -p html/doc - scripts/doc-build.sh $< $@ - -man/man7/%.7: doc/misc/%.md scripts/doc-build.sh package.json $(build-doc-tools) +man/man7/%.7: docs/content/using-npm/%.md scripts/docs-build.js package.json $(build-doc-tools) @[ -d man/man7 ] || mkdir -p man/man7 - scripts/doc-build.sh $< $@ - -html/doc/README.html: README.md $(html_docdeps) $(build-doc-tools) - @[ -d html/doc ] || mkdir -p html/doc - scripts/doc-build.sh $< $@ - -html/doc/cli/%.html: doc/cli/%.md $(html_docdeps) $(build-doc-tools) - @[ -d html/doc/cli ] || mkdir -p html/doc/cli - scripts/doc-build.sh $< $@ - -html/doc/files/npm-json.html: html/doc/files/package.json.html - cp $< $@ - -html/doc/files/npm-global.html: html/doc/files/npm-folders.html - cp $< $@ - -html/doc/files/%.html: doc/files/%.md $(html_docdeps) $(build-doc-tools) - @[ -d html/doc/files ] || mkdir -p html/doc/files - scripts/doc-build.sh $< $@ - -html/doc/misc/%.html: doc/misc/%.md $(html_docdeps) $(build-doc-tools) - @[ -d html/doc/misc ] || mkdir -p html/doc/misc - scripts/doc-build.sh $< $@ - + node scripts/docs-build.js $< $@ marked: node_modules/.bin/marked @@ -158,11 +111,7 @@ marked-man: node_modules/.bin/marked-man node_modules/.bin/marked-man: node bin/npm-cli.js install marked-man --no-global --no-timing --no-save -doc: man - -man: $(cli_docs) - -test: doc +test: docs node bin/npm-cli.js test tag: @@ -174,17 +123,17 @@ ls-ok: gitclean: git clean -fd -publish: gitclean ls-ok link doc-clean doc +publish: gitclean ls-ok link docs-clean docs @git push origin :v$(shell node bin/npm-cli.js --no-timing -v) 2>&1 || true git push origin $(BRANCH) &&\ git push origin --tags &&\ node bin/npm-cli.js publish --tag=$(PUBLISHTAG) -release: gitclean ls-ok markedclean marked-manclean doc-clean doc +release: gitclean ls-ok markedclean marked-manclean docs-clean docs node bin/npm-cli.js prune --production --no-save @bash scripts/release.sh sandwich: @[ $$(whoami) = "root" ] && (echo "ok"; echo "ham" > sandwich) || (echo "make it 
yourself" && exit 13) -.PHONY: all latest install dev link doc clean uninstall test man doc-clean docclean release ls-ok realclean +.PHONY: all latest install dev link docs clean uninstall test man docs-clean docclean release ls-ok realclean diff --git a/README.md b/README.md index bb33879fd0118..05da169f25961 100644 --- a/README.md +++ b/README.md @@ -155,12 +155,7 @@ When you find issues, please report them: Be sure to include *all* of the output from the npm command that didn't work as expected. The `npm-debug.log` file is also helpful to provide. -You can also find npm people in `#npm` on https://package.community/ or -[on Twitter](https://twitter.com/npm_support). Whoever responds will no -doubt tell you to put the output in a gist or email. - ## SEE ALSO * npm(1) * npm-help(1) -* npm-index(7) diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index 7b97e79d626ef..0000000000000 --- a/appveyor.yml +++ /dev/null @@ -1,34 +0,0 @@ -environment: - matrix: - # LTS is our most important target - - nodejs_version: "6" - # previous LTS is next most important - - nodejs_version: "4" - - nodejs_version: "7" - # then master - - nodejs_version: "8" - COVERALLS_REPO_TOKEN: - secure: XdC0aySefK0HLh1GNk6aKrzZPbCfPQLyA4mYtFGEp4DrTuZA/iuCUS0LDqFYO8JQ -platform: - - x86 - - x64 -install: - - ps: Install-Product node $env:nodejs_version $env:platform - - npm config set spin false - - npm rebuild - - node . install -g . - - set "PATH=%APPDATA%\npm;C:\Program Files\Git\mingw64\libexec;%PATH%" - - npm install --loglevel=http -test_script: - - node --version - - npm --version - - npm run test -- --reporter=classic -notifications: -- provider: Slack - incoming_webhook: - secure: vXiG5AgpqxJsXZ0N0CTYDuVrX6RMjBybZKtOx6IbRxCyjgd+DAx6Z9/0XgYQjuof7QFJY3M/U6HxaREQVYbNVHA+C5N5dNALRbKzAC8QNbA= -# GO_FAST -matrix: - fast_finish: true -# we don't need the builds, we just need tests -build: off diff --git a/bin/npm b/bin/npm index 5acd6fb61a798..4183703a7857e 100755 --- a/bin/npm +++ b/bin/npm @@ -8,6 +8,9 @@ case `uname` in esac NODE_EXE="$basedir/node.exe" +if ! [ -x "$NODE_EXE" ]; then + NODE_EXE="$basedir/node" +fi if ! [ -x "$NODE_EXE" ]; then NODE_EXE=node fi diff --git a/bin/npm-cli.js b/bin/npm-cli.js index 6f76b23828531..93eddc7a3c892 100755 --- a/bin/npm-cli.js +++ b/bin/npm-cli.js @@ -25,7 +25,6 @@ unsupported.checkForUnsupportedNode() - var path = require('path') var npm = require('../lib/npm.js') var npmconf = require('../lib/config/core.js') var errorHandler = require('../lib/utils/error-handler.js') @@ -37,7 +36,7 @@ // if npm is called as "npmg" or "npm_g", then // run in global mode. - if (path.basename(process.argv[1]).slice(-1) === 'g') { + if (process.argv[1][process.argv[1].length - 1] === 'g') { process.argv.splice(1, 1, 'npm', '-g') } @@ -63,6 +62,7 @@ log.info('using', 'node@%s', process.version) process.on('uncaughtException', errorHandler) + process.on('unhandledRejection', errorHandler) if (conf.usage && npm.command !== 'help') { npm.argv.unshift(npm.command) diff --git a/changelogs/CHANGELOG-3.md b/changelogs/CHANGELOG-3.md index e8ef464098d83..fc55a696fb22a 100644 --- a/changelogs/CHANGELOG-3.md +++ b/changelogs/CHANGELOG-3.md @@ -2864,7 +2864,7 @@ Historically, if you used a pre-release version of Node.js, you would get dozens and dozens of warnings when EVERY engine check failed across all of your modules, because `>= 0.10.0` doesn't match prereleases. -You might find this stream of redundent warnings undesirable. I do. 
+You might find this stream of redundant warnings undesirable. I do. We've moved this into a SINGLE warning you'll get about using a pre-release version of Node.js and now suppress those other warnings. @@ -3524,7 +3524,7 @@ it would just refuse to install anything. (We fixed that in [#10338](https://github.com/npm/npm/pull/10338) Updating a module could result in the module stealing some of its dependencies from the top level, potentially breaking other modules or - resulting in many redundent installations. This bug was first introduced + resulting in many redundant installations. This bug was first introduced by [`971fd47a`](https://github.com/npm/npm/commit/971fd47a). ([@iarna](https://github.com/iarna)) * [`5653366`](https://github.com/npm/npm/commit/5653366) diff --git a/configure b/configure index b13c8d0d73e22..f177f43a3a4da 100755 --- a/configure +++ b/configure @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # set configurations that will be "sticky" on this system, # surviving npm self-updates. diff --git a/doc/cli/npm-access.md b/doc/cli/npm-access.md deleted file mode 100644 index bbccfc70937c7..0000000000000 --- a/doc/cli/npm-access.md +++ /dev/null @@ -1,76 +0,0 @@ -npm-access(1) -- Set access level on published packages -======================================================= - -## SYNOPSIS - - npm access public [] - npm access restricted [] - - npm access grant [] - npm access revoke [] - - npm access ls-packages [||] - npm access ls-collaborators [ []] - npm access edit [] - -## DESCRIPTION - -Used to set access controls on private packages. - -For all of the subcommands, `npm access` will perform actions on the packages -in the current working directory if no package name is passed to the -subcommand. - -* public / restricted: - Set a package to be either publicly accessible or restricted. - -* grant / revoke: - Add or remove the ability of users and teams to have read-only or read-write - access to a package. - -* ls-packages: - Show all of the packages a user or a team is able to access, along with the - access level, except for read-only public packages (it won't print the whole - registry listing) - -* ls-collaborators: - Show all of the access privileges for a package. Will only show permissions - for packages to which you have at least read access. If `` is passed in, - the list is filtered only to teams _that_ user happens to belong to. - -* edit: - Set the access privileges for a package at once using `$EDITOR`. - -## DETAILS - -`npm access` always operates directly on the current registry, configurable -from the command line using `--registry=`. - -Unscoped packages are *always public*. - -Scoped packages *default to restricted*, but you can either publish them as -public using `npm publish --access=public`, or set their access as public using -`npm access public` after the initial publish. - -You must have privileges to set the access of a package: - -* You are an owner of an unscoped or scoped package. -* You are a member of the team that owns a scope. -* You have been given read-write privileges for a package, either as a member - of a team or directly as an owner. - -If you have two-factor authentication enabled then you'll have to pass in an -otp with `--otp` when making access changes. - -If your account is not paid, then attempts to publish scoped packages will fail -with an HTTP 402 status code (logically enough), unless you use -`--access=public`. - -Management of teams and team memberships is done with the `npm team` command. 
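As a quick illustration of the `npm access` subcommands described above, here is a hypothetical sequence; `@myorg/mypkg` and the `myorg:developers` team are placeholder names:

```
# make an already-published scoped package public
npm access public @myorg/mypkg

# give a team read-write access, then review the collaborator list
npm access grant read-write myorg:developers @myorg/mypkg
npm access ls-collaborators @myorg/mypkg
```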
- -## SEE ALSO - -* npm-team(1) -* npm-publish(1) -* npm-config(7) -* npm-registry(7) diff --git a/doc/cli/npm-adduser.md b/doc/cli/npm-adduser.md deleted file mode 100644 index 72433e00f7483..0000000000000 --- a/doc/cli/npm-adduser.md +++ /dev/null @@ -1,85 +0,0 @@ -npm-adduser(1) -- Add a registry user account -============================================= - -## SYNOPSIS - - npm adduser [--registry=url] [--scope=@orgname] [--always-auth] [--auth-type=legacy] - - aliases: login, add-user - -## DESCRIPTION - -Create or verify a user named `` in the specified registry, and -save the credentials to the `.npmrc` file. If no registry is specified, -the default registry will be used (see `npm-config(7)`). - -The username, password, and email are read in from prompts. - -To reset your password, go to - -To change your email address, go to - -You may use this command multiple times with the same user account to -authorize on a new machine. When authenticating on a new machine, -the username, password and email address must all match with -your existing record. - -`npm login` is an alias to `adduser` and behaves exactly the same way. - -## CONFIGURATION - -### registry - -Default: https://registry.npmjs.org/ - -The base URL of the npm package registry. If `scope` is also specified, -this registry will only be used for packages with that scope. `scope` defaults -to the scope of the project directory you're currently in, if any. See `npm-scope(7)`. - -### scope - -Default: none - -If specified, the user and login credentials given will be associated -with the specified scope. See `npm-scope(7)`. You can use both at the same time, -e.g. - - npm adduser --registry=http://myregistry.example.com --scope=@myco - -This will set a registry for the given scope and login or create a user for -that registry at the same time. - -### always-auth - -Default: false - -If specified, save configuration indicating that all requests to the given -registry should include authorization information. Useful for private -registries. Can be used with `--registry` and / or `--scope`, e.g. - - npm adduser --registry=http://private-registry.example.com --always-auth - -This will ensure that all requests to that registry (including for tarballs) -include an authorization header. This setting may be necessary for use with -private registries where metadata and package tarballs are stored on hosts with -different hostnames. See `always-auth` in `npm-config(7)` for more details on -always-auth. Registry-specific configuration of `always-auth` takes precedence -over any global configuration. - -### auth-type - -* Default: `'legacy'` -* Type: `'legacy'`, `'sso'`, `'saml'`, `'oauth'` - -What authentication strategy to use with `adduser`/`login`. Some npm registries -(for example, npmE) might support alternative auth strategies besides classic -username/password entry in legacy npm. 
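A short, hedged example of combining the flags discussed above; the registry URL and the `@myco` scope are placeholders for your own private registry:

```
# log in against a private registry, tie it to a scope, and always send auth
npm adduser --registry=https://registry.example.com --scope=@myco --always-auth

# confirm which account that registry now associates with you
npm whoami --registry=https://registry.example.com
```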
- -## SEE ALSO - -* npm-registry(7) -* npm-config(1) -* npm-config(7) -* npmrc(5) -* npm-owner(1) -* npm-whoami(1) diff --git a/doc/cli/npm-audit.md b/doc/cli/npm-audit.md deleted file mode 100644 index 4c6d717418ab3..0000000000000 --- a/doc/cli/npm-audit.md +++ /dev/null @@ -1,106 +0,0 @@ -npm-audit(1) -- Run a security audit -==================================== - -## SYNOPSIS - - npm audit [--json|--parseable] - npm audit fix [--force|--package-lock-only|--dry-run|--production|--only=dev] - -## EXAMPLES - -Scan your project for vulnerabilities and automatically install any compatible -updates to vulnerable dependencies: -``` -$ npm audit fix -``` - -Run `audit fix` without modifying `node_modules`, but still updating the -pkglock: -``` -$ npm audit fix --package-lock-only -``` - -Skip updating `devDependencies`: -``` -$ npm audit fix --only=prod -``` - -Have `audit fix` install semver-major updates to toplevel dependencies, not just -semver-compatible ones: -``` -$ npm audit fix --force -``` - -Do a dry run to get an idea of what `audit fix` will do, and _also_ output -install information in JSON format: -``` -$ npm audit fix --dry-run --json -``` - -Scan your project for vulnerabilities and just show the details, without fixing -anything: -``` -$ npm audit -``` - -Get the detailed audit report in JSON format: -``` -$ npm audit --json -``` - -Get the detailed audit report in plain text result, separated by tab characters, allowing for -future reuse in scripting or command line post processing, like for example, selecting -some of the columns printed: -``` -$ npm audit --parseable -``` - -To parse columns, you can use for example `awk`, and just print some of them: -``` -$ npm audit --parseable | awk -F $'\t' '{print $1,$4}' -``` - -## DESCRIPTION - -The audit command submits a description of the dependencies configured in -your project to your default registry and asks for a report of known -vulnerabilities. The report returned includes instructions on how to act on -this information. - -You can also have npm automatically fix the vulnerabilities by running `npm -audit fix`. Note that some vulnerabilities cannot be fixed automatically and -will require manual intervention or review. Also note that since `npm audit fix` -runs a full-fledged `npm install` under the hood, all configs that apply to the -installer will also apply to `npm install` -- so things like `npm audit fix ---package-lock-only` will work as expected. - -## CONTENT SUBMITTED - -* npm_version -* node_version -* platform -* node_env -* A scrubbed version of your package-lock.json or npm-shrinkwrap.json - -### SCRUBBING - -In order to ensure that potentially sensitive information is not included in -the audit data bundle, some dependencies may have their names (and sometimes -versions) replaced with opaque non-reversible identifiers. It is done for -the following dependency types: - -* Any module referencing a scope that is configured for a non-default - registry has its name scrubbed. (That is, a scope you did a `npm login --scope=@ourscope` for.) -* All git dependencies have their names and specifiers scrubbed. -* All remote tarball dependencies have their names and specifiers scrubbed. -* All local directory and tarball dependencies have their names and specifiers scrubbed. - -The non-reversible identifiers are a sha256 of a session-specific UUID and the -value being replaced, ensuring a consistent value within the payload that is -different between runs. 
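A rough shell sketch of the scrubbing idea described above (hashing a session-specific UUID together with the value being replaced), using common shell tools; this is illustrative only and is not npm's actual implementation:

```
# one session-specific UUID per audit submission (illustrative)
SESSION_ID=$(uuidgen)

# the opaque, non-reversible identifier for a private dependency name
printf '%s' "${SESSION_ID}@myco/private-pkg" | sha256sum
```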
- -## SEE ALSO - -* npm-install(1) -* package-locks(5) -* config(7) diff --git a/doc/cli/npm-bin.md b/doc/cli/npm-bin.md deleted file mode 100644 index 9b76ec529e30a..0000000000000 --- a/doc/cli/npm-bin.md +++ /dev/null @@ -1,19 +0,0 @@ -npm-bin(1) -- Display npm bin folder -==================================== - -## SYNOPSIS - - npm bin [-g|--global] - -## DESCRIPTION - -Print the folder where npm will install executables. - -## SEE ALSO - -* npm-prefix(1) -* npm-root(1) -* npm-folders(5) -* npm-config(1) -* npm-config(7) -* npmrc(5) diff --git a/doc/cli/npm-bugs.md b/doc/cli/npm-bugs.md deleted file mode 100644 index 55bce12f23fa3..0000000000000 --- a/doc/cli/npm-bugs.md +++ /dev/null @@ -1,43 +0,0 @@ -npm-bugs(1) -- Bugs for a package in a web browser maybe -======================================================== - -## SYNOPSIS - - npm bugs [] - - aliases: issues - -## DESCRIPTION - -This command tries to guess at the likely location of a package's -bug tracker URL, and then tries to open it using the `--browser` -config param. If no package name is provided, it will search for -a `package.json` in the current folder and use the `name` property. - -## CONFIGURATION - -### browser - -* Default: OS X: `"open"`, Windows: `"start"`, Others: `"xdg-open"` -* Type: String - -The browser that is called by the `npm bugs` command to open websites. - -### registry - -* Default: https://registry.npmjs.org/ -* Type: url - -The base URL of the npm package registry. - - -## SEE ALSO - -* npm-docs(1) -* npm-view(1) -* npm-publish(1) -* npm-registry(7) -* npm-config(1) -* npm-config(7) -* npmrc(5) -* package.json(5) diff --git a/doc/cli/npm-build.md b/doc/cli/npm-build.md deleted file mode 100644 index 019f225850dc0..0000000000000 --- a/doc/cli/npm-build.md +++ /dev/null @@ -1,25 +0,0 @@ -npm-build(1) -- Build a package -=============================== - -## SYNOPSIS - - npm build [] - -* ``: - A folder containing a `package.json` file in its root. - -## DESCRIPTION - -This is the plumbing command called by `npm link` and `npm install`. - -It should generally be called during installation, but if you need to run it -directly, run: - - npm run-script build - -## SEE ALSO - -* npm-install(1) -* npm-link(1) -* npm-scripts(7) -* package.json(5) diff --git a/doc/cli/npm-bundle.md b/doc/cli/npm-bundle.md deleted file mode 100644 index 69b3d83e458d7..0000000000000 --- a/doc/cli/npm-bundle.md +++ /dev/null @@ -1,14 +0,0 @@ -npm-bundle(1) -- REMOVED -======================== - -## DESCRIPTION - -The `npm bundle` command has been removed in 1.0, for the simple reason -that it is no longer necessary, as the default behavior is now to -install packages into the local space. - -Just use `npm install` now to do what `npm bundle` used to do. - -## SEE ALSO - -* npm-install(1) diff --git a/doc/cli/npm-completion.md b/doc/cli/npm-completion.md deleted file mode 100644 index 57fa3bbd38ce0..0000000000000 --- a/doc/cli/npm-completion.md +++ /dev/null @@ -1,32 +0,0 @@ -npm-completion(1) -- Tab Completion for npm -=========================================== - -## SYNOPSIS - - source <(npm completion) - -## DESCRIPTION - -Enables tab-completion in all npm commands. - -The synopsis above -loads the completions into your current shell. 
Adding it to -your ~/.bashrc or ~/.zshrc will make the completions available -everywhere: - - npm completion >> ~/.bashrc - npm completion >> ~/.zshrc - -You may of course also pipe the output of `npm completion` to a file -such as `/usr/local/etc/bash_completion.d/npm` or -`/etc/bash_completion.d/npm` if you have a system that will read -that file for you. - -When `COMP_CWORD`, `COMP_LINE`, and `COMP_POINT` are defined in the -environment, `npm completion` acts in "plumbing mode", and outputs -completions based on the arguments. - -## SEE ALSO - -* npm-developers(7) -* npm(1) diff --git a/doc/cli/npm-config.md b/doc/cli/npm-config.md deleted file mode 100644 index c60afc167c742..0000000000000 --- a/doc/cli/npm-config.md +++ /dev/null @@ -1,73 +0,0 @@ -npm-config(1) -- Manage the npm configuration files -=================================================== - -## SYNOPSIS - - npm config set [-g|--global] - npm config get - npm config delete - npm config list [-l] [--json] - npm config edit - npm get - npm set [-g|--global] - - aliases: c - -## DESCRIPTION - -npm gets its config settings from the command line, environment -variables, `npmrc` files, and in some cases, the `package.json` file. - -See npmrc(5) for more information about the npmrc files. - -See `npm-config(7)` for a more thorough discussion of the mechanisms -involved. - -The `npm config` command can be used to update and edit the contents -of the user and global npmrc files. - -## Sub-commands - -Config supports the following sub-commands: - -### set - - npm config set key value - -Sets the config key to the value. - -If value is omitted, then it sets it to "true". - -### get - - npm config get key - -Echo the config value to stdout. - -### list - - npm config list - -Show all the config settings. Use `-l` to also show defaults. Use `--json` -to show the settings in json format. - -### delete - - npm config delete key - -Deletes the key from all configuration files. - -### edit - - npm config edit - -Opens the config file in an editor. Use the `--global` flag to edit the -global config. - -## SEE ALSO - -* npm-folders(5) -* npm-config(7) -* package.json(5) -* npmrc(5) -* npm(1) diff --git a/doc/cli/npm-dedupe.md b/doc/cli/npm-dedupe.md deleted file mode 100644 index d68832145f0a5..0000000000000 --- a/doc/cli/npm-dedupe.md +++ /dev/null @@ -1,55 +0,0 @@ -npm-dedupe(1) -- Reduce duplication -=================================== - -## SYNOPSIS - - npm dedupe - npm ddp - - aliases: find-dupes, ddp - -## DESCRIPTION - -Searches the local package tree and attempts to simplify the overall -structure by moving dependencies further up the tree, where they can -be more effectively shared by multiple dependent packages. - -For example, consider this dependency graph: - - a - +-- b <-- depends on c@1.0.x - | `-- c@1.0.3 - `-- d <-- depends on c@~1.0.9 - `-- c@1.0.10 - -In this case, `npm-dedupe(1)` will transform the tree to: - - a - +-- b - +-- d - `-- c@1.0.10 - -Because of the hierarchical nature of node's module lookup, b and d -will both get their dependency met by the single c package at the root -level of the tree. - -The deduplication algorithm walks the tree, moving each dependency as far -up in the tree as possible, even if duplicates are not found. This will -result in both a flat and deduplicated tree. - -If a suitable version exists at the target location in the tree -already, then it will be left untouched, but the other duplicates will -be deleted. - -Arguments are ignored. Dedupe always acts on the entire tree. 
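For example, you might compare the tree before and after deduplication (a minimal, illustrative sequence):

```
npm ls --depth=1   # inspect the current tree
npm dedupe         # hoist duplicated dependencies where possible
npm ls --depth=1   # confirm the flattened result
```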
- -Modules - -Note that this operation transforms the dependency tree, but will never -result in new modules being installed. - -## SEE ALSO - -* npm-ls(1) -* npm-update(1) -* npm-install(1) diff --git a/doc/cli/npm-deprecate.md b/doc/cli/npm-deprecate.md deleted file mode 100644 index 3660f49ab654f..0000000000000 --- a/doc/cli/npm-deprecate.md +++ /dev/null @@ -1,28 +0,0 @@ -npm-deprecate(1) -- Deprecate a version of a package -==================================================== - -## SYNOPSIS - - npm deprecate [@] - -## DESCRIPTION - -This command will update the npm registry entry for a package, providing -a deprecation warning to all who attempt to install it. - -It works on [version ranges](https://semver.npmjs.com/) as well as specific -versions, so you can do something like this: - - npm deprecate my-thing@"< 0.2.3" "critical bug fixed in v0.2.3" - -Note that you must be the package owner to deprecate something. See the -`owner` and `adduser` help topics. - -To un-deprecate a package, specify an empty string (`""`) for the `message` -argument. Note that you must use double quotes with no space between them to -format an empty string. - -## SEE ALSO - -* npm-publish(1) -* npm-registry(7) diff --git a/doc/cli/npm-docs.md b/doc/cli/npm-docs.md deleted file mode 100644 index f5064c55e2829..0000000000000 --- a/doc/cli/npm-docs.md +++ /dev/null @@ -1,44 +0,0 @@ -npm-docs(1) -- Docs for a package in a web browser maybe -======================================================== - -## SYNOPSIS - - npm docs [ [ ...]] - npm docs . - npm home [ [ ...]] - npm home . - -## DESCRIPTION - -This command tries to guess at the likely location of a package's -documentation URL, and then tries to open it using the `--browser` -config param. You can pass multiple package names at once. If no -package name is provided, it will search for a `package.json` in -the current folder and use the `name` property. - -## CONFIGURATION - -### browser - -* Default: OS X: `"open"`, Windows: `"start"`, Others: `"xdg-open"` -* Type: String - -The browser that is called by the `npm docs` command to open websites. - -### registry - -* Default: https://registry.npmjs.org/ -* Type: url - -The base URL of the npm package registry. - - -## SEE ALSO - -* npm-view(1) -* npm-publish(1) -* npm-registry(7) -* npm-config(1) -* npm-config(7) -* npmrc(5) -* package.json(5) diff --git a/doc/cli/npm-edit.md b/doc/cli/npm-edit.md deleted file mode 100644 index f9913a015ad3b..0000000000000 --- a/doc/cli/npm-edit.md +++ /dev/null @@ -1,39 +0,0 @@ -npm-edit(1) -- Edit an installed package -======================================== - -## SYNOPSIS - - npm edit [/...] - -## DESCRIPTION - -Selects a (sub)dependency in the current -working directory and opens the package folder in the default editor -(or whatever you've configured as the npm `editor` config -- see -`npm-config(7)`.) - -After it has been edited, the package is rebuilt so as to pick up any -changes in compiled packages. - -For instance, you can do `npm install connect` to install connect -into your package, and then `npm edit connect` to make a few -changes to your locally installed copy. - -## CONFIGURATION - -### editor - -* Default: `EDITOR` environment variable if set, or `"vi"` on Posix, - or `"notepad"` on Windows. -* Type: path - -The command to run for `npm edit` or `npm config edit`. 
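A small, hypothetical example tying the `editor` config to `npm edit`; the `connect` package and the choice of `vim` are only illustrative:

```
npm install connect
npm config set editor "vim"
npm edit connect      # opens node_modules/connect in vim, then rebuilds it
```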
- -## SEE ALSO - -* npm-folders(5) -* npm-explore(1) -* npm-install(1) -* npm-config(1) -* npm-config(7) -* npmrc(5) diff --git a/doc/cli/npm-explore.md b/doc/cli/npm-explore.md deleted file mode 100644 index 44b77f94340d6..0000000000000 --- a/doc/cli/npm-explore.md +++ /dev/null @@ -1,39 +0,0 @@ -npm-explore(1) -- Browse an installed package -============================================= - -## SYNOPSIS - - npm explore [ -- ] - -## DESCRIPTION - -Spawn a subshell in the directory of the installed package specified. - -If a command is specified, then it is run in the subshell, which then -immediately terminates. - -This is particularly handy in the case of git submodules in the -`node_modules` folder: - - npm explore some-dependency -- git pull origin master - -Note that the package is *not* automatically rebuilt afterwards, so be -sure to use `npm rebuild ` if you make any changes. - -## CONFIGURATION - -### shell - -* Default: SHELL environment variable, or "bash" on Posix, or "cmd" on - Windows -* Type: path - -The shell to run for the `npm explore` command. - -## SEE ALSO - -* npm-folders(5) -* npm-edit(1) -* npm-rebuild(1) -* npm-build(1) -* npm-install(1) diff --git a/doc/cli/npm-help-search.md b/doc/cli/npm-help-search.md deleted file mode 100644 index 74e1011ab0067..0000000000000 --- a/doc/cli/npm-help-search.md +++ /dev/null @@ -1,34 +0,0 @@ -npm-help-search(1) -- Search npm help documentation -=================================================== - -## SYNOPSIS - - npm help-search - -## DESCRIPTION - -This command will search the npm markdown documentation files for the -terms provided, and then list the results, sorted by relevance. - -If only one result is found, then it will show that help topic. - -If the argument to `npm help` is not a known help topic, then it will -call `help-search`. It is rarely if ever necessary to call this -command directly. - -## CONFIGURATION - -### long - -* Type: Boolean -* Default: false - -If true, the "long" flag will cause help-search to output context around -where the terms were found in the documentation. - -If false, then help-search will just list out the help topics found. - -## SEE ALSO - -* npm(1) -* npm-help(1) diff --git a/doc/cli/npm-help.md b/doc/cli/npm-help.md deleted file mode 100644 index 5230082b923fa..0000000000000 --- a/doc/cli/npm-help.md +++ /dev/null @@ -1,38 +0,0 @@ -npm-help(1) -- Get help on npm -============================== - -## SYNOPSIS - - npm help [] - -## DESCRIPTION - -If supplied a topic, then show the appropriate documentation page. - -If the topic does not exist, or if multiple terms are provided, then run -the `help-search` command to find a match. Note that, if `help-search` -finds a single subject, then it will run `help` on that topic, so unique -matches are equivalent to specifying a topic name. - -## CONFIGURATION - -### viewer - -* Default: "man" on Posix, "browser" on Windows -* Type: path - -The program to use to view help content. - -Set to `"browser"` to view html help content in the default web browser. 
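For example, using only the behavior described above:

```
npm config set viewer browser   # render help pages as html in your browser
npm help install                # a known topic opens directly
npm help remove dependency      # multiple terms fall through to help-search
```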
- -## SEE ALSO - -* npm(1) -* README -* npm-folders(5) -* npm-config(1) -* npm-config(7) -* npmrc(5) -* package.json(5) -* npm-help-search(1) -* npm-index(7) diff --git a/doc/cli/npm-init.md b/doc/cli/npm-init.md deleted file mode 100644 index b91bcafae8377..0000000000000 --- a/doc/cli/npm-init.md +++ /dev/null @@ -1,66 +0,0 @@ -npm-init(1) -- create a package.json file -======================================================= - -## SYNOPSIS - - npm init [--force|-f|--yes|-y|--scope] - npm init <@scope> (same as `npx <@scope>/create`) - npm init [<@scope>/] (same as `npx [<@scope>/]create-`) - -## EXAMPLES - -Create a new React-based project using [`create-react-app`](https://npm.im/create-react-app): -``` -$ npm init react-app ./my-react-app -``` - -Create a new `esm`-compatible package using [`create-esm`](https://npm.im/create-esm): -``` -$ mkdir my-esm-lib && cd my-esm-lib -$ npm init esm --yes -``` - -Generate a plain old package.json using legacy init: -``` -$ mkdir my-npm-pkg && cd my-npm-pkg -$ git init -$ npm init -``` - -Generate it without having it ask any questions: -``` -$ npm init -y -``` - -## DESCRIPTION - -`npm init ` can be used to set up a new or existing npm package. - -`initializer` in this case is an npm package named `create-`, which -will be installed by [`npx(1)`](https://npm.im/npx), and then have its main bin -executed -- presumably creating or updating `package.json` and running any other -initialization-related operations. - -The init command is transformed to a corresponding `npx` operation as follows: - -* `npm init foo` -> `npx create-foo` -* `npm init @usr/foo` -> `npx @usr/create-foo` -* `npm init @usr` -> `npx @usr/create` - -Any additional options will be passed directly to the command, so `npm init foo ---hello` will map to `npx create-foo --hello`. - -If the initializer is omitted (by just calling `npm init`), init will fall back -to legacy init behavior. It will ask you a bunch of questions, and then write a -package.json for you. It will attempt to make reasonable guesses based on -existing fields, dependencies, and options selected. It is strictly additive, so -it will keep any fields and values that were already set. You can also use -`-y`/`--yes` to skip the questionnaire altogether. If you pass `--scope`, it -will create a scoped package. - -## SEE ALSO - -* -* package.json(5) -* npm-version(1) -* npm-scope(7) diff --git a/doc/cli/npm-install-ci-test.md b/doc/cli/npm-install-ci-test.md deleted file mode 100644 index 4cbab9144e48f..0000000000000 --- a/doc/cli/npm-install-ci-test.md +++ /dev/null @@ -1,16 +0,0 @@ -# npm install-ci-test(1) -- Install a project with a clean slate and run tests - -## SYNOPSIS - - npm install-ci-test - - alias: npm cit - -## DESCRIPTION - -This command runs an `npm ci` followed immediately by an `npm test`. 
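In other words, the following two invocations are intended to be equivalent:

```
# run the two steps by hand
npm ci
npm test

# or as a single command
npm install-ci-test     # alias: npm cit
```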
- -## SEE ALSO - -- npm-ci(1) -- npm-test(1) diff --git a/doc/cli/npm-install-test.md b/doc/cli/npm-install-test.md deleted file mode 100644 index 471b36f16b565..0000000000000 --- a/doc/cli/npm-install-test.md +++ /dev/null @@ -1,25 +0,0 @@ -# npm install-test(1) -- Install package(s) and run tests - -## SYNOPSIS - - npm install-test (with no args, in package dir) - npm install-test [<@scope>/] - npm install-test [<@scope>/]@ - npm install-test [<@scope>/]@ - npm install-test [<@scope>/]@ - npm install-test - npm install-test - npm install-test - - alias: npm it - common options: [--save|--save-dev|--save-optional] [--save-exact] [--dry-run] - -## DESCRIPTION - -This command runs an `npm install` followed immediately by an `npm test`. It -takes exactly the same arguments as `npm install`. - -## SEE ALSO - -- npm-install(1) -- npm-test(1) diff --git a/doc/cli/npm-install.md b/doc/cli/npm-install.md deleted file mode 100644 index 336311dbfb2bb..0000000000000 --- a/doc/cli/npm-install.md +++ /dev/null @@ -1,457 +0,0 @@ -npm-install(1) -- Install a package -=================================== - -## SYNOPSIS - - npm install (with no args, in package dir) - npm install [<@scope>/] - npm install [<@scope>/]@ - npm install [<@scope>/]@ - npm install [<@scope>/]@ - npm install :/ - npm install - npm install - npm install - npm install - - alias: npm i - common options: [-P|--save-prod|-D|--save-dev|-O|--save-optional] [-E|--save-exact] [-B|--save-bundle] [--no-save] [--dry-run] - -## DESCRIPTION - -This command installs a package, and any packages that it depends on. If the -package has a package-lock or shrinkwrap file, the installation of dependencies -will be driven by that, with an `npm-shrinkwrap.json` taking precedence if both -files exist. See package-lock.json(5) and npm-shrinkwrap(1). - -A `package` is: - -* a) a folder containing a program described by a `package.json(5)` file -* b) a gzipped tarball containing (a) -* c) a url that resolves to (b) -* d) a `@` that is published on the registry (see `npm-registry(7)`) with (c) -* e) a `@` (see `npm-dist-tag(1)`) that points to (d) -* f) a `` that has a "latest" tag satisfying (e) -* g) a `` that resolves to (a) - -Even if you never publish your package, you can still get a lot of -benefits of using npm if you just want to write a node program (a), and -perhaps if you also want to be able to easily install it elsewhere -after packing it up into a tarball (b). - - -* `npm install` (in package directory, no arguments): - - Install the dependencies in the local node_modules folder. - - In global mode (ie, with `-g` or `--global` appended to the command), - it installs the current package context (ie, the current working - directory) as a global package. - - By default, `npm install` will install all modules listed as dependencies - in `package.json(5)`. - - With the `--production` flag (or when the `NODE_ENV` environment variable - is set to `production`), npm will not install modules listed in - `devDependencies`. - - > NOTE: The `--production` flag has no particular meaning when adding a - dependency to a project. - -* `npm install `: - - Install the package in the directory as a symlink in the current project. - Its dependencies will be installed before it's linked. If `` sits - inside the root of your project, its dependencies may be hoisted to the - toplevel `node_modules` as they would for other types of dependencies. - -* `npm install `: - - Install a package that is sitting on the filesystem. 
Note: if you just want - to link a dev directory into your npm root, you can do this more easily by - using `npm link`. - - Tarball requirements: - * The filename *must* use `.tar`, `.tar.gz`, or `.tgz` as - the extension. - * The package contents should reside in a subfolder inside the tarball (usually it is called `package/`). npm strips one directory layer when installing the package (an equivalent of `tar x --strip-components=1` is run). - * The package must contain a `package.json` file with `name` and `version` properties. - - Example: - - npm install ./package.tgz - -* `npm install `: - - Fetch the tarball url, and then install it. In order to distinguish between - this and other options, the argument must start with "http://" or "https://" - - Example: - - npm install https://github.com/indexzero/forever/tarball/v0.5.6 - -* `npm install [<@scope>/]`: - - Do a `@` install, where `` is the "tag" config. (See - `npm-config(7)`. The config's default value is `latest`.) - - In most cases, this will install the version of the modules tagged as - `latest` on the npm registry. - - Example: - - npm install sax - - `npm install` saves any specified packages into `dependencies` by default. - Additionally, you can control where and how they get saved with some - additional flags: - - * `-P, --save-prod`: Package will appear in your `dependencies`. This is the - default unless `-D` or `-O` are present. - - * `-D, --save-dev`: Package will appear in your `devDependencies`. - - * `-O, --save-optional`: Package will appear in your `optionalDependencies`. - - * `--no-save`: Prevents saving to `dependencies`. - - When using any of the above options to save dependencies to your - package.json, there are two additional, optional flags: - - * `-E, --save-exact`: Saved dependencies will be configured with an - exact version rather than using npm's default semver range - operator. - - * `-B, --save-bundle`: Saved dependencies will also be added to your `bundleDependencies` list. - - Further, if you have an `npm-shrinkwrap.json` or `package-lock.json` then it - will be updated as well. - - `` is optional. The package will be downloaded from the registry - associated with the specified scope. If no registry is associated with - the given scope the default registry is assumed. See `npm-scope(7)`. - - Note: if you do not include the @-symbol on your scope name, npm will - interpret this as a GitHub repository instead, see below. Scopes names - must also be followed by a slash. - - Examples: - - npm install sax - npm install githubname/reponame - npm install @myorg/privatepackage - npm install node-tap --save-dev - npm install dtrace-provider --save-optional - npm install readable-stream --save-exact - npm install ansi-regex --save-bundle - - - **Note**: If there is a file or folder named `` in the current - working directory, then it will try to install that, and only try to - fetch the package by name if it is not valid. - -* `npm install [<@scope>/]@`: - - Install the version of the package that is referenced by the specified tag. - If the tag does not exist in the registry data for that package, then this - will fail. - - Example: - - npm install sax@latest - npm install @myorg/mypackage@latest - -* `npm install [<@scope>/]@`: - - Install the specified version of the package. This will fail if the - version has not been published to the registry. 
- - Example: - - npm install sax@0.1.1 - npm install @myorg/privatepackage@1.5.0 - -* `npm install [<@scope>/]@`: - - Install a version of the package matching the specified version range. This - will follow the same rules for resolving dependencies described in `package.json(5)`. - - Note that most version ranges must be put in quotes so that your shell will - treat it as a single argument. - - Example: - - npm install sax@">=0.1.0 <0.2.0" - npm install @myorg/privatepackage@">=0.1.0 <0.2.0" - -* `npm install `: - - Installs the package from the hosted git provider, cloning it with `git`. - For a full git remote url, only that URL will be attempted. - - ://[[:]@][:][:][/][# | #semver:] - - `` is one of `git`, `git+ssh`, `git+http`, `git+https`, or - `git+file`. - - If `#` is provided, it will be used to clone exactly that - commit. If the commit-ish has the format `#semver:`, `` can - be any valid semver range or exact version, and npm will look for any tags - or refs matching that range in the remote repository, much as it would for a - registry dependency. If neither `#` or `#semver:` is - specified, then the default branch of the repository is used. - - If the repository makes use of submodules, those submodules will be cloned - as well. - - If the package being installed contains a `prepare` script, its - `dependencies` and `devDependencies` will be installed, and the prepare - script will be run, before the package is packaged and installed. - - The following git environment variables are recognized by npm and will be - added to the environment when running git: - - * `GIT_ASKPASS` - * `GIT_EXEC_PATH` - * `GIT_PROXY_COMMAND` - * `GIT_SSH` - * `GIT_SSH_COMMAND` - * `GIT_SSL_CAINFO` - * `GIT_SSL_NO_VERIFY` - - See the git man page for details. - - Examples: - - npm install git+ssh://git@github.com:npm/cli.git#v1.0.27 - npm install git+ssh://git@github.com:npm/cli#semver:^5.0 - npm install git+https://isaacs@github.com/npm/cli.git - npm install git://github.com/npm/cli.git#v1.0.27 - GIT_SSH_COMMAND='ssh -i ~/.ssh/custom_ident' npm install git+ssh://git@github.com:npm/cli.git - -* `npm install /[#]`: -* `npm install github:/[#]`: - - Install the package at `https://github.com/githubname/githubrepo` by - attempting to clone it using `git`. - - If `#` is provided, it will be used to clone exactly that - commit. If the commit-ish has the format `#semver:`, `` can - be any valid semver range or exact version, and npm will look for any tags - or refs matching that range in the remote repository, much as it would for a - registry dependency. If neither `#` or `#semver:` is - specified, then `master` is used. - - As with regular git dependencies, `dependencies` and `devDependencies` will - be installed if the package has a `prepare` script, before the package is - done installing. - - Examples: - - npm install mygithubuser/myproject - npm install github:mygithubuser/myproject - -* `npm install gist:[/][#|#semver:]`: - - Install the package at `https://gist.github.com/gistID` by attempting to - clone it using `git`. The GitHub username associated with the gist is - optional and will not be saved in `package.json`. - - As with regular git dependencies, `dependencies` and `devDependencies` will - be installed if the package has a `prepare` script, before the package is - done installing. - - Example: - - npm install gist:101a11beef - -* `npm install bitbucket:/[#]`: - - Install the package at `https://bitbucket.org/bitbucketname/bitbucketrepo` - by attempting to clone it using `git`. 
- - If `#` is provided, it will be used to clone exactly that - commit. If the commit-ish has the format `#semver:`, `` can - be any valid semver range or exact version, and npm will look for any tags - or refs matching that range in the remote repository, much as it would for a - registry dependency. If neither `#` or `#semver:` is - specified, then `master` is used. - - As with regular git dependencies, `dependencies` and `devDependencies` will - be installed if the package has a `prepare` script, before the package is - done installing. - - Example: - - npm install bitbucket:mybitbucketuser/myproject - -* `npm install gitlab:/[#]`: - - Install the package at `https://gitlab.com/gitlabname/gitlabrepo` - by attempting to clone it using `git`. - - If `#` is provided, it will be used to clone exactly that - commit. If the commit-ish has the format `#semver:`, `` can - be any valid semver range or exact version, and npm will look for any tags - or refs matching that range in the remote repository, much as it would for a - registry dependency. If neither `#` or `#semver:` is - specified, then `master` is used. - - As with regular git dependencies, `dependencies` and `devDependencies` will - be installed if the package has a `prepare` script, before the package is - done installing. - - Example: - - npm install gitlab:mygitlabuser/myproject - npm install gitlab:myusr/myproj#semver:^5.0 - -You may combine multiple arguments, and even multiple types of arguments. -For example: - - npm install sax@">=0.1.0 <0.2.0" bench supervisor - -The `--tag` argument will apply to all of the specified install targets. If a -tag with the given name exists, the tagged version is preferred over newer -versions. - -The `--dry-run` argument will report in the usual way what the install would -have done without actually installing anything. - -The `--package-lock-only` argument will only update the `package-lock.json`, -instead of checking `node_modules` and downloading dependencies. - -The `-f` or `--force` argument will force npm to fetch remote resources even if a -local copy exists on disk. - - npm install sax --force - -The `-g` or `--global` argument will cause npm to install the package globally -rather than locally. See `npm-folders(5)`. - -The `--global-style` argument will cause npm to install the package into -your local `node_modules` folder with the same layout it uses with the -global `node_modules` folder. Only your direct dependencies will show in -`node_modules` and everything they depend on will be flattened in their -`node_modules` folders. This obviously will eliminate some deduping. - -The `--ignore-scripts` argument will cause npm to not execute any -scripts defined in the package.json. See `npm-scripts(7)`. - -The `--legacy-bundling` argument will cause npm to install the package such -that versions of npm prior to 1.4, such as the one included with node 0.8, -can install the package. This eliminates all automatic deduping. - -The `--link` argument will cause npm to link global installs into the -local space in some cases. - -The `--no-bin-links` argument will prevent npm from creating symlinks for -any binaries the package might contain. - -The `--no-optional` argument will prevent optional dependencies from -being installed. - -The `--no-shrinkwrap` argument, which will ignore an available -package lock or shrinkwrap file and use the package.json instead. - -The `--no-package-lock` argument will prevent npm from creating a -`package-lock.json` file. 
When running with package-lock's disabled npm -will not automatically prune your node modules when installing. - -The `--nodedir=/path/to/node/source` argument will allow npm to find the -node source code so that npm can compile native modules. - -The `--only={prod[uction]|dev[elopment]}` argument will cause either only -`devDependencies` or only non-`devDependencies` to be installed regardless of the `NODE_ENV`. - -The `--no-audit` argument can be used to disable sending of audit reports to -the configured registries. See `npm-audit(1)` for details on what is sent. - -See `npm-config(7)`. Many of the configuration params have some -effect on installation, since that's most of what npm does. - -## ALGORITHM - -To install a package, npm uses the following algorithm: - - load the existing node_modules tree from disk - clone the tree - fetch the package.json and assorted metadata and add it to the clone - walk the clone and add any missing dependencies - dependencies will be added as close to the top as is possible - without breaking any other modules - compare the original tree with the cloned tree and make a list of - actions to take to convert one to the other - execute all of the actions, deepest first - kinds of actions are install, update, remove and move - -For this `package{dep}` structure: `A{B,C}, B{C}, C{D}`, -this algorithm produces: - - A - +-- B - +-- C - +-- D - -That is, the dependency from B to C is satisfied by the fact that A -already caused C to be installed at a higher level. D is still installed -at the top level because nothing conflicts with it. - -For `A{B,C}, B{C,D@1}, C{D@2}`, this algorithm produces: - - A - +-- B - +-- C - `-- D@2 - +-- D@1 - -Because B's D@1 will be installed in the top level, C now has to install D@2 -privately for itself. This algorithm is deterministic, but different trees may -be produced if two dependencies are requested for installation in a different -order. - -See npm-folders(5) for a more detailed description of the specific -folder structures that npm creates. - -### Limitations of npm's Install Algorithm - -npm will refuse to install any package with an identical name to the -current package. This can be overridden with the `--force` flag, but in -most cases can simply be addressed by changing the local package name. - -There are some very rare and pathological edge-cases where a cycle can -cause npm to try to install a never-ending tree of packages. Here is -the simplest case: - - A -> B -> A' -> B' -> A -> B -> A' -> B' -> A -> ... - -where `A` is some version of a package, and `A'` is a different version -of the same package. Because `B` depends on a different version of `A` -than the one that is already in the tree, it must install a separate -copy. The same is true of `A'`, which must install `B'`. Because `B'` -depends on the original version of `A`, which has been overridden, the -cycle falls into infinite regress. - -To avoid this situation, npm flat-out refuses to install any -`name@version` that is already present anywhere in the tree of package -folder ancestors. A more correct, but more complex, solution would be -to symlink the existing version into the new location. If this ever -affects a real use-case, it will be investigated. 
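When in doubt about where the algorithm placed something, a dry run plus `npm ls` is a cheap way to check; the `sax` package here is just an example:

```
npm install sax --dry-run   # report what would be installed, without touching disk
npm install sax
npm ls sax                  # show the path(s) at which sax was actually placed
```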
- -## SEE ALSO - -* npm-folders(5) -* npm-update(1) -* npm-audit(1) -* npm-link(1) -* npm-rebuild(1) -* npm-scripts(7) -* npm-build(1) -* npm-config(1) -* npm-config(7) -* npmrc(5) -* npm-registry(7) -* npm-dist-tag(1) -* npm-uninstall(1) -* npm-shrinkwrap(1) -* package.json(5) diff --git a/doc/cli/npm-link.md b/doc/cli/npm-link.md deleted file mode 100644 index 3f6dc6e5bde7e..0000000000000 --- a/doc/cli/npm-link.md +++ /dev/null @@ -1,77 +0,0 @@ -npm-link(1) -- Symlink a package folder -======================================= - -## SYNOPSIS - - npm link (in package dir) - npm link [<@scope>/][@] - - alias: npm ln - -## DESCRIPTION - -Package linking is a two-step process. - -First, `npm link` in a package folder will create a symlink in the global folder -`{prefix}/lib/node_modules/` that links to the package where the `npm -link` command was executed. (see `npm-config(7)` for the value of `prefix`). It -will also link any bins in the package to `{prefix}/bin/{name}`. - -Next, in some other location, `npm link package-name` will create a -symbolic link from globally-installed `package-name` to `node_modules/` -of the current folder. - -Note that `package-name` is taken from `package.json`, -not from directory name. - -The package name can be optionally prefixed with a scope. See `npm-scope(7)`. -The scope must be preceded by an @-symbol and followed by a slash. - -When creating tarballs for `npm publish`, the linked packages are -"snapshotted" to their current state by resolving the symbolic links. - -This is handy for installing your own stuff, so that you can work on it and -test it iteratively without having to continually rebuild. - -For example: - - cd ~/projects/node-redis # go into the package directory - npm link # creates global link - cd ~/projects/node-bloggy # go into some other package directory. - npm link redis # link-install the package - -Now, any changes to ~/projects/node-redis will be reflected in -~/projects/node-bloggy/node_modules/node-redis/. Note that the link should -be to the package name, not the directory name for that package. - -You may also shortcut the two steps in one. For example, to do the -above use-case in a shorter way: - - cd ~/projects/node-bloggy # go into the dir of your main project - npm link ../node-redis # link the dir of your dependency - -The second line is the equivalent of doing: - - (cd ../node-redis; npm link) - npm link redis - -That is, it first creates a global link, and then links the global -installation target into your project's `node_modules` folder. - -Note that in this case, you are referring to the directory name, `node-redis`, -rather than the package name `redis`. - -If your linked package is scoped (see `npm-scope(7)`) your link command must -include that scope, e.g. - - npm link @myorg/privatepackage - -## SEE ALSO - -* npm-developers(7) -* package.json(5) -* npm-install(1) -* npm-folders(5) -* npm-config(1) -* npm-config(7) -* npmrc(5) diff --git a/doc/cli/npm-logout.md b/doc/cli/npm-logout.md deleted file mode 100644 index fe6e7b990a289..0000000000000 --- a/doc/cli/npm-logout.md +++ /dev/null @@ -1,45 +0,0 @@ -npm-logout(1) -- Log out of the registry -======================================== - -## SYNOPSIS - - npm logout [--registry=] [--scope=<@scope>] - -## DESCRIPTION - -When logged into a registry that supports token-based authentication, tell the -server to end this token's session. This will invalidate the token everywhere -you're using it, not just for the current environment. 
- -When logged into a legacy registry that uses username and password authentication, this will -clear the credentials in your user configuration. In this case, it will _only_ affect -the current environment. - -If `--scope` is provided, this will find the credentials for the registry -connected to that scope, if set. - -## CONFIGURATION - -### registry - -Default: https://registry.npmjs.org/ - -The base URL of the npm package registry. If `scope` is also specified, -it takes precedence. - -### scope - -Default: The scope of your current project, if any, otherwise none. - -If specified, you will be logged out of the specified scope. See `npm-scope(7)`. - - npm logout --scope=@myco - -## SEE ALSO - -* npm-adduser(1) -* npm-registry(7) -* npm-config(1) -* npm-config(7) -* npmrc(5) -* npm-whoami(1) diff --git a/doc/cli/npm-ls.md b/doc/cli/npm-ls.md deleted file mode 100644 index 7b10a19d69b2c..0000000000000 --- a/doc/cli/npm-ls.md +++ /dev/null @@ -1,111 +0,0 @@ -npm-ls(1) -- List installed packages -====================================== - -## SYNOPSIS - - npm ls [[<@scope>/] ...] - - aliases: list, la, ll - -## DESCRIPTION - -This command will print to stdout all the versions of packages that are -installed, as well as their dependencies, in a tree-structure. - -Positional arguments are `name@version-range` identifiers, which will -limit the results to only the paths to the packages named. Note that -nested packages will *also* show the paths to the specified packages. -For example, running `npm ls promzard` in npm's source tree will show: - - npm@@VERSION@ /path/to/npm - └─┬ init-package-json@0.0.4 - └── promzard@0.1.5 - -It will print out extraneous, missing, and invalid packages. - -If a project specifies git urls for dependencies these are shown -in parentheses after the name@version to make it easier for users to -recognize potential forks of a project. - -The tree shown is the logical dependency tree, based on package -dependencies, not the physical layout of your node_modules folder. - -When run as `ll` or `la`, it shows extended information by default. - -## CONFIGURATION - -### json - -* Default: false -* Type: Boolean - -Show information in JSON format. - -### long - -* Default: false -* Type: Boolean - -Show extended information. - -### parseable - -* Default: false -* Type: Boolean - -Show parseable output instead of tree view. - -### global - -* Default: false -* Type: Boolean - -List packages in the global install prefix instead of in the current -project. - -### depth - -* Type: Int - -Max display depth of the dependency tree. - -### prod / production - -* Type: Boolean -* Default: false - -Display only the dependency tree for packages in `dependencies`. - -### dev / development - -* Type: Boolean -* Default: false - -Display only the dependency tree for packages in `devDependencies`. - -### only - -* Type: String - -When "dev" or "development", is an alias to `dev`. - -When "prod" or "production", is an alias to `production`. 
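These configuration flags combine; for example:

```
# top-level production dependencies only, as machine-readable JSON
npm ls --only=prod --depth=0 --json
```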
- -### link - -* Type: Boolean -* Default: false - -Display only dependencies which are linked - -## SEE ALSO - -* npm-config(1) -* npm-config(7) -* npmrc(5) -* npm-folders(5) -* npm-install(1) -* npm-link(1) -* npm-prune(1) -* npm-outdated(1) -* npm-update(1) diff --git a/doc/cli/npm-owner.md b/doc/cli/npm-owner.md deleted file mode 100644 index 94010298c18ef..0000000000000 --- a/doc/cli/npm-owner.md +++ /dev/null @@ -1,39 +0,0 @@ -npm-owner(1) -- Manage package owners -===================================== - -## SYNOPSIS - - npm owner add [<@scope>/] - npm owner rm [<@scope>/] - npm owner ls [<@scope>/] - - aliases: author - -## DESCRIPTION - -Manage ownership of published packages. - -* ls: - List all the users who have access to modify a package and push new versions. - Handy when you need to know who to bug for help. -* add: - Add a new user as a maintainer of a package. This user is enabled to modify - metadata, publish new versions, and add other owners. -* rm: - Remove a user from the package owner list. This immediately revokes their - privileges. - -Note that there is only one level of access. Either you can modify a package, -or you can't. Future versions may contain more fine-grained access levels, but -that is not implemented at this time. - -If you have two-factor authentication enabled with `auth-and-writes` then -you'll need to include an otp on the command line when changing ownership -with `--otp`. - -## SEE ALSO - -* npm-publish(1) -* npm-registry(7) -* npm-adduser(1) -* npm-disputes(7) diff --git a/doc/cli/npm-pack.md b/doc/cli/npm-pack.md deleted file mode 100644 index 807663ac210fe..0000000000000 --- a/doc/cli/npm-pack.md +++ /dev/null @@ -1,30 +0,0 @@ -npm-pack(1) -- Create a tarball from a package -============================================== - -## SYNOPSIS - - npm pack [[<@scope>/]...] [--dry-run] - -## DESCRIPTION - -For anything that's installable (that is, a package folder, tarball, -tarball url, name@tag, name@version, name, or scoped name), this -command will fetch it to the cache, and then copy the tarball to the -current working directory as `-.tgz`, and then write -the filenames out to stdout. - -If the same package is specified multiple times, then the file will be -overwritten the second time. - -If no arguments are supplied, then npm packs the current package folder. - -The `--dry-run` argument will do everything that pack usually does without -actually packing anything. Reports on what would have gone into the tarball. - -## SEE ALSO - -* npm-cache(1) -* npm-publish(1) -* npm-config(1) -* npm-config(7) -* npmrc(5) diff --git a/doc/cli/npm-ping.md b/doc/cli/npm-ping.md deleted file mode 100644 index 3b2da9944bb24..0000000000000 --- a/doc/cli/npm-ping.md +++ /dev/null @@ -1,24 +0,0 @@ -npm-ping(1) -- Ping npm registry -================================ - -## SYNOPSIS - - npm ping [--registry ] - -## DESCRIPTION - -Ping the configured or given npm registry and verify authentication. -If it works it will output something like: -``` -Ping success: {*Details about registry*} -``` -otherwise you will get: -``` -Ping error: {*Detail about error} -``` - -## SEE ALSO - -* npm-config(1) -* npm-config(7) -* npmrc(5) diff --git a/doc/cli/npm-prefix.md b/doc/cli/npm-prefix.md deleted file mode 100644 index f262a36a752b5..0000000000000 --- a/doc/cli/npm-prefix.md +++ /dev/null @@ -1,23 +0,0 @@ -npm-prefix(1) -- Display prefix -=============================== - -## SYNOPSIS - - npm prefix [-g] - -## DESCRIPTION - -Print the local prefix to standard out. 
This is the closest parent directory -to contain a package.json file unless `-g` is also specified. - -If `-g` is specified, this will be the value of the global prefix. See -`npm-config(7)` for more detail. - -## SEE ALSO - -* npm-root(1) -* npm-bin(1) -* npm-folders(5) -* npm-config(1) -* npm-config(7) -* npmrc(5) diff --git a/doc/cli/npm-prune.md b/doc/cli/npm-prune.md deleted file mode 100644 index 0dde244251122..0000000000000 --- a/doc/cli/npm-prune.md +++ /dev/null @@ -1,38 +0,0 @@ -npm-prune(1) -- Remove extraneous packages -========================================== - -## SYNOPSIS - - npm prune [[<@scope>/]...] [--production] [--dry-run] [--json] - -## DESCRIPTION - -This command removes "extraneous" packages. If a package name is -provided, then only packages matching one of the supplied names are -removed. - -Extraneous packages are packages that are not listed on the parent -package's dependencies list. - -If the `--production` flag is specified or the `NODE_ENV` environment -variable is set to `production`, this command will remove the packages -specified in your `devDependencies`. Setting `--no-production` will -negate `NODE_ENV` being set to `production`. - -If the `--dry-run` flag is used then no changes will actually be made. - -If the `--json` flag is used then the changes `npm prune` made (or would -have made with `--dry-run`) are printed as a JSON object. - -In normal operation with package-locks enabled, extraneous modules are -pruned automatically when modules are installed and you'll only need -this command with the `--production` flag. - -If you've disabled package-locks then extraneous modules will not be removed -and it's up to you to run `npm prune` from time-to-time to remove them. - -## SEE ALSO - -* npm-uninstall(1) -* npm-folders(5) -* npm-ls(1) diff --git a/doc/cli/npm-publish.md b/doc/cli/npm-publish.md deleted file mode 100644 index c582ad8470c2a..0000000000000 --- a/doc/cli/npm-publish.md +++ /dev/null @@ -1,76 +0,0 @@ -npm-publish(1) -- Publish a package -=================================== - - -## SYNOPSIS - - npm publish [|] [--tag ] [--access ] [--otp otpcode] [--dry-run] - - Publishes '.' if no argument supplied - Sets tag 'latest' if no --tag specified - -## DESCRIPTION - -Publishes a package to the registry so that it can be installed by name. All -files in the package directory are included if no local `.gitignore` or -`.npmignore` file exists. If both files exist and a file is ignored by -`.gitignore` but not by `.npmignore` then it will be included. See -`npm-developers(7)` for full details on what's included in the published -package, as well as details on how the package is built. - -By default npm will publish to the public registry. This can be overridden by -specifying a different default registry or using a `npm-scope(7)` in the name -(see `package.json(5)`). - -* ``: - A folder containing a package.json file - -* ``: - A url or file path to a gzipped tar archive containing a single folder - with a package.json file inside. - -* `[--tag ]` - Registers the published package with the given tag, such that `npm install - @` will install this version. By default, `npm publish` updates - and `npm install` installs the `latest` tag. See `npm-dist-tag(1)` for - details about tags. - -* `[--access ]` - Tells the registry whether this package should be published as public or - restricted. Only applies to scoped packages, which default to `restricted`. 
- If you don't have a paid account, you must publish with `--access public` - to publish scoped packages. - -* `[--otp ]` - If you have two-factor authentication enabled in `auth-and-writes` mode - then you can provide a code from your authenticator with this. If you - don't include this and you're running from a TTY then you'll be prompted. - -* `[--dry-run]` - As of `npm@6`, does everything publish would do except actually publishing - to the registry. Reports the details of what would have been published. - -Fails if the package name and version combination already exists in -the specified registry. - -Once a package is published with a given name and version, that -specific name and version combination can never be used again, even if -it is removed with npm-unpublish(1). - -As of `npm@5`, both a sha1sum and an integrity field with a sha512sum of the -tarball will be submitted to the registry during publication. Subsequent -installs will use the strongest supported algorithm to verify downloads. - -Similar to `--dry-run` see `npm-pack(1)`, which figures out the files to be -included and packs them into a tarball to be uploaded to the registry. - -## SEE ALSO - -* npm-registry(7) -* npm-scope(7) -* npm-adduser(1) -* npm-owner(1) -* npm-deprecate(1) -* npm-dist-tag(1) -* npm-pack(1) -* npm-profile(1) diff --git a/doc/cli/npm-rebuild.md b/doc/cli/npm-rebuild.md deleted file mode 100644 index 437737d9f4b56..0000000000000 --- a/doc/cli/npm-rebuild.md +++ /dev/null @@ -1,19 +0,0 @@ -npm-rebuild(1) -- Rebuild a package -=================================== - -## SYNOPSIS - - npm rebuild [[<@scope>/]...] - - alias: npm rb - -## DESCRIPTION - -This command runs the `npm build` command on the matched folders. This is useful -when you install a new version of node, and must recompile all your C++ addons with -the new binary. - -## SEE ALSO - -* npm-build(1) -* npm-install(1) diff --git a/doc/cli/npm-repo.md b/doc/cli/npm-repo.md deleted file mode 100644 index 523e135e8cc31..0000000000000 --- a/doc/cli/npm-repo.md +++ /dev/null @@ -1,27 +0,0 @@ -npm-repo(1) -- Open package repository page in the browser -======================================================== - -## SYNOPSIS - - npm repo [] - -## DESCRIPTION - -This command tries to guess at the likely location of a package's -repository URL, and then tries to open it using the `--browser` -config param. If no package name is provided, it will search for -a `package.json` in the current folder and use the `name` property. - -## CONFIGURATION - -### browser - -* Default: OS X: `"open"`, Windows: `"start"`, Others: `"xdg-open"` -* Type: String - -The browser that is called by the `npm repo` command to open websites. - -## SEE ALSO - -* npm-docs(1) -* npm-config(1) diff --git a/doc/cli/npm-restart.md b/doc/cli/npm-restart.md deleted file mode 100644 index 1aa0c57a6829e..0000000000000 --- a/doc/cli/npm-restart.md +++ /dev/null @@ -1,40 +0,0 @@ -npm-restart(1) -- Restart a package -=================================== - -## SYNOPSIS - - npm restart [-- ] - -## DESCRIPTION - -This restarts a package. - -This runs a package's "stop", "restart", and "start" scripts, and associated -pre- and post- scripts, in the order given below: - -1. prerestart -2. prestop -3. stop -4. poststop -5. restart -6. prestart -7. start -8. poststart -9. postrestart - -## NOTE - -Note that the "restart" script is run **in addition to** the "stop" -and "start" scripts, not instead of them. - -This is the behavior as of `npm` major version 2. 
A change in this -behavior will be accompanied by an increase in major version number - -## SEE ALSO - -* npm-run-script(1) -* npm-scripts(7) -* npm-test(1) -* npm-start(1) -* npm-stop(1) -* npm-restart(3) \ No newline at end of file diff --git a/doc/cli/npm-root.md b/doc/cli/npm-root.md deleted file mode 100644 index a1d5bf8629913..0000000000000 --- a/doc/cli/npm-root.md +++ /dev/null @@ -1,19 +0,0 @@ -npm-root(1) -- Display npm root -=============================== - -## SYNOPSIS - - npm root [-g] - -## DESCRIPTION - -Print the effective `node_modules` folder to standard out. - -## SEE ALSO - -* npm-prefix(1) -* npm-bin(1) -* npm-folders(5) -* npm-config(1) -* npm-config(7) -* npmrc(5) diff --git a/doc/cli/npm-shrinkwrap.md b/doc/cli/npm-shrinkwrap.md deleted file mode 100644 index 4c223a86cc1b7..0000000000000 --- a/doc/cli/npm-shrinkwrap.md +++ /dev/null @@ -1,25 +0,0 @@ -npm-shrinkwrap(1) -- Lock down dependency versions for publication -===================================================== - -## SYNOPSIS - - npm shrinkwrap - -## DESCRIPTION - -This command repurposes `package-lock.json` into a publishable -`npm-shrinkwrap.json` or simply creates a new one. The file created and updated -by this command will then take precedence over any other existing or future -`package-lock.json` files. For a detailed explanation of the design and purpose -of package locks in npm, see npm-package-locks(5). - -## SEE ALSO - -* npm-install(1) -* npm-run-script(1) -* npm-scripts(7) -* package.json(5) -* npm-package-locks(5) -* package-lock.json(5) -* npm-shrinkwrap.json(5) -* npm-ls(1) diff --git a/doc/cli/npm-star.md b/doc/cli/npm-star.md deleted file mode 100644 index 87d90b560c202..0000000000000 --- a/doc/cli/npm-star.md +++ /dev/null @@ -1,22 +0,0 @@ -npm-star(1) -- Mark your favorite packages -========================================== - -## SYNOPSIS - - npm star [...] - npm unstar [...] - -## DESCRIPTION - -"Starring" a package means that you have some interest in it. It's -a vaguely positive way to show that you care. - -"Unstarring" is the same thing, but in reverse. - -It's a boolean thing. Starring repeatedly has no additional effect. - -## SEE ALSO - -* npm-view(1) -* npm-whoami(1) -* npm-adduser(1) diff --git a/doc/cli/npm-stars.md b/doc/cli/npm-stars.md deleted file mode 100644 index 1e225be29f3ed..0000000000000 --- a/doc/cli/npm-stars.md +++ /dev/null @@ -1,21 +0,0 @@ -npm-stars(1) -- View packages marked as favorites -================================================= - -## SYNOPSIS - - npm stars [] - -## DESCRIPTION - -If you have starred a lot of neat things and want to find them again -quickly this command lets you do just that. - -You may also want to see your friend's favorite packages, in this case -you will most certainly enjoy this command. - -## SEE ALSO - -* npm-star(1) -* npm-view(1) -* npm-whoami(1) -* npm-adduser(1) diff --git a/doc/cli/npm-start.md b/doc/cli/npm-start.md deleted file mode 100644 index e43f02149935a..0000000000000 --- a/doc/cli/npm-start.md +++ /dev/null @@ -1,24 +0,0 @@ -npm-start(1) -- Start a package -=============================== - -## SYNOPSIS - - npm start [-- ] - -## DESCRIPTION - -This runs an arbitrary command specified in the package's `"start"` property of -its `"scripts"` object. If no `"start"` property is specified on the -`"scripts"` object, it will run `node server.js`. - -As of [`npm@2.0.0`](https://blog.npmjs.org/post/98131109725/npm-2-0-0), you can -use custom arguments when executing scripts. Refer to npm-run-script(1) for -more details. 
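As a concrete sketch of the argument pass-through described above: the commands below assume a hypothetical package whose `"start"` script runs `node server.js`, and the `--port 8080` flag is invented purely for illustration.

```
# Runs the package's "start" script, or `node server.js` if none is defined
npm start

# Arguments after `--` are forwarded to the script itself, so for the
# assumed package this effectively runs `node server.js --port 8080`
npm start -- --port 8080
```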
- -## SEE ALSO - -* npm-run-script(1) -* npm-scripts(7) -* npm-test(1) -* npm-restart(1) -* npm-stop(1) diff --git a/doc/cli/npm-stop.md b/doc/cli/npm-stop.md deleted file mode 100644 index 92b14b417962b..0000000000000 --- a/doc/cli/npm-stop.md +++ /dev/null @@ -1,18 +0,0 @@ -npm-stop(1) -- Stop a package -============================= - -## SYNOPSIS - - npm stop [-- ] - -## DESCRIPTION - -This runs a package's "stop" script, if one was provided. - -## SEE ALSO - -* npm-run-script(1) -* npm-scripts(7) -* npm-test(1) -* npm-start(1) -* npm-restart(1) diff --git a/doc/cli/npm-team.md b/doc/cli/npm-team.md deleted file mode 100644 index 9e01a451c7945..0000000000000 --- a/doc/cli/npm-team.md +++ /dev/null @@ -1,58 +0,0 @@ -npm-team(1) -- Manage organization teams and team memberships -============================================================= - -## SYNOPSIS - - npm team create - npm team destroy - - npm team add - npm team rm - - npm team ls | - - npm team edit - -## DESCRIPTION - -Used to manage teams in organizations, and change team memberships. Does not -handle permissions for packages. - -Teams must always be fully qualified with the organization/scope they belong to -when operating on them, separated by a colon (`:`). That is, if you have a -`developers` team on a `foo` organization, you must always refer to that team as -`foo:developers` in these commands. - -* create / destroy: - Create a new team, or destroy an existing one. - -* add / rm: - Add a user to an existing team, or remove a user from a team they belong to. - -* ls: - If performed on an organization name, will return a list of existing teams - under that organization. If performed on a team, it will instead return a list - of all users belonging to that particular team. - -* edit: - Edit a current team. - -## DETAILS - -`npm team` always operates directly on the current registry, configurable from -the command line using `--registry=`. - -In order to create teams and manage team membership, you must be a *team admin* -under the given organization. Listing teams and team memberships may be done by -any member of the organizations. - -Organization creation and management of team admins and *organization* members -is done through the website, not the npm CLI. - -To use teams to manage permissions on packages belonging to your organization, -use the `npm access` command to grant or revoke the appropriate permissions. - -## SEE ALSO - -* npm-access(1) -* npm-registry(7) diff --git a/doc/cli/npm-test.md b/doc/cli/npm-test.md deleted file mode 100644 index 8a379e2efd098..0000000000000 --- a/doc/cli/npm-test.md +++ /dev/null @@ -1,20 +0,0 @@ -npm-test(1) -- Test a package -============================= - -## SYNOPSIS - - npm test [-- ] - - aliases: t, tst - -## DESCRIPTION - -This runs a package's "test" script, if one was provided. - -## SEE ALSO - -* npm-run-script(1) -* npm-scripts(7) -* npm-start(1) -* npm-restart(1) -* npm-stop(1) diff --git a/doc/cli/npm-uninstall.md b/doc/cli/npm-uninstall.md deleted file mode 100644 index 38302b20d61fc..0000000000000 --- a/doc/cli/npm-uninstall.md +++ /dev/null @@ -1,53 +0,0 @@ -npm-uninstall(1) -- Remove a package -============================= - -## SYNOPSIS - - npm uninstall [<@scope>/][@]... [-S|--save|-D|--save-dev|-O|--save-optional|--no-save] - - aliases: remove, rm, r, un, unlink - -## DESCRIPTION - -This uninstalls a package, completely removing everything npm installed -on its behalf. 
- -Example: - - npm uninstall sax - -In global mode (ie, with `-g` or `--global` appended to the command), -it uninstalls the current package context as a global package. - -`npm uninstall` takes 3 exclusive, optional flags which save or update -the package version in your main package.json: - -* `-S, --save`: Package will be removed from your `dependencies`. - -* `-D, --save-dev`: Package will be removed from your `devDependencies`. - -* `-O, --save-optional`: Package will be removed from your `optionalDependencies`. - -* `--no-save`: Package will not be removed from your `package.json` file. - -Further, if you have an `npm-shrinkwrap.json` then it will be updated as -well. - -Scope is optional and follows the usual rules for `npm-scope(7)`. - -Examples: - - npm uninstall sax --save - npm uninstall @myorg/privatepackage --save - npm uninstall node-tap --save-dev - npm uninstall dtrace-provider --save-optional - npm uninstall lodash --no-save - -## SEE ALSO - -* npm-prune(1) -* npm-install(1) -* npm-folders(5) -* npm-config(1) -* npm-config(7) -* npmrc(5) diff --git a/doc/cli/npm-unpublish.md b/doc/cli/npm-unpublish.md deleted file mode 100644 index b5b02154e9309..0000000000000 --- a/doc/cli/npm-unpublish.md +++ /dev/null @@ -1,45 +0,0 @@ -npm-unpublish(1) -- Remove a package from the registry -====================================================== - -## SYNOPSIS - - npm unpublish [<@scope>/][@] - -## WARNING - -**It is generally considered bad behavior to remove versions of a library -that others are depending on!** - -Consider using the `deprecate` command -instead, if your intent is to encourage users to upgrade. - -There is plenty of room on the registry. - -## DESCRIPTION - -This removes a package version from the registry, deleting its -entry and removing the tarball. - -If no version is specified, or if all versions are removed then -the root package entry is removed from the registry entirely. - -Even if a package version is unpublished, that specific name and -version combination can never be reused. In order to publish the -package again, a new version number must be used. Additionally, -new versions of packages with every version unpublished may not -be republished until 24 hours have passed. - -With the default registry (`registry.npmjs.org`), unpublish is -only allowed with versions published in the last 72 hours. If you -are trying to unpublish a version published longer ago than that, -contact support@npmjs.com. - -The scope is optional and follows the usual rules for `npm-scope(7)`. - -## SEE ALSO - -* npm-deprecate(1) -* npm-publish(1) -* npm-registry(7) -* npm-adduser(1) -* npm-owner(1) diff --git a/doc/cli/npm-view.md b/doc/cli/npm-view.md deleted file mode 100644 index 35e42adf9af8f..0000000000000 --- a/doc/cli/npm-view.md +++ /dev/null @@ -1,96 +0,0 @@ -npm-view(1) -- View registry info -================================= - -## SYNOPSIS - - npm view [<@scope>/][@] [[.]...] - - aliases: info, show, v - -## DESCRIPTION - -This command shows data about a package and prints it to the stream -referenced by the `outfd` config, which defaults to stdout. - -To show the package registry entry for the `connect` package, you can do -this: - - npm view connect - -The default version is "latest" if unspecified. - -Field names can be specified after the package descriptor. -For example, to show the dependencies of the `ronn` package at version -0.3.5, you could do the following: - - npm view ronn@0.3.5 dependencies - -You can view child fields by separating them with a period. 
-To view the git repository URL for the latest version of npm, you could -do this: - - npm view npm repository.url - -This makes it easy to view information about a dependency with a bit of -shell scripting. For example, to view all the data about the version of -opts that ronn depends on, you can do this: - - npm view opts@$(npm view ronn dependencies.opts) - -For fields that are arrays, requesting a non-numeric field will return -all of the values from the objects in the list. For example, to get all -the contributor names for the "express" project, you can do this: - - npm view express contributors.email - -You may also use numeric indices in square braces to specifically select -an item in an array field. To just get the email address of the first -contributor in the list, you can do this: - - npm view express contributors[0].email - -Multiple fields may be specified, and will be printed one after another. -For example, to get all the contributor names and email addresses, you -can do this: - - npm view express contributors.name contributors.email - -"Person" fields are shown as a string if they would be shown as an -object. So, for example, this will show the list of npm contributors in -the shortened string format. (See `package.json(5)` for more on this.) - - npm view npm contributors - -If a version range is provided, then data will be printed for every -matching version of the package. This will show which version of jsdom -was required by each matching version of yui3: - - npm view yui3@'>0.5.4' dependencies.jsdom - -To show the `connect` package version history, you can do -this: - - npm view connect versions - -## OUTPUT - -If only a single string field for a single version is output, then it -will not be colorized or quoted, so as to enable piping the output to -another command. If the field is an object, it will be output as a JavaScript object literal. - -If the --json flag is given, the outputted fields will be JSON. - -If the version range matches multiple versions, than each printed value -will be prefixed with the version it applies to. - -If multiple fields are requested, than each of them are prefixed with -the field name. - -## SEE ALSO - -* npm-search(1) -* npm-registry(7) -* npm-config(1) -* npm-config(7) -* npmrc(5) -* npm-docs(1) diff --git a/doc/cli/npm-whoami.md b/doc/cli/npm-whoami.md deleted file mode 100644 index 70b6a48f44671..0000000000000 --- a/doc/cli/npm-whoami.md +++ /dev/null @@ -1,17 +0,0 @@ -npm-whoami(1) -- Display npm username -===================================== - -## SYNOPSIS - - npm whoami [--registry ] - -## DESCRIPTION - -Print the `username` config to standard output. - -## SEE ALSO - -* npm-config(1) -* npm-config(7) -* npmrc(5) -* npm-adduser(1) diff --git a/doc/files/npm-folders.md b/doc/files/npm-folders.md deleted file mode 100644 index 456cb58bc89e1..0000000000000 --- a/doc/files/npm-folders.md +++ /dev/null @@ -1,213 +0,0 @@ -npm-folders(5) -- Folder Structures Used by npm -=============================================== - -## DESCRIPTION - -npm puts various things on your computer. That's its job. - -This document will tell you what it puts where. - -### tl;dr - -* Local install (default): puts stuff in `./node_modules` of the current - package root. -* Global install (with `-g`): puts stuff in /usr/local or wherever node - is installed. -* Install it **locally** if you're going to `require()` it. -* Install it **globally** if you're going to run it on the command line. 
-* If you need both, then install it in both places, or use `npm link`. - -### prefix Configuration - -The `prefix` config defaults to the location where node is installed. -On most systems, this is `/usr/local`. On Windows, it's `%AppData%\npm`. -On Unix systems, it's one level up, since node is typically installed at -`{prefix}/bin/node` rather than `{prefix}/node.exe`. - -When the `global` flag is set, npm installs things into this prefix. -When it is not set, it uses the root of the current package, or the -current working directory if not in a package already. - -### Node Modules - -Packages are dropped into the `node_modules` folder under the `prefix`. -When installing locally, this means that you can -`require("packagename")` to load its main module, or -`require("packagename/lib/path/to/sub/module")` to load other modules. - -Global installs on Unix systems go to `{prefix}/lib/node_modules`. -Global installs on Windows go to `{prefix}/node_modules` (that is, no -`lib` folder.) - -Scoped packages are installed the same way, except they are grouped together -in a sub-folder of the relevant `node_modules` folder with the name of that -scope prefix by the @ symbol, e.g. `npm install @myorg/package` would place -the package in `{prefix}/node_modules/@myorg/package`. See `scope(7)` for -more details. - -If you wish to `require()` a package, then install it locally. - -### Executables - -When in global mode, executables are linked into `{prefix}/bin` on Unix, -or directly into `{prefix}` on Windows. - -When in local mode, executables are linked into -`./node_modules/.bin` so that they can be made available to scripts run -through npm. (For example, so that a test runner will be in the path -when you run `npm test`.) - -### Man Pages - -When in global mode, man pages are linked into `{prefix}/share/man`. - -When in local mode, man pages are not installed. - -Man pages are not installed on Windows systems. - -### Cache - -See `npm-cache(1)`. Cache files are stored in `~/.npm` on Posix, or -`%AppData%/npm-cache` on Windows. - -This is controlled by the `cache` configuration param. - -### Temp Files - -Temporary files are stored by default in the folder specified by the -`tmp` config, which defaults to the TMPDIR, TMP, or TEMP environment -variables, or `/tmp` on Unix and `c:\windows\temp` on Windows. - -Temp files are given a unique folder under this root for each run of the -program, and are deleted upon successful exit. - -## More Information - -When installing locally, npm first tries to find an appropriate -`prefix` folder. This is so that `npm install foo@1.2.3` will install -to the sensible root of your package, even if you happen to have `cd`ed -into some other folder. - -Starting at the $PWD, npm will walk up the folder tree checking for a -folder that contains either a `package.json` file, or a `node_modules` -folder. If such a thing is found, then that is treated as the effective -"current directory" for the purpose of running npm commands. (This -behavior is inspired by and similar to git's .git-folder seeking -logic when running git commands in a working dir.) - -If no package root is found, then the current folder is used. - -When you run `npm install foo@1.2.3`, then the package is loaded into -the cache, and then unpacked into `./node_modules/foo`. Then, any of -foo's dependencies are similarly unpacked into -`./node_modules/foo/node_modules/...`. - -Any bin files are symlinked to `./node_modules/.bin/`, so that they may -be found by npm scripts when necessary. 
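If you want to see these locations on your own machine, the sketch below uses the companion commands documented in `npm-prefix(1)`, `npm-root(1)`, and `npm-bin(1)`; the paths in the comments are typical Unix defaults, not guarantees.

```
# Print the folders npm will actually use (comments show typical Unix defaults)
npm prefix       # effective package root: nearest folder with a package.json
npm root         # local install target, e.g. ./node_modules
npm bin          # local executables, e.g. ./node_modules/.bin

npm prefix -g    # global prefix, often /usr/local
npm root -g      # global installs, e.g. {prefix}/lib/node_modules
npm bin -g       # global executables, e.g. {prefix}/bin
```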
- -### Global Installation - -If the `global` configuration is set to true, then npm will -install packages "globally". - -For global installation, packages are installed roughly the same way, -but using the folders described above. - -### Cycles, Conflicts, and Folder Parsimony - -Cycles are handled using the property of node's module system that it -walks up the directories looking for `node_modules` folders. So, at every -stage, if a package is already installed in an ancestor `node_modules` -folder, then it is not installed at the current location. - -Consider the case above, where `foo -> bar -> baz`. Imagine if, in -addition to that, baz depended on bar, so you'd have: -`foo -> bar -> baz -> bar -> baz ...`. However, since the folder -structure is: `foo/node_modules/bar/node_modules/baz`, there's no need to -put another copy of bar into `.../baz/node_modules`, since when it calls -require("bar"), it will get the copy that is installed in -`foo/node_modules/bar`. - -This shortcut is only used if the exact same -version would be installed in multiple nested `node_modules` folders. It -is still possible to have `a/node_modules/b/node_modules/a` if the two -"a" packages are different versions. However, without repeating the -exact same package multiple times, an infinite regress will always be -prevented. - -Another optimization can be made by installing dependencies at the -highest level possible, below the localized "target" folder. - -#### Example - -Consider this dependency graph: - - foo - +-- blerg@1.2.5 - +-- bar@1.2.3 - | +-- blerg@1.x (latest=1.3.7) - | +-- baz@2.x - | | `-- quux@3.x - | | `-- bar@1.2.3 (cycle) - | `-- asdf@* - `-- baz@1.2.3 - `-- quux@3.x - `-- bar - -In this case, we might expect a folder structure like this: - - foo - +-- node_modules - +-- blerg (1.2.5) <---[A] - +-- bar (1.2.3) <---[B] - | `-- node_modules - | +-- baz (2.0.2) <---[C] - | | `-- node_modules - | | `-- quux (3.2.0) - | `-- asdf (2.3.4) - `-- baz (1.2.3) <---[D] - `-- node_modules - `-- quux (3.2.0) <---[E] - -Since foo depends directly on `bar@1.2.3` and `baz@1.2.3`, those are -installed in foo's `node_modules` folder. - -Even though the latest copy of blerg is 1.3.7, foo has a specific -dependency on version 1.2.5. So, that gets installed at [A]. Since the -parent installation of blerg satisfies bar's dependency on `blerg@1.x`, -it does not install another copy under [B]. - -Bar [B] also has dependencies on baz and asdf, so those are installed in -bar's `node_modules` folder. Because it depends on `baz@2.x`, it cannot -re-use the `baz@1.2.3` installed in the parent `node_modules` folder [D], -and must install its own copy [C]. - -Underneath bar, the `baz -> quux -> bar` dependency creates a cycle. -However, because bar is already in quux's ancestry [B], it does not -unpack another copy of bar into that folder. - -Underneath `foo -> baz` [D], quux's [E] folder tree is empty, because its -dependency on bar is satisfied by the parent folder copy installed at [B]. - -For a graphical breakdown of what is installed where, use `npm ls`. - -### Publishing - -Upon publishing, npm will look in the `node_modules` folder. If any of -the items there are not in the `bundledDependencies` array, then they will -not be included in the package tarball. - -This allows a package maintainer to install all of their dependencies -(and dev dependencies) locally, but only re-publish those items that -cannot be found elsewhere. See `package.json(5)` for more information. 
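To check how a real install compares with the example layout above, `npm ls` prints the tree npm actually built; this sketch reuses the hypothetical `quux` package from the example and is not specific to any real project.

```
npm ls                 # full tree, with deduplicated packages marked
npm ls --depth=1       # only direct dependencies and their immediate children
npm ls quux            # every location where quux ended up being installed
```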
- -## SEE ALSO - -* package.json(5) -* npm-install(1) -* npm-pack(1) -* npm-cache(1) -* npm-config(1) -* npmrc(5) -* npm-config(7) -* npm-publish(1) diff --git a/doc/files/npm-package-locks.md b/doc/files/npm-package-locks.md deleted file mode 100644 index cbb62bdc3841c..0000000000000 --- a/doc/files/npm-package-locks.md +++ /dev/null @@ -1,164 +0,0 @@ -npm-package-locks(5) -- An explanation of npm lockfiles -===================================================== - -## DESCRIPTION - -Conceptually, the "input" to npm-install(1) is a package.json(5), while its -"output" is a fully-formed `node_modules` tree: a representation of the -dependencies you declared. In an ideal world, npm would work like a pure -function: the same `package.json` should produce the exact same `node_modules` -tree, any time. In some cases, this is indeed true. But in many others, npm is -unable to do this. There are multiple reasons for this: - -* different versions of npm (or other package managers) may have been used to install a package, each using slightly different installation algorithms. - -* a new version of a direct semver-range package may have been published since the last time your packages were installed, and thus a newer version will be used. - -* A dependency of one of your dependencies may have published a new version, which will update even if you used pinned dependency specifiers (`1.2.3` instead of `^1.2.3`) - -* The registry you installed from is no longer available, or allows mutation of versions (unlike the primary npm registry), and a different version of a package exists under the same version number now. - -As an example, consider package A: - - { - "name": "A", - "version": "0.1.0", - "dependencies": { - "B": "<0.1.0" - } - } - -package B: - - { - "name": "B", - "version": "0.0.1", - "dependencies": { - "C": "<0.1.0" - } - } - -and package C: - - { - "name": "C", - "version": "0.0.1" - } - -If these are the only versions of A, B, and C available in the -registry, then a normal `npm install A` will install: - - A@0.1.0 - `-- B@0.0.1 - `-- C@0.0.1 - -However, if B@0.0.2 is published, then a fresh `npm install A` will -install: - - A@0.1.0 - `-- B@0.0.2 - `-- C@0.0.1 - -assuming the new version did not modify B's dependencies. Of course, -the new version of B could include a new version of C and any number -of new dependencies. If such changes are undesirable, the author of A -could specify a dependency on B@0.0.1. However, if A's author and B's -author are not the same person, there's no way for A's author to say -that he or she does not want to pull in newly published versions of C -when B hasn't changed at all. - -To prevent this potential issue, npm uses package-lock.json(5) or, if present, -npm-shrinkwrap.json(5). These files are called package locks, or lockfiles. - -Whenever you run `npm install`, npm generates or updates your package lock, -which will look something like this: - - { - "name": "A", - "version": "0.1.0", - ...metadata fields... - "dependencies": { - "B": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/B/-/B-0.0.1.tgz", - "integrity": "sha512-DeAdb33F+" - "dependencies": { - "C": { - "version": "git://github.com/org/C.git#5c380ae319fc4efe9e7f2d9c78b0faa588fd99b4" - } - } - } - } - } - -This file describes an *exact*, and more importantly *reproducible* -`node_modules` tree. Once it's present, any future installation will base its -work off this file, instead of recalculating dependency versions off -package.json(5). 
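A minimal sketch of that workflow, assuming a git-managed project: the lock is produced by an ordinary install, and committing it is what lets later installs reproduce the tree instead of re-resolving ranges from `package.json`.

```
npm install                       # resolves dependencies and writes package-lock.json
git add package-lock.json
git commit -m "lock dependency tree"

# On another machine (or later), this rebuilds node_modules from the lock
npm install
```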
- -The presence of a package lock changes the installation behavior such that: - -1. The module tree described by the package lock is reproduced. This means -reproducing the structure described in the file, using the specific files -referenced in "resolved" if available, falling back to normal package resolution -using "version" if one isn't. - -2. The tree is walked and any missing dependencies are installed in the usual -fashion. - -If `preshrinkwrap`, `shrinkwrap` or `postshrinkwrap` are in the `scripts` -property of the `package.json`, they will be executed in order. `preshrinkwrap` -and `shrinkwrap` are executed before the shrinkwrap, `postshrinkwrap` is -executed afterwards. These scripts run for both `package-lock.json` and -`npm-shrinkwrap.json`. For example to run some postprocessing on the generated -file: - - "scripts": { - "postshrinkwrap": "json -I -e \"this.myMetadata = $MY_APP_METADATA\"" - } - - -### Using locked packages - -Using a locked package is no different than using any package without a package -lock: any commands that update `node_modules` and/or `package.json`'s -dependencies will automatically sync the existing lockfile. This includes `npm -install`, `npm rm`, `npm update`, etc. To prevent this update from happening, -you can use the `--no-save` option to prevent saving altogether, or -`--no-shrinkwrap` to allow `package.json` to be updated while leaving -`package-lock.json` or `npm-shrinkwrap.json` intact. - -It is highly recommended you commit the generated package lock to source -control: this will allow anyone else on your team, your deployments, your -CI/continuous integration, and anyone else who runs `npm install` in your -package source to get the exact same dependency tree that you were developing -on. Additionally, the diffs from these changes are human-readable and will -inform you of any changes npm has made to your `node_modules`, so you can notice -if any transitive dependencies were updated, hoisted, etc. - -### Resolving lockfile conflicts - -Occasionally, two separate npm install will create package locks that cause -merge conflicts in source control systems. As of `npm@5.7.0`, these conflicts -can be resolved by manually fixing any `package.json` conflicts, and then -running `npm install [--package-lock-only]` again. npm will automatically -resolve any conflicts for you and write a merged package lock that includes all -the dependencies from both branches in a reasonable tree. If -`--package-lock-only` is provided, it will do this without also modifying your -local `node_modules/`. - -To make this process seamless on git, consider installing -[`npm-merge-driver`](https://npm.im/npm-merge-driver), which will teach git how -to do this itself without any user interaction. In short: `$ npx -npm-merge-driver install -g` will let you do this, and even works with -pre-`npm@5.7.0` versions of npm 5, albeit a bit more noisily. Note that if -`package.json` itself conflicts, you will have to resolve that by hand and run -`npm install` manually, even with the merge driver. 
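Condensed into a runnable sketch, assuming `npm@5.7.0` or later and a merge that has conflicted both `package.json` and the lockfile:

```
# 1. Resolve the conflict markers in package.json by hand, then:
npm install --package-lock-only   # writes a merged package-lock.json
                                  # without touching node_modules/

# 2. Optionally teach git to do this automatically on future merges:
npx npm-merge-driver install -g
```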
- -## SEE ALSO - -* https://medium.com/@sdboyer/so-you-want-to-write-a-package-manager-4ae9c17d9527 -* package.json(5) -* package-lock.json(5) -* npm-shrinkwrap.json(5) -* npm-shrinkwrap(1) diff --git a/doc/files/npm-shrinkwrap.json.md b/doc/files/npm-shrinkwrap.json.md deleted file mode 100644 index 541a1f5a6d765..0000000000000 --- a/doc/files/npm-shrinkwrap.json.md +++ /dev/null @@ -1,27 +0,0 @@ -npm-shrinkwrap.json(5) -- A publishable lockfile -===================================================== - -## DESCRIPTION - -`npm-shrinkwrap.json` is a file created by npm-shrinkwrap(1). It is identical to -`package-lock.json`, with one major caveat: Unlike `package-lock.json`, -`npm-shrinkwrap.json` may be included when publishing a package. - -The recommended use-case for `npm-shrinkwrap.json` is applications deployed -through the publishing process on the registry: for example, daemons and -command-line tools intended as global installs or `devDependencies`. It's -strongly discouraged for library authors to publish this file, since that would -prevent end users from having control over transitive dependency updates. - -Additionally, if both `package-lock.json` and `npm-shrinkwrap.json` are present -in a package root, `package-lock.json` will be ignored in favor of this file. - -For full details and description of the `npm-shrinkwrap.json` file format, refer -to the manual page for package-lock.json(5). - -## SEE ALSO - -* npm-shrinkwrap(1) -* package-lock.json(5) -* package.json(5) -* npm-install(1) diff --git a/doc/files/npmrc.md b/doc/files/npmrc.md deleted file mode 100644 index 0980c1090ad4c..0000000000000 --- a/doc/files/npmrc.md +++ /dev/null @@ -1,90 +0,0 @@ -npmrc(5) -- The npm config files -================================ - -## DESCRIPTION - -npm gets its config settings from the command line, environment -variables, and `npmrc` files. - -The `npm config` command can be used to update and edit the contents -of the user and global npmrc files. - -For a list of available configuration options, see npm-config(7). - -## FILES - -The four relevant files are: - -* per-project config file (/path/to/my/project/.npmrc) -* per-user config file (~/.npmrc) -* global config file ($PREFIX/etc/npmrc) -* npm builtin config file (/path/to/npm/npmrc) - -All npm config files are an ini-formatted list of `key = value` -parameters. Environment variables can be replaced using -`${VARIABLE_NAME}`. For example: - - prefix = ${HOME}/.npm-packages - -Each of these files is loaded, and config options are resolved in -priority order. For example, a setting in the userconfig file would -override the setting in the globalconfig file. - -Array values are specified by adding "[]" after the key name. For -example: - - key[] = "first value" - key[] = "second value" - -#### Comments - -Lines in `.npmrc` files are interpreted as comments when they begin with a `;` or `#` character. `.npmrc` files are parsed by [npm/ini](https://github.com/npm/ini), which specifies this comment syntax. - -For example: - - # last modified: 01 Jan 2016 - ; Set a new registry for a scoped package - @myscope:registry=https://mycustomregistry.example.org - -### Per-project config file - -When working locally in a project, a `.npmrc` file in the root of the -project (ie, a sibling of `node_modules` and `package.json`) will set -config values specific to this project. - -Note that this only applies to the root of the project that you're -running npm in. It has no effect when your module is published. 
For -example, you can't publish a module that forces itself to install -globally, or in a different location. - -Additionally, this file is not read in global mode, such as when running -`npm install -g`. - -### Per-user config file - -`$HOME/.npmrc` (or the `userconfig` param, if set in the environment -or on the command line) - -### Global config file - -`$PREFIX/etc/npmrc` (or the `globalconfig` param, if set above): -This file is an ini-file formatted list of `key = value` parameters. -Environment variables can be replaced as above. - -### Built-in config file - -`path/to/npm/itself/npmrc` - -This is an unchangeable "builtin" configuration file that npm keeps -consistent across updates. Set fields in here using the `./configure` -script that comes with npm. This is primarily for distribution -maintainers to override default configs in a standard and consistent -manner. - -## SEE ALSO - -* npm-folders(5) -* npm-config(1) -* npm-config(7) -* package.json(5) -* npm(1) diff --git a/doc/files/package-lock.json.md b/doc/files/package-lock.json.md deleted file mode 100644 index 1b4ba93497117..0000000000000 --- a/doc/files/package-lock.json.md +++ /dev/null @@ -1,142 +0,0 @@ -package-lock.json(5) -- A manifestation of the manifest -===================================================== - -## DESCRIPTION - -`package-lock.json` is automatically generated for any operations where npm -modifies either the `node_modules` tree, or `package.json`. It describes the -exact tree that was generated, such that subsequent installs are able to -generate identical trees, regardless of intermediate dependency updates. - -This file is intended to be committed into source repositories, and serves -various purposes: - -* Describe a single representation of a dependency tree such that teammates, deployments, and continuous integration are guaranteed to install exactly the same dependencies. - -* Provide a facility for users to "time-travel" to previous states of `node_modules` without having to commit the directory itself. - -* To facilitate greater visibility of tree changes through readable source control diffs. - -* And optimize the installation process by allowing npm to skip repeated metadata resolutions for previously-installed packages. - -One key detail about `package-lock.json` is that it cannot be published, and it -will be ignored if found in any place other than the toplevel package. It shares -a format with npm-shrinkwrap.json(5), which is essentially the same file, but -allows publication. This is not recommended unless deploying a CLI tool or -otherwise using the publication process for producing production packages. - -If both `package-lock.json` and `npm-shrinkwrap.json` are present in the root of -a package, `package-lock.json` will be completely ignored. - - -## FILE FORMAT - -### name - -The name of the package this is a package-lock for. This must match what's in -`package.json`. - -### version - -The version of the package this is a package-lock for. This must match what's in -`package.json`. - -### lockfileVersion - -An integer version, starting at `1` with the version number of this document -whose semantics were used when generating this `package-lock.json`. - -### packageIntegrity - -This is a [subresource -integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) value -created from the `package.json`. No preprocessing of the `package.json` should -be done. Subresource integrity strings can be produced by modules like -[`ssri`](https://www.npmjs.com/package/ssri). 
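If you are curious what such a value looks like, the shell sketch below produces a string of the same `sha512-...` shape from a local file; npm generates the real `packageIntegrity` through the `ssri` module mentioned above, so treat this as an illustration of the format rather than the exact procedure.

```
# Base64 of the raw SHA-512 digest, prefixed with the algorithm name
printf 'sha512-%s\n' "$(openssl dgst -sha512 -binary package.json | openssl base64 -A)"
```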
- -### preserveSymlinks - -Indicates that the install was done with the environment variable -`NODE_PRESERVE_SYMLINKS` enabled. The installer should insist that the value of -this property match that environment variable. - -### dependencies - -A mapping of package name to dependency object. Dependency objects have the -following properties: - -#### version - -This is a specifier that uniquely identifies this package and should be -usable in fetching a new copy of it. - -* bundled dependencies: Regardless of source, this is a version number that is purely for informational purposes. -* registry sources: This is a version number. (eg, `1.2.3`) -* git sources: This is a git specifier with resolved committish. (eg, `git+https://example.com/foo/bar#115311855adb0789a0466714ed48a1499ffea97e`) -* http tarball sources: This is the URL of the tarball. (eg, `https://example.com/example-1.3.0.tgz`) -* local tarball sources: This is the file URL of the tarball. (eg `file:///opt/storage/example-1.3.0.tgz`) -* local link sources: This is the file URL of the link. (eg `file:libs/our-module`) - -#### integrity - -This is a [Standard Subresource -Integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) for this -resource. - -* For bundled dependencies this is not included, regardless of source. -* For registry sources, this is the `integrity` that the registry provided, or if one wasn't provided the SHA1 in `shasum`. -* For git sources this is the specific commit hash we cloned from. -* For remote tarball sources this is an integrity based on a SHA512 of - the file. -* For local tarball sources: This is an integrity field based on the SHA512 of the file. - -#### resolved - -* For bundled dependencies this is not included, regardless of source. -* For registry sources this is path of the tarball relative to the registry - URL. If the tarball URL isn't on the same server as the registry URL then - this is a complete URL. - -#### bundled - -If true, this is the bundled dependency and will be installed by the parent -module. When installing, this module will be extracted from the parent -module during the extract phase, not installed as a separate dependency. - -#### dev - -If true then this dependency is either a development dependency ONLY of the -top level module or a transitive dependency of one. This is false for -dependencies that are both a development dependency of the top level and a -transitive dependency of a non-development dependency of the top level. - -#### optional - -If true then this dependency is either an optional dependency ONLY of the -top level module or a transitive dependency of one. This is false for -dependencies that are both an optional dependency of the top level and a -transitive dependency of a non-optional dependency of the top level. - -All optional dependencies should be included even if they're uninstallable -on the current platform. - - -#### requires - -This is a mapping of module name to version. This is a list of everything -this module requires, regardless of where it will be installed. The version -should match via normal matching rules a dependency either in our -`dependencies` or in a level higher than us. - - -#### dependencies - -The dependencies of this dependency, exactly as at the top level. 
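Because the lockfile is plain JSON, the fields described above are easy to inspect with ordinary tools; the `jq` sketch below is one way to do so (the package name `foo` is a placeholder).

```
jq -r '.lockfileVersion' package-lock.json

# name@version for every top-level entry under "dependencies"
jq -r '.dependencies | to_entries[] | "\(.key)@\(.value.version)"' package-lock.json

# resolved URL and integrity for a single dependency (replace "foo")
jq -r '.dependencies.foo | "\(.resolved)  \(.integrity)"' package-lock.json
```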
- -## SEE ALSO - -* npm-shrinkwrap(1) -* npm-shrinkwrap.json(5) -* npm-package-locks(5) -* package.json(5) -* npm-install(1) diff --git a/doc/files/package.json.md b/doc/files/package.json.md deleted file mode 100644 index dd6492af08430..0000000000000 --- a/doc/files/package.json.md +++ /dev/null @@ -1,797 +0,0 @@ -package.json(5) -- Specifics of npm's package.json handling -=========================================================== - -## DESCRIPTION - -This document is all you need to know about what's required in your package.json -file. It must be actual JSON, not just a JavaScript object literal. - -A lot of the behavior described in this document is affected by the config -settings described in `npm-config(7)`. - -## name - -If you plan to publish your package, the *most* important things in your -package.json are the name and version fields as they will be required. The name -and version together form an identifier that is assumed to be completely unique. -Changes to the package should come along with changes to the version. If you don't -plan to publish your package, the name and version fields are optional. - -The name is what your thing is called. - -Some rules: - -* The name must be less than or equal to 214 characters. This includes the scope for - scoped packages. -* The name can't start with a dot or an underscore. -* New packages must not have uppercase letters in the name. -* The name ends up being part of a URL, an argument on the command line, and a - folder name. Therefore, the name can't contain any non-URL-safe characters. - -Some tips: - -* Don't use the same name as a core Node module. -* Don't put "js" or "node" in the name. It's assumed that it's js, since you're - writing a package.json file, and you can specify the engine using the "engines" - field. (See below.) -* The name will probably be passed as an argument to require(), so it should - be something short, but also reasonably descriptive. -* You may want to check the npm registry to see if there's something by that name - already, before you get too attached to it. - -A name can be optionally prefixed by a scope, e.g. `@myorg/mypackage`. See -`npm-scope(7)` for more detail. - -## version - -If you plan to publish your package, the *most* important things in your -package.json are the name and version fields as they will be required. The name -and version together form an identifier that is assumed to be completely unique. -Changes to the package should come along with changes to the version. If you don't -plan to publish your package, the name and version fields are optional. - -Version must be parseable by -[node-semver](https://github.com/isaacs/node-semver), which is bundled -with npm as a dependency. (`npm install semver` to use it yourself.) - -More on version numbers and ranges at semver(7). - -## description - -Put a description in it. It's a string. This helps people discover your -package, as it's listed in `npm search`. - -## keywords - -Put keywords in it. It's an array of strings. This helps people -discover your package as it's listed in `npm search`. - -## homepage - -The url to the project homepage. - -Example: - - "homepage": "https://github.com/owner/project#readme" - -## bugs - -The url to your project's issue tracker and / or the email address to which -issues should be reported. These are helpful for people who encounter issues -with your package. 
- -It should look like this: - - { "url" : "https://github.com/owner/project/issues" - , "email" : "project@hostname.com" - } - -You can specify either one or both values. If you want to provide only a url, -you can specify the value for "bugs" as a simple string instead of an object. - -If a url is provided, it will be used by the `npm bugs` command. - -## license - -You should specify a license for your package so that people know how they are -permitted to use it, and any restrictions you're placing on it. - -If you're using a common license such as BSD-2-Clause or MIT, add a -current SPDX license identifier for the license you're using, like this: - - { "license" : "BSD-3-Clause" } - -You can check [the full list of SPDX license IDs](https://spdx.org/licenses/). -Ideally you should pick one that is -[OSI](https://opensource.org/licenses/alphabetical) approved. - -If your package is licensed under multiple common licenses, use an [SPDX license -expression syntax version 2.0 string](https://www.npmjs.com/package/spdx), like this: - - { "license" : "(ISC OR GPL-3.0)" } - -If you are using a license that hasn't been assigned an SPDX identifier, or if -you are using a custom license, use a string value like this one: - - { "license" : "SEE LICENSE IN " } - -Then include a file named `` at the top level of the package. - -Some old packages used license objects or a "licenses" property containing an -array of license objects: - - // Not valid metadata - { "license" : - { "type" : "ISC" - , "url" : "https://opensource.org/licenses/ISC" - } - } - - // Not valid metadata - { "licenses" : - [ - { "type": "MIT" - , "url": "https://www.opensource.org/licenses/mit-license.php" - } - , { "type": "Apache-2.0" - , "url": "https://opensource.org/licenses/apache2.0.php" - } - ] - } - -Those styles are now deprecated. Instead, use SPDX expressions, like this: - - { "license": "ISC" } - - { "license": "(MIT OR Apache-2.0)" } - -Finally, if you do not wish to grant others the right to use a private or -unpublished package under any terms: - - { "license": "UNLICENSED" } - -Consider also setting `"private": true` to prevent accidental publication. - -## people fields: author, contributors - -The "author" is one person. "contributors" is an array of people. A "person" -is an object with a "name" field and optionally "url" and "email", like this: - - { "name" : "Barney Rubble" - , "email" : "b@rubble.com" - , "url" : "http://barnyrubble.tumblr.com/" - } - -Or you can shorten that all into a single string, and npm will parse it for you: - - "Barney Rubble (http://barnyrubble.tumblr.com/)" - -Both email and url are optional either way. - -npm also sets a top-level "maintainers" field with your npm user info. - -## files - -The optional `files` field is an array of file patterns that describes -the entries to be included when your package is installed as a -dependency. File patterns follow a similar syntax to `.gitignore`, but -reversed: including a file, directory, or glob pattern (`*`, `**/*`, and such) -will make it so that file is included in the tarball when it's packed. Omitting -the field will make it default to `["*"]`, which means it will include all files. - -Some special files and directories are also included or excluded regardless of -whether they exist in the `files` array (see below). - -You can also provide a `.npmignore` file in the root of your package or -in subdirectories, which will keep files from being included. 
At the -root of your package it will not override the "files" field, but in -subdirectories it will. The `.npmignore` file works just like a -`.gitignore`. If there is a `.gitignore` file, and `.npmignore` is -missing, `.gitignore`'s contents will be used instead. - -Files included with the "package.json#files" field _cannot_ be excluded -through `.npmignore` or `.gitignore`. - -Certain files are always included, regardless of settings: - -* `package.json` -* `README` -* `CHANGES` / `CHANGELOG` / `HISTORY` -* `LICENSE` / `LICENCE` -* `NOTICE` -* The file in the "main" field - -`README`, `CHANGES`, `LICENSE` & `NOTICE` can have any case and extension. - -Conversely, some files are always ignored: - -* `.git` -* `CVS` -* `.svn` -* `.hg` -* `.lock-wscript` -* `.wafpickle-N` -* `.*.swp` -* `.DS_Store` -* `._*` -* `npm-debug.log` -* `.npmrc` -* `node_modules` -* `config.gypi` -* `*.orig` -* `package-lock.json` (use shrinkwrap instead) - -## main - -The main field is a module ID that is the primary entry point to your program. -That is, if your package is named `foo`, and a user installs it, and then does -`require("foo")`, then your main module's exports object will be returned. - -This should be a module ID relative to the root of your package folder. - -For most modules, it makes the most sense to have a main script and often not -much else. - -## browser - -If your module is meant to be used client-side the browser field should be -used instead of the main field. This is helpful to hint users that it might -rely on primitives that aren't available in Node.js modules. (e.g. `window`) - -## bin - -A lot of packages have one or more executable files that they'd like to -install into the PATH. npm makes this pretty easy (in fact, it uses this -feature to install the "npm" executable.) - -To use this, supply a `bin` field in your package.json which is a map of -command name to local file name. On install, npm will symlink that file into -`prefix/bin` for global installs, or `./node_modules/.bin/` for local -installs. - - -For example, myapp could have this: - - { "bin" : { "myapp" : "./cli.js" } } - -So, when you install myapp, it'll create a symlink from the `cli.js` script to -`/usr/local/bin/myapp`. - -If you have a single executable, and its name should be the name -of the package, then you can just supply it as a string. For example: - - { "name": "my-program" - , "version": "1.2.5" - , "bin": "./path/to/program" } - -would be the same as this: - - { "name": "my-program" - , "version": "1.2.5" - , "bin" : { "my-program" : "./path/to/program" } } - -Please make sure that your file(s) referenced in `bin` starts with -`#!/usr/bin/env node`, otherwise the scripts are started without the node -executable! - -## man - -Specify either a single file or an array of filenames to put in place for the -`man` program to find. - -If only a single file is provided, then it's installed such that it is the -result from `man `, regardless of its actual filename. For example: - - { "name" : "foo" - , "version" : "1.2.3" - , "description" : "A packaged foo fooer for fooing foos" - , "main" : "foo.js" - , "man" : "./man/doc.1" - } - -would link the `./man/doc.1` file in such that it is the target for `man foo` - -If the filename doesn't start with the package name, then it's prefixed. 
-So, this: - - { "name" : "foo" - , "version" : "1.2.3" - , "description" : "A packaged foo fooer for fooing foos" - , "main" : "foo.js" - , "man" : [ "./man/foo.1", "./man/bar.1" ] - } - -will create files to do `man foo` and `man foo-bar`. - -Man files must end with a number, and optionally a `.gz` suffix if they are -compressed. The number dictates which man section the file is installed into. - - { "name" : "foo" - , "version" : "1.2.3" - , "description" : "A packaged foo fooer for fooing foos" - , "main" : "foo.js" - , "man" : [ "./man/foo.1", "./man/foo.2" ] - } - -will create entries for `man foo` and `man 2 foo` - -## directories - -The CommonJS [Packages](http://wiki.commonjs.org/wiki/Packages/1.0) spec details a -few ways that you can indicate the structure of your package using a `directories` -object. If you look at [npm's package.json](https://registry.npmjs.org/npm/latest), -you'll see that it has directories for doc, lib, and man. - -In the future, this information may be used in other creative ways. - -### directories.lib - -Tell people where the bulk of your library is. Nothing special is done -with the lib folder in any way, but it's useful meta info. - -### directories.bin - -If you specify a `bin` directory in `directories.bin`, all the files in -that folder will be added. - -Because of the way the `bin` directive works, specifying both a -`bin` path and setting `directories.bin` is an error. If you want to -specify individual files, use `bin`, and for all the files in an -existing `bin` directory, use `directories.bin`. - -### directories.man - -A folder that is full of man pages. Sugar to generate a "man" array by -walking the folder. - -### directories.doc - -Put markdown files in here. Eventually, these will be displayed nicely, -maybe, someday. - -### directories.example - -Put example scripts in here. Someday, it might be exposed in some clever way. - -### directories.test - -Put your tests in here. It is currently not exposed, but it might be in the -future. - -## repository - -Specify the place where your code lives. This is helpful for people who -want to contribute. If the git repo is on GitHub, then the `npm docs` -command will be able to find you. - -Do it like this: - - "repository": { - "type" : "git", - "url" : "https://github.com/npm/cli.git" - } - - "repository": { - "type" : "svn", - "url" : "https://v8.googlecode.com/svn/trunk/" - } - -The URL should be a publicly available (perhaps read-only) url that can be handed -directly to a VCS program without any modification. It should not be a url to an -html project page that you put in your browser. It's for computers. - -For GitHub, GitHub gist, Bitbucket, or GitLab repositories you can use the same -shortcut syntax you use for `npm install`: - - "repository": "npm/npm" - - "repository": "github:user/repo" - - "repository": "gist:11081aaa281" - - "repository": "bitbucket:user/repo" - - "repository": "gitlab:user/repo" - -## scripts - -The "scripts" property is a dictionary containing script commands that are run -at various times in the lifecycle of your package. The key is the lifecycle -event, and the value is the command to run at that point. - -See `npm-scripts(7)` to find out more about writing package scripts. - -## config - -A "config" object can be used to set configuration parameters used in package -scripts that persist across upgrades. 
For instance, if a package had the -following: - - { "name" : "foo" - , "config" : { "port" : "8080" } } - -and then had a "start" command that then referenced the -`npm_package_config_port` environment variable, then the user could -override that by doing `npm config set foo:port 8001`. - -See `npm-config(7)` and `npm-scripts(7)` for more on package -configs. - -## dependencies - -Dependencies are specified in a simple object that maps a package name to a -version range. The version range is a string which has one or more -space-separated descriptors. Dependencies can also be identified with a -tarball or git URL. - -**Please do not put test harnesses or transpilers in your -`dependencies` object.** See `devDependencies`, below. - -See semver(7) for more details about specifying version ranges. - -* `version` Must match `version` exactly -* `>version` Must be greater than `version` -* `>=version` etc -* `=version1 <=version2`. -* `range1 || range2` Passes if either range1 or range2 are satisfied. -* `git...` See 'Git URLs as Dependencies' below -* `user/repo` See 'GitHub URLs' below -* `tag` A specific version tagged and published as `tag` See `npm-dist-tag(1)` -* `path/path/path` See [Local Paths](#local-paths) below - -For example, these are all valid: - - { "dependencies" : - { "foo" : "1.0.0 - 2.9999.9999" - , "bar" : ">=1.0.2 <2.1.2" - , "baz" : ">1.0.2 <=2.3.4" - , "boo" : "2.0.1" - , "qux" : "<1.0.0 || >=2.3.1 <2.4.5 || >=2.5.2 <3.0.0" - , "asd" : "http://asdf.com/asdf.tar.gz" - , "til" : "~1.2" - , "elf" : "~1.2.3" - , "two" : "2.x" - , "thr" : "3.3.x" - , "lat" : "latest" - , "dyl" : "file:../dyl" - } - } - -### URLs as Dependencies - -You may specify a tarball URL in place of a version range. - -This tarball will be downloaded and installed locally to your package at -install time. - -### Git URLs as Dependencies - -Git urls are of the form: - - ://[[:]@][:][:][/][# | #semver:] - -`` is one of `git`, `git+ssh`, `git+http`, `git+https`, or -`git+file`. - -If `#` is provided, it will be used to clone exactly that -commit. If the commit-ish has the format `#semver:`, `` can -be any valid semver range or exact version, and npm will look for any tags -or refs matching that range in the remote repository, much as it would for a -registry dependency. If neither `#` or `#semver:` is -specified, then `master` is used. - -Examples: - - git+ssh://git@github.com:npm/cli.git#v1.0.27 - git+ssh://git@github.com:npm/cli#semver:^5.0 - git+https://isaacs@github.com/npm/cli.git - git://github.com/npm/cli.git#v1.0.27 - -### GitHub URLs - -As of version 1.1.65, you can refer to GitHub urls as just "foo": -"user/foo-project". Just as with git URLs, a `commit-ish` suffix can be -included. For example: - - { - "name": "foo", - "version": "0.0.0", - "dependencies": { - "express": "expressjs/express", - "mocha": "mochajs/mocha#4727d357ea", - "module": "user/repo#feature\/branch" - } - } - -### Local Paths - -As of version 2.0.0 you can provide a path to a local directory that contains a -package. Local paths can be saved using `npm install -S` or -`npm install --save`, using any of these forms: - - ../foo/bar - ~/foo/bar - ./foo/bar - /foo/bar - -in which case they will be normalized to a relative path and added to your -`package.json`. 
For example: - - { - "name": "baz", - "dependencies": { - "bar": "file:../foo/bar" - } - } - -This feature is helpful for local offline development and creating -tests that require npm installing where you don't want to hit an -external server, but should not be used when publishing packages -to the public registry. - -## devDependencies - -If someone is planning on downloading and using your module in their -program, then they probably don't want or need to download and build -the external test or documentation framework that you use. - -In this case, it's best to map these additional items in a `devDependencies` -object. - -These things will be installed when doing `npm link` or `npm install` -from the root of a package, and can be managed like any other npm -configuration param. See `npm-config(7)` for more on the topic. - -For build steps that are not platform-specific, such as compiling -CoffeeScript or other languages to JavaScript, use the `prepare` -script to do this, and make the required package a devDependency. - -For example: - - { "name": "ethopia-waza", - "description": "a delightfully fruity coffee varietal", - "version": "1.2.3", - "devDependencies": { - "coffee-script": "~1.6.3" - }, - "scripts": { - "prepare": "coffee -o lib/ -c src/waza.coffee" - }, - "main": "lib/waza.js" - } - -The `prepare` script will be run before publishing, so that users -can consume the functionality without requiring them to compile it -themselves. In dev mode (ie, locally running `npm install`), it'll -run this script as well, so that you can test it easily. - -## peerDependencies - -In some cases, you want to express the compatibility of your package with a -host tool or library, while not necessarily doing a `require` of this host. -This is usually referred to as a *plugin*. Notably, your module may be exposing -a specific interface, expected and specified by the host documentation. - -For example: - - { - "name": "tea-latte", - "version": "1.3.5", - "peerDependencies": { - "tea": "2.x" - } - } - -This ensures your package `tea-latte` can be installed *along* with the second -major version of the host package `tea` only. `npm install tea-latte` could -possibly yield the following dependency graph: - - ├── tea-latte@1.3.5 - └── tea@2.2.0 - -**NOTE: npm versions 1 and 2 will automatically install `peerDependencies` if -they are not explicitly depended upon higher in the dependency tree. In the -next major version of npm (npm@3), this will no longer be the case. You will -receive a warning that the peerDependency is not installed instead.** The -behavior in npms 1 & 2 was frequently confusing and could easily put you into -dependency hell, a situation that npm is designed to avoid as much as possible. - -Trying to install another plugin with a conflicting requirement will cause an -error. For this reason, make sure your plugin requirement is as broad as -possible, and not to lock it down to specific patch versions. - -Assuming the host complies with [semver](https://semver.org/), only changes in -the host package's major version will break your plugin. Thus, if you've worked -with every 1.x version of the host package, use `"^1.0"` or `"1.x"` to express -this. If you depend on features introduced in 1.5.2, use `">= 1.5.2 < 2"`. - -## bundledDependencies - -This defines an array of package names that will be bundled when publishing -the package. 
- -In cases where you need to preserve npm packages locally or have them -available through a single file download, you can bundle the packages in a -tarball file by specifying the package names in the `bundledDependencies` -array and executing `npm pack`. - -For example: - -If we define a package.json like this: - -``` -{ - "name": "awesome-web-framework", - "version": "1.0.0", - "bundledDependencies": [ - "renderized", "super-streams" - ] -} -``` -we can obtain `awesome-web-framework-1.0.0.tgz` file by running `npm pack`. -This file contains the dependencies `renderized` and `super-streams` which -can be installed in a new project by executing `npm install -awesome-web-framework-1.0.0.tgz`. - -If this is spelled `"bundleDependencies"`, then that is also honored. - -## optionalDependencies - -If a dependency can be used, but you would like npm to proceed if it cannot be -found or fails to install, then you may put it in the `optionalDependencies` -object. This is a map of package name to version or url, just like the -`dependencies` object. The difference is that build failures do not cause -installation to fail. - -It is still your program's responsibility to handle the lack of the -dependency. For example, something like this: - - try { - var foo = require('foo') - var fooVersion = require('foo/package.json').version - } catch (er) { - foo = null - } - if ( notGoodFooVersion(fooVersion) ) { - foo = null - } - - // .. then later in your program .. - - if (foo) { - foo.doFooThings() - } - -Entries in `optionalDependencies` will override entries of the same name in -`dependencies`, so it's usually best to only put in one place. - -## engines - -You can specify the version of node that your stuff works on: - - { "engines" : { "node" : ">=0.10.3 <0.12" } } - -And, like with dependencies, if you don't specify the version (or if you -specify "\*" as the version), then any version of node will do. - -If you specify an "engines" field, then npm will require that "node" be -somewhere on that list. If "engines" is omitted, then npm will just assume -that it works on node. - -You can also use the "engines" field to specify which versions of npm -are capable of properly installing your program. For example: - - { "engines" : { "npm" : "~1.0.20" } } - -Unless the user has set the `engine-strict` config flag, this -field is advisory only and will only produce warnings when your package is installed as a dependency. - -## engineStrict - -**This feature was removed in npm 3.0.0** - -Prior to npm 3.0.0, this feature was used to treat this package as if the -user had set `engine-strict`. It is no longer used. - -## os - -You can specify which operating systems your -module will run on: - - "os" : [ "darwin", "linux" ] - -You can also blacklist instead of whitelist operating systems, -just prepend the blacklisted os with a '!': - - "os" : [ "!win32" ] - -The host operating system is determined by `process.platform` - -It is allowed to both blacklist, and whitelist, although there isn't any -good reason to do this. - -## cpu - -If your code only runs on certain cpu architectures, -you can specify which ones. - - "cpu" : [ "x64", "ia32" ] - -Like the `os` option, you can also blacklist architectures: - - "cpu" : [ "!arm", "!mips" ] - -The host architecture is determined by `process.arch` - -## preferGlobal - -**DEPRECATED** - -This option used to trigger an npm warning, but it will no longer warn. It is -purely there for informational purposes. 
It is now recommended that you install -any binaries as local devDependencies wherever possible. - -## private - -If you set `"private": true` in your package.json, then npm will refuse -to publish it. - -This is a way to prevent accidental publication of private repositories. If -you would like to ensure that a given package is only ever published to a -specific registry (for example, an internal registry), then use the -`publishConfig` dictionary described below to override the `registry` config -param at publish-time. - -## publishConfig - -This is a set of config values that will be used at publish-time. It's -especially handy if you want to set the tag, registry or access, so that -you can ensure that a given package is not tagged with "latest", published -to the global public registry or that a scoped module is private by default. - -Any config values can be overridden, but only "tag", "registry" and "access" -probably matter for the purposes of publishing. - -See `npm-config(7)` to see the list of config options that can be -overridden. - -## DEFAULT VALUES - -npm will default some values based on package contents. - -* `"scripts": {"start": "node server.js"}` - - If there is a `server.js` file in the root of your package, then npm - will default the `start` command to `node server.js`. - -* `"scripts":{"install": "node-gyp rebuild"}` - - If there is a `binding.gyp` file in the root of your package and you have not defined an `install` or `preinstall` script, npm will - default the `install` command to compile using node-gyp. - -* `"contributors": [...]` - - If there is an `AUTHORS` file in the root of your package, npm will - treat each line as a `Name (url)` format, where email and url - are optional. Lines which start with a `#` or are blank, will be - ignored. - -## SEE ALSO - -* semver(7) -* npm-init(1) -* npm-version(1) -* npm-config(1) -* npm-config(7) -* npm-help(1) -* npm-install(1) -* npm-publish(1) -* npm-uninstall(1) diff --git a/doc/misc/npm-coding-style.md b/doc/misc/npm-coding-style.md deleted file mode 100644 index 1199f63fcc3f0..0000000000000 --- a/doc/misc/npm-coding-style.md +++ /dev/null @@ -1,192 +0,0 @@ -npm-coding-style(7) -- npm's "funny" coding style -================================================= - -## DESCRIPTION - -npm's coding style is a bit unconventional. It is not different for -difference's sake, but rather a carefully crafted style that is -designed to reduce visual clutter and make bugs more apparent. - -If you want to contribute to npm (which is very encouraged), you should -make your code conform to npm's style. - -Note: this concerns npm's code not the specific packages that you can download from the npm registry. - -## Line Length - -Keep lines shorter than 80 characters. It's better for lines to be -too short than to be too long. Break up long lists, objects, and other -statements onto multiple lines. - -## Indentation - -Two-spaces. Tabs are better, but they look like hell in web browsers -(and on GitHub), and node uses 2 spaces, so that's that. - -Configure your editor appropriately. - -## Curly braces - -Curly braces belong on the same line as the thing that necessitates them. - -Bad: - - function () - { - -Good: - - function () { - -If a block needs to wrap to the next line, use a curly brace. Don't -use it if it doesn't. - -Bad: - - if (foo) { bar() } - while (foo) - bar() - -Good: - - if (foo) bar() - while (foo) { - bar() - } - -## Semicolons - -Don't use them except in four situations: - -* `for (;;)` loops. 
They're actually required. -* null loops like: `while (something) ;` (But you'd better have a good - reason for doing that.) -* `case 'foo': doSomething(); break` -* In front of a leading `(` or `[` at the start of the line. - This prevents the expression from being interpreted - as a function call or property access, respectively. - -Some examples of good semicolon usage: - - ;(x || y).doSomething() - ;[a, b, c].forEach(doSomething) - for (var i = 0; i < 10; i ++) { - switch (state) { - case 'begin': start(); continue - case 'end': finish(); break - default: throw new Error('unknown state') - } - end() - } - -Note that starting lines with `-` and `+` also should be prefixed -with a semicolon, but this is much less common. - -## Comma First - -If there is a list of things separated by commas, and it wraps -across multiple lines, put the comma at the start of the next -line, directly below the token that starts the list. Put the -final token in the list on a line by itself. For example: - - var magicWords = [ 'abracadabra' - , 'gesundheit' - , 'ventrilo' - ] - , spells = { 'fireball' : function () { setOnFire() } - , 'water' : function () { putOut() } - } - , a = 1 - , b = 'abc' - , etc - , somethingElse - -## Quotes -Use single quotes for strings except to avoid escaping. - -Bad: - - var notOk = "Just double quotes" - -Good: - - var ok = 'String contains "double" quotes' - var alsoOk = "String contains 'single' quotes or apostrophe" - -## Whitespace - -Put a single space in front of `(` for anything other than a function call. -Also use a single space wherever it makes things more readable. - -Don't leave trailing whitespace at the end of lines. Don't indent empty -lines. Don't use more spaces than are helpful. - -## Functions - -Use named functions. They make stack traces a lot easier to read. - -## Callbacks, Sync/async Style - -Use the asynchronous/non-blocking versions of things as much as possible. -It might make more sense for npm to use the synchronous fs APIs, but this -way, the fs and http and child process stuff all uses the same callback-passing -methodology. - -The callback should always be the last argument in the list. Its first -argument is the Error or null. - -Be very careful never to ever ever throw anything. It's worse than useless. -Just send the error message back as the first argument to the callback. - -## Errors - -Always create a new Error object with your message. Don't just return a -string message to the callback. Stack traces are handy. - -## Logging - -Logging is done using the [npmlog](https://github.com/npm/npmlog) -utility. - -Please clean up logs when they are no longer helpful. In particular, -logging the same object over and over again is not helpful. Logs should -report what's happening so that it's easier to track down where a fault -occurs. - -Use appropriate log levels. See `npm-config(7)` and search for -"loglevel". - -## Case, naming, etc. - -Use `lowerCamelCase` for multiword identifiers when they refer to objects, -functions, methods, properties, or anything not specified in this section. - -Use `UpperCamelCase` for class names (things that you'd pass to "new"). - -Use `all-lower-hyphen-css-case` for multiword filenames and config keys. - -Use named functions. They make stack traces easier to follow. - -Use `CAPS_SNAKE_CASE` for constants, things that should never change -and are rarely used. - -Use a single uppercase letter for function names where the function -would normally be anonymous, but needs to call itself recursively. 
It -makes it clear that it's a "throwaway" function. - -## null, undefined, false, 0 - -Boolean variables and functions should always be either `true` or -`false`. Don't set it to 0 unless it's supposed to be a number. - -When something is intentionally missing or removed, set it to `null`. - -Don't set things to `undefined`. Reserve that value to mean "not yet -set to anything." - -Boolean objects are forbidden. - -## SEE ALSO - -* npm-developers(7) -* npm(1) diff --git a/doc/misc/npm-config.md b/doc/misc/npm-config.md deleted file mode 100644 index 8f04a76010c1a..0000000000000 --- a/doc/misc/npm-config.md +++ /dev/null @@ -1,1243 +0,0 @@ -npm-config(7) -- More than you probably want to know about npm configuration -============================================================================ - -## DESCRIPTION - -npm gets its configuration values from the following sources, sorted by priority: - -### Command Line Flags - -Putting `--foo bar` on the command line sets the `foo` configuration -parameter to `"bar"`. A `--` argument tells the cli parser to stop -reading flags. Using `--flag` without specifying any value will set -the value to `true`. - -Example: `--flag1 --flag2` will set both configuration parameters -to `true`, while `--flag1 --flag2 bar` will set `flag1` to `true`, -and `flag2` to `bar`. Finally, `--flag1 --flag2 -- bar` will set -both configuration parameters to `true`, and the `bar` is taken -as a command argument. - -### Environment Variables - -Any environment variables that start with `npm_config_` will be -interpreted as a configuration parameter. For example, putting -`npm_config_foo=bar` in your environment will set the `foo` -configuration parameter to `bar`. Any environment configurations that -are not given a value will be given the value of `true`. Config -values are case-insensitive, so `NPM_CONFIG_FOO=bar` will work the -same. However, please note that inside [npm-scripts](/misc/scripts) -npm will set its own environment variables and Node will prefer -those lowercase versions over any uppercase ones that you might set. -For details see [this issue](https://github.com/npm/npm/issues/14528). - -Notice that you need to use underscores instead of dashes, so `--allow-same-version` -would become `npm_config_allow_same_version=true`. - -### npmrc Files - -The four relevant files are: - -* per-project configuration file (`/path/to/my/project/.npmrc`) -* per-user configuration file (defaults to `$HOME/.npmrc`; configurable via CLI - option `--userconfig` or environment variable `$NPM_CONFIG_USERCONFIG`) -* global configuration file (defaults to `$PREFIX/etc/npmrc`; configurable via - CLI option `--globalconfig` or environment variable `$NPM_CONFIG_GLOBALCONFIG`) -* npm's built-in configuration file (`/path/to/npm/npmrc`) - -See npmrc(5) for more details. - -### Default Configs - -Run `npm config ls -l` to see a set of configuration parameters that are -internal to npm, and are defaults if nothing else is specified. 
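To make the precedence above concrete, here is a minimal sketch showing the same parameter (`loglevel`, a real config key described later in this document) supplied through each source; the command-line flag beats the environment variable, which beats the npmrc entry:

    # command-line flag (highest priority)
    npm install --loglevel warn

    # environment variable (note underscores instead of dashes)
    npm_config_loglevel=warn npm install

    # per-user npmrc file
    echo "loglevel=warn" >> ~/.npmrc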
- -## Shorthands and Other CLI Niceties - -The following shorthands are parsed on the command-line: - -* `-v`: `--version` -* `-h`, `-?`, `--help`, `-H`: `--usage` -* `-s`, `--silent`: `--loglevel silent` -* `-q`, `--quiet`: `--loglevel warn` -* `-d`: `--loglevel info` -* `-dd`, `--verbose`: `--loglevel verbose` -* `-ddd`: `--loglevel silly` -* `-g`: `--global` -* `-C`: `--prefix` -* `-l`: `--long` -* `-m`: `--message` -* `-p`, `--porcelain`: `--parseable` -* `-reg`: `--registry` -* `-f`: `--force` -* `-desc`: `--description` -* `-S`: `--save` -* `-P`: `--save-prod` -* `-D`: `--save-dev` -* `-O`: `--save-optional` -* `-B`: `--save-bundle` -* `-E`: `--save-exact` -* `-y`: `--yes` -* `-n`: `--yes false` -* `ll` and `la` commands: `ls --long` - -If the specified configuration param resolves unambiguously to a known -configuration parameter, then it is expanded to that configuration -parameter. For example: - - npm ls --par - # same as: - npm ls --parseable - -If multiple single-character shorthands are strung together, and the -resulting combination is unambiguously not some other configuration -param, then it is expanded to its various component pieces. For -example: - - npm ls -gpld - # same as: - npm ls --global --parseable --long --loglevel info - -## Per-Package Config Settings - -When running scripts (see `npm-scripts(7)`) the package.json "config" -keys are overwritten in the environment if there is a config param of -`[@]:`. For example, if the package.json has -this: - - { "name" : "foo" - , "config" : { "port" : "8080" } - , "scripts" : { "start" : "node server.js" } } - -and the server.js is this: - - http.createServer(...).listen(process.env.npm_package_config_port) - -then the user could change the behavior by doing: - - npm config set foo:port 80 - -See package.json(5) for more information. - -## Config Settings - -### access - -* Default: `restricted` -* Type: Access - -When publishing scoped packages, the access level defaults to `restricted`. If -you want your scoped package to be publicly viewable (and installable) set -`--access=public`. The only valid values for `access` are `public` and -`restricted`. Unscoped packages _always_ have an access level of `public`. - -### allow-same-version - -* Default: false -* Type: Boolean - -Prevents throwing an error when `npm version` is used to set the new version -to the same value as the current version. - -### always-auth - -* Default: false -* Type: Boolean - -Force npm to always require authentication when accessing the registry, -even for `GET` requests. - -### also - -* Default: null -* Type: String - -When "dev" or "development" and running local `npm shrinkwrap`, -`npm outdated`, or `npm update`, is an alias for `--dev`. - -### audit - -* Default: true -* Type: Boolean - -When "true" submit audit reports alongside `npm install` runs to the default -registry and all registries configured for scopes. See the documentation -for npm-audit(1) for details on what is submitted. - -### audit-level - -* Default: `"low"` -* Type: `'low'`, `'moderate'`, `'high'`, `'critical'` - -The minimum level of vulnerability for `npm audit` to exit with -a non-zero exit code. - -### auth-type - -* Default: `'legacy'` -* Type: `'legacy'`, `'sso'`, `'saml'`, `'oauth'` - -What authentication strategy to use with `adduser`/`login`. - -### bin-links - -* Default: `true` -* Type: Boolean - -Tells npm to create symlinks (or `.cmd` shims on Windows) for package -executables. - -Set to false to have it not do this. 
This can be used to work around -the fact that some file systems don't support symlinks, even on -ostensibly Unix systems. - -### browser - -* Default: OS X: `"open"`, Windows: `"start"`, Others: `"xdg-open"` -* Type: String - -The browser that is called by the `npm docs` command to open websites. - -### ca - -* Default: The npm CA certificate -* Type: String, Array or null - -The Certificate Authority signing certificate that is trusted for SSL -connections to the registry. Values should be in PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with newlines -replaced by the string "\n". For example: - - ca="-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----" - -Set to `null` to only allow "known" registrars, or to a specific CA cert -to trust only that specific signing authority. - -Multiple CAs can be trusted by specifying an array of certificates: - - ca[]="..." - ca[]="..." - -See also the `strict-ssl` config. - -### cafile - -* Default: `null` -* Type: path - -A path to a file containing one or multiple Certificate Authority signing -certificates. Similar to the `ca` setting, but allows for multiple CA's, as -well as for the CA information to be stored in a file on disk. - -### cache - -* Default: Windows: `%AppData%\npm-cache`, Posix: `~/.npm` -* Type: path - -The location of npm's cache directory. See `npm-cache(1)` - -### cache-lock-stale - -* Default: 60000 (1 minute) -* Type: Number - -The number of ms before cache folder lockfiles are considered stale. - -### cache-lock-retries - -* Default: 10 -* Type: Number - -Number of times to retry to acquire a lock on cache folder lockfiles. - -### cache-lock-wait - -* Default: 10000 (10 seconds) -* Type: Number - -Number of ms to wait for cache lock files to expire. - -### cache-max - -* Default: Infinity -* Type: Number - -**DEPRECATED**: This option has been deprecated in favor of `--prefer-online`. - -`--cache-max=0` is an alias for `--prefer-online`. - -### cache-min - -* Default: 10 -* Type: Number - -**DEPRECATED**: This option has been deprecated in favor of `--prefer-offline`. - -`--cache-min=9999 (or bigger)` is an alias for `--prefer-offline`. - -### cert - -* Default: `null` -* Type: String - -A client certificate to pass when accessing the registry. Values should be in -PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with newlines replaced by the string "\n". For example: - - cert="-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----" - -It is _not_ the path to a certificate file (and there is no "certfile" option). - -### cidr - -* Default: `null` -* Type: String, Array, null - -This is a list of CIDR address to be used when configuring limited access tokens with the `npm token create` command. - -### color - -* Default: true -* Type: Boolean or `"always"` - -If false, never shows colors. If `"always"` then always shows colors. -If true, then only prints color codes for tty file descriptors. - -This option can also be changed using the environment: colors are -disabled when the environment variable `NO_COLOR` is set to any value. - -### depth - -* Default: Infinity -* Type: Number - -The depth to go when recursing directories for `npm ls`, -`npm cache ls`, and `npm outdated`. - -For `npm outdated`, a setting of `Infinity` will be treated as `0` -since that gives more useful information. 
To show the outdated status -of all packages and dependents, use a large integer value, -e.g., `npm outdated --depth 9999` - -### description - -* Default: true -* Type: Boolean - -Show the description in `npm search` - -### dev - -* Default: false -* Type: Boolean - -Install `dev-dependencies` along with packages. - -### dry-run - -* Default: false -* Type: Boolean - -Indicates that you don't want npm to make any changes and that it should -only report what it would have done. This can be passed into any of the -commands that modify your local installation, eg, `install`, `update`, -`dedupe`, `uninstall`. This is NOT currently honored by some network related -commands, eg `dist-tags`, `owner`, etc. - -### editor - -* Default: `EDITOR` environment variable if set, or `"vi"` on Posix, - or `"notepad"` on Windows. -* Type: path - -The command to run for `npm edit` or `npm config edit`. - -### engine-strict - -* Default: false -* Type: Boolean - -If set to true, then npm will stubbornly refuse to install (or even -consider installing) any package that claims to not be compatible with -the current Node.js version. - -### force - -* Default: false -* Type: Boolean - -Makes various commands more forceful. - -* lifecycle script failure does not block progress. -* publishing clobbers previously published versions. -* skips cache when requesting from the registry. -* prevents checks against clobbering non-npm files. - -### fetch-retries - -* Default: 2 -* Type: Number - -The "retries" config for the `retry` module to use when fetching -packages from the registry. - -### fetch-retry-factor - -* Default: 10 -* Type: Number - -The "factor" config for the `retry` module to use when fetching -packages. - -### fetch-retry-mintimeout - -* Default: 10000 (10 seconds) -* Type: Number - -The "minTimeout" config for the `retry` module to use when fetching -packages. - -### fetch-retry-maxtimeout - -* Default: 60000 (1 minute) -* Type: Number - -The "maxTimeout" config for the `retry` module to use when fetching -packages. - -### git - -* Default: `"git"` -* Type: String - -The command to use for git commands. If git is installed on the -computer, but is not in the `PATH`, then set this to the full path to -the git binary. - -### git-tag-version - -* Default: `true` -* Type: Boolean - -Tag the commit when using the `npm version` command. - -### commit-hooks - -* Default: `true` -* Type: Boolean - -Run git commit hooks when using the `npm version` command. - -### global - -* Default: false -* Type: Boolean - -Operates in "global" mode, so that packages are installed into the -`prefix` folder instead of the current working directory. See -`npm-folders(5)` for more on the differences in behavior. - -* packages are installed into the `{prefix}/lib/node_modules` folder, instead of the - current working directory. -* bin files are linked to `{prefix}/bin` -* man pages are linked to `{prefix}/share/man` - -### globalconfig - -* Default: {prefix}/etc/npmrc -* Type: path - -The config file to read for global config options. - -### global-style - -* Default: false -* Type: Boolean - -Causes npm to install the package into your local `node_modules` folder with -the same layout it uses with the global `node_modules` folder. Only your -direct dependencies will show in `node_modules` and everything they depend -on will be flattened in their `node_modules` folders. This obviously will -eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling` will be -preferred. 
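As a quick illustration of the `global` mode described above, this sketch assumes the common Posix default prefix of `/usr/local` (the actual locations are whatever your `prefix` config resolves to, and `some-package` is a placeholder name):

    npm install -g some-package
    # package -> /usr/local/lib/node_modules/some-package
    # bins    -> /usr/local/bin
    # man     -> /usr/local/share/man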
- -### group - -* Default: GID of the current process -* Type: String or Number - -The group to use when running package scripts in global mode as the root -user. - -### heading - -* Default: `"npm"` -* Type: String - -The string that starts all the debugging log output. - -### https-proxy - -* Default: null -* Type: url - -A proxy to use for outgoing https requests. If the `HTTPS_PROXY` or -`https_proxy` or `HTTP_PROXY` or `http_proxy` environment variables are set, -proxy settings will be honored by the underlying `request` library. - -### if-present - -* Default: false -* Type: Boolean - -If true, npm will not exit with an error code when `run-script` is invoked for -a script that isn't defined in the `scripts` section of `package.json`. This -option can be used when it's desirable to optionally run a script when it's -present and fail if the script fails. This is useful, for example, when running -scripts that may only apply for some builds in an otherwise generic CI setup. - -### ignore-prepublish - -* Default: false -* Type: Boolean - -If true, npm will not run `prepublish` scripts. - -### ignore-scripts - -* Default: false -* Type: Boolean - -If true, npm does not run scripts specified in package.json files. - -### init-module - -* Default: ~/.npm-init.js -* Type: path - -A module that will be loaded by the `npm init` command. See the -documentation for the -[init-package-json](https://github.com/isaacs/init-package-json) module -for more information, or npm-init(1). - -### init-author-name - -* Default: "" -* Type: String - -The value `npm init` should use by default for the package author's name. - -### init-author-email - -* Default: "" -* Type: String - -The value `npm init` should use by default for the package author's email. - -### init-author-url - -* Default: "" -* Type: String - -The value `npm init` should use by default for the package author's homepage. - -### init-license - -* Default: "ISC" -* Type: String - -The value `npm init` should use by default for the package license. - -### init-version - -* Default: "1.0.0" -* Type: semver - -The value that `npm init` should use by default for the package -version number, if not already set in package.json. - -### json - -* Default: false -* Type: Boolean - -Whether or not to output JSON data, rather than the normal output. - -This feature is currently experimental, and the output data structures for many -commands is either not implemented in JSON yet, or subject to change. Only the -output from `npm ls --json` and `npm search --json` are currently valid. - -### key - -* Default: `null` -* Type: String - -A client key to pass when accessing the registry. Values should be in PEM -format with newlines replaced by the string "\n". For example: - - key="-----BEGIN PRIVATE KEY-----\nXXXX\nXXXX\n-----END PRIVATE KEY-----" - -It is _not_ the path to a key file (and there is no "keyfile" option). - -### legacy-bundling - -* Default: false -* Type: Boolean - -Causes npm to install the package such that versions of npm prior to 1.4, -such as the one included with node 0.8, can install the package. This -eliminates all automatic deduping. If used with `global-style` this option -will be preferred. - -### link - -* Default: false -* Type: Boolean - -If true, then local installs will link if there is a suitable globally -installed package. - -Note that this means that local installs can cause things to be -installed into the global space at the same time. 
The link is only done -if one of the two conditions are met: - -* The package is not already installed globally, or -* the globally installed version is identical to the version that is - being installed locally. - -### local-address - -* Default: undefined -* Type: IP Address - -The IP address of the local interface to use when making connections -to the npm registry. Must be IPv4 in versions of Node prior to 0.12. - -### loglevel - -* Default: "notice" -* Type: String -* Values: "silent", "error", "warn", "notice", "http", "timing", "info", - "verbose", "silly" - -What level of logs to report. On failure, *all* logs are written to -`npm-debug.log` in the current working directory. - -Any logs of a higher level than the setting are shown. The default is "notice". - -### logstream - -* Default: process.stderr -* Type: Stream - -This is the stream that is passed to the -[npmlog](https://github.com/npm/npmlog) module at run time. - -It cannot be set from the command line, but if you are using npm -programmatically, you may wish to send logs to somewhere other than -stderr. - -If the `color` config is set to true, then this stream will receive -colored output if it is a TTY. - -### logs-max - -* Default: 10 -* Type: Number - -The maximum number of log files to store. - -### long - -* Default: false -* Type: Boolean - -Show extended information in `npm ls` and `npm search`. - -### maxsockets - -* Default: 50 -* Type: Number - -The maximum number of connections to use per origin (protocol/host/port -combination). Passed to the `http` `Agent` used to make the request. - -### message - -* Default: "%s" -* Type: String - -Commit message which is used by `npm version` when creating version commit. - -Any "%s" in the message will be replaced with the version number. - -### metrics-registry - -* Default: The value of `registry` (which defaults to "https://registry.npmjs.org/") -* Type: String - -The registry you want to send cli metrics to if `send-metrics` is true. - -### node-options - -* Default: null -* Type: String - -Options to pass through to Node.js via the `NODE_OPTIONS` environment -variable. This does not impact how npm itself is executed but it does -impact how lifecycle scripts are called. - -### node-version - -* Default: process.version -* Type: semver or false - -The node version to use when checking a package's `engines` map. - -### noproxy - -* Default: null -* Type: String or Array - -A comma-separated string or an array of domain extensions that a proxy should not be used for. - -### offline - -* Default: false -* Type: Boolean - -Force offline mode: no network requests will be done during install. To allow -the CLI to fill in missing cache data, see `--prefer-offline`. - -### onload-script - -* Default: false -* Type: path - -A node module to `require()` when npm loads. Useful for programmatic -usage. - -### only - -* Default: null -* Type: String - -When "dev" or "development" and running local `npm install` without any -arguments, only devDependencies (and their dependencies) are installed. - -When "dev" or "development" and running local `npm ls`, `npm outdated`, or -`npm update`, is an alias for `--dev`. - -When "prod" or "production" and running local `npm install` without any -arguments, only non-devDependencies (and their dependencies) are -installed. - -When "prod" or "production" and running local `npm ls`, `npm outdated`, or -`npm update`, is an alias for `--production`. 
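A short sketch of the `only` setting described above, in its two common spellings:

    npm install --only=prod   # install only non-devDependencies (and their dependencies)
    npm install --only=dev    # install only devDependencies (and their dependencies)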
- -### optional - -* Default: true -* Type: Boolean - -Attempt to install packages in the `optionalDependencies` object. Note -that if these packages fail to install, the overall installation -process is not aborted. - -### otp - -* Default: null -* Type: Number - -This is a one-time password from a two-factor authenticator. It's needed -when publishing or changing package permissions with `npm access`. - -### package-lock - -* Default: true -* Type: Boolean - -If set to false, then ignore `package-lock.json` files when installing. This -will also prevent _writing_ `package-lock.json` if `save` is true. - -When package package-locks are disabled, automatic pruning of extraneous -modules will also be disabled. To remove extraneous modules with -package-locks disabled use `npm prune`. - -This option is an alias for `--shrinkwrap`. - -### package-lock-only - -* Default: false -* Type: Boolean - -If set to true, it will update only the `package-lock.json`, -instead of checking `node_modules` and downloading dependencies. - -### parseable - -* Default: false -* Type: Boolean - -Output parseable results from commands that write to -standard output. For `npm search`, this will be tab-separated table format. - -### prefer-offline - -* Default: false -* Type: Boolean - -If true, staleness checks for cached data will be bypassed, but missing data -will be requested from the server. To force full offline mode, use `--offline`. - -This option is effectively equivalent to `--cache-min=9999999`. - -### prefer-online - -* Default: false -* Type: Boolean - -If true, staleness checks for cached data will be forced, making the CLI look -for updates immediately even for fresh package data. - -### prefix - -* Default: see npm-folders(5) -* Type: path - -The location to install global items. If set on the command line, then -it forces non-global commands to run in the specified folder. - -### preid - -* Default: "" -* Type: String - -The "prerelease identifier" to use as a prefix for the "prerelease" part of a -semver. Like the `rc` in `1.2.0-rc.8`. - -### production - -* Default: false -* Type: Boolean - -Set to true to run in "production" mode. - -1. devDependencies are not installed at the topmost level when running - local `npm install` without any arguments. -2. Set the NODE_ENV="production" for lifecycle scripts. - -### progress - -* Default: true, unless TRAVIS or CI env vars set. -* Type: Boolean - -When set to `true`, npm will display a progress bar during time intensive -operations, if `process.stderr` is a TTY. - -Set to `false` to suppress the progress bar. - -### proxy - -* Default: null -* Type: url - -A proxy to use for outgoing http requests. If the `HTTP_PROXY` or -`http_proxy` environment variables are set, proxy settings will be -honored by the underlying `request` library. - -### read-only - -* Default: false -* Type: Boolean - -This is used to mark a token as unable to publish when configuring limited access tokens with the `npm token create` command. - -### rebuild-bundle - -* Default: true -* Type: Boolean - -Rebuild bundled dependencies after installation. - -### registry - -* Default: https://registry.npmjs.org/ -* Type: url - -The base URL of the npm package registry. - -### rollback - -* Default: true -* Type: Boolean - -Remove failed installs. - -### save - -* Default: true -* Type: Boolean - -Save installed packages to a package.json file as dependencies. - -When used with the `npm rm` command, it removes it from the `dependencies` -object. 
- -Only works if there is already a package.json file present. - -### save-bundle - -* Default: false -* Type: Boolean - -If a package would be saved at install time by the use of `--save`, -`--save-dev`, or `--save-optional`, then also put it in the -`bundleDependencies` list. - -When used with the `npm rm` command, it removes it from the -bundledDependencies list. - -### save-prod - -* Default: false -* Type: Boolean - -Makes sure that a package will be saved into `dependencies` specifically. This -is useful if a package already exists in `devDependencies` or -`optionalDependencies`, but you want to move it to be a production dep. This is -also the default behavior if `--save` is true, and neither `--save-dev` or -`--save-optional` are true. - -### save-dev - -* Default: false -* Type: Boolean - -Save installed packages to a package.json file as `devDependencies`. - -When used with the `npm rm` command, it removes it from the -`devDependencies` object. - -Only works if there is already a package.json file present. - -### save-exact - -* Default: false -* Type: Boolean - -Dependencies saved to package.json using `--save`, `--save-dev` or -`--save-optional` will be configured with an exact version rather than -using npm's default semver range operator. - -### save-optional - -* Default: false -* Type: Boolean - -Save installed packages to a package.json file as -optionalDependencies. - -When used with the `npm rm` command, it removes it from the -`devDependencies` object. - -Only works if there is already a package.json file present. - -### save-prefix - -* Default: '^' -* Type: String - -Configure how versions of packages installed to a package.json file via -`--save` or `--save-dev` get prefixed. - -For example if a package has version `1.2.3`, by default its version is -set to `^1.2.3` which allows minor upgrades for that package, but after -`npm config set save-prefix='~'` it would be set to `~1.2.3` which only allows -patch upgrades. - -### scope - -* Default: the scope of the current project, if any, or "" -* Type: String - -Associate an operation with a scope for a scoped registry. Useful when logging -in to a private registry for the first time: -`npm login --scope=@organization --registry=registry.organization.com`, which -will cause `@organization` to be mapped to the registry for future installation -of packages specified according to the pattern `@organization/package`. - -### script-shell - -* Default: `null` -* Type: path - -The shell to use for scripts run with the `npm run` command. - -### scripts-prepend-node-path - -* Default: "warn-only" -* Type: Boolean, `"auto"` or `"warn-only"` - -If set to `true`, add the directory in which the current `node` executable -resides to the `PATH` environment variable when running scripts, -even if that means that `npm` will invoke a different `node` executable than -the one which it is running. - -If set to `false`, never modify `PATH` with that. - -If set to `"warn-only"`, never modify `PATH` but print a warning if `npm` thinks -that you may want to run it with `true`, e.g. because the `node` executable -in the `PATH` is not the one `npm` was invoked with. - -If set to `auto`, only add that directory to the `PATH` environment variable -if the `node` executable with which `npm` was invoked and the one that is found -first on the `PATH` are different. - -### searchexclude - -* Default: "" -* Type: String - -Space-separated options that limit the results from search. 
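Pulling together the `save`, `save-prefix`, and `save-exact` settings described above, a hedged sketch (the package name is an arbitrary placeholder and `1.2.3` stands in for whatever version actually gets installed):

    npm install foo --save          # writes "foo": "^1.2.3" (default save-prefix is "^")
    npm config set save-prefix='~'
    npm install foo --save          # now writes "foo": "~1.2.3"
    npm install foo --save-exact    # writes "foo": "1.2.3" exactly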
- -### searchopts - -* Default: "" -* Type: String - -Space-separated options that are always passed to search. - -### searchlimit - -* Default: 20 -* Type: Number - -Number of items to limit search results to. Will not apply at all to legacy -searches. - -### searchstaleness - -* Default: 900 (15 minutes) -* Type: Number - -The age of the cache, in seconds, before another registry request is made if -using legacy search endpoint. - -### send-metrics - -* Default: false -* Type: Boolean - -If true, success/failure metrics will be reported to the registry stored in -`metrics-registry`. These requests contain the number of successful and -failing runs of the npm CLI and the time period overwhich those counts were -gathered. No identifying information is included in these requests. - -### shell - -* Default: SHELL environment variable, or "bash" on Posix, or "cmd" on - Windows -* Type: path - -The shell to run for the `npm explore` command. - -### shrinkwrap - -* Default: true -* Type: Boolean - -If set to false, then ignore `npm-shrinkwrap.json` files when installing. This -will also prevent _writing_ `npm-shrinkwrap.json` if `save` is true. - -This option is an alias for `--package-lock`. - -### sign-git-commit - -* Default: false -* Type: Boolean - -If set to true, then the `npm version` command will commit the new package -version using `-S` to add a signature. - -Note that git requires you to have set up GPG keys in your git configs -for this to work properly. - -### sign-git-tag - -* Default: false -* Type: Boolean - -If set to true, then the `npm version` command will tag the version -using `-s` to add a signature. - -Note that git requires you to have set up GPG keys in your git configs -for this to work properly. - -### sso-poll-frequency - -* Default: 500 -* Type: Number - -When used with SSO-enabled `auth-type`s, configures how regularly the registry -should be polled while the user is completing authentication. - -### sso-type - -* Default: 'oauth' -* Type: 'oauth', 'saml', or null - -If `--auth-type=sso`, the type of SSO type to use. - -### strict-ssl - -* Default: true -* Type: Boolean - -Whether or not to do SSL key validation when making requests to the -registry via https. - -See also the `ca` config. - -### tag - -* Default: latest -* Type: String - -If you ask npm to install a package and don't tell it a specific version, then -it will install the specified tag. - -Also the tag that is added to the package@version specified by the `npm -tag` command, if no explicit tag is given. - -### tag-version-prefix - -* Default: `"v"` -* Type: String - -If set, alters the prefix used when tagging a new version when performing a -version increment using `npm-version`. To remove the prefix altogether, set it -to the empty string: `""`. - -Because other tools may rely on the convention that npm version tags look like -`v1.0.0`, _only use this property if it is absolutely necessary_. In -particular, use care when overriding this setting for public packages. - -### timing - -* Default: `false` -* Type: Boolean - -If true, writes an `npm-debug` log to `_logs` and timing information to -`_timing.json`, both in your cache. `_timing.json` is a newline delimited -list of JSON objects. You can quickly view it with this -[json](https://www.npmjs.com/package/json) command line: -`json -g < ~/.npm/_timing.json`. - -### tmp - -* Default: TMPDIR environment variable, or "/tmp" -* Type: path - -Where to store temporary files and folders. 
All temp files are deleted -on success, but left behind on failure for forensic purposes. - -### unicode - -* Default: false on windows, true on mac/unix systems with a unicode locale -* Type: Boolean - -When set to true, npm uses unicode characters in the tree output. When -false, it uses ascii characters to draw trees. - -### unsafe-perm - -* Default: false if running as root, true otherwise -* Type: Boolean - -Set to true to suppress the UID/GID switching when running package -scripts. If set explicitly to false, then installing as a non-root user -will fail. - -### update-notifier - -* Default: true -* Type: Boolean - -Set to false to suppress the update notification when using an older -version of npm than the latest. - -### usage - -* Default: false -* Type: Boolean - -Set to show short usage output (like the -H output) -instead of complete help when doing `npm-help(1)`. - -### user - -* Default: "nobody" -* Type: String or Number - -The UID to set to when running package scripts as root. - -### userconfig - -* Default: ~/.npmrc -* Type: path - -The location of user-level configuration settings. - -### umask - -* Default: 022 -* Type: Octal numeric string in range 0000..0777 (0..511) - -The "umask" value to use when setting the file creation mode on files -and folders. - -Folders and executables are given a mode which is `0777` masked against -this value. Other files are given a mode which is `0666` masked against -this value. Thus, the defaults are `0755` and `0644` respectively. - -### user-agent - -* Default: node/{process.version} {process.platform} {process.arch} -* Type: String - -Sets the User-Agent header sent with requests to the registry. - -### version - -* Default: false -* Type: boolean - -If true, output the npm version and exit successfully. - -Only relevant when specified explicitly on the command line. - -### versions - -* Default: false -* Type: boolean - -If true, output the npm version as well as node's `process.versions` map, and -exit successfully. - -Only relevant when specified explicitly on the command line. - -### viewer - -* Default: "man" on Posix, "browser" on Windows -* Type: path - -The program to use to view help content. - -Set to `"browser"` to view html help content in the default web browser. - -## SEE ALSO - -* npm-config(1) -* npmrc(5) -* npm-scripts(7) -* npm-folders(5) -* npm(1) diff --git a/doc/misc/npm-developers.md b/doc/misc/npm-developers.md deleted file mode 100644 index 55c8d9b08d131..0000000000000 --- a/doc/misc/npm-developers.md +++ /dev/null @@ -1,233 +0,0 @@ -npm-developers(7) -- Developer Guide -==================================== - -## DESCRIPTION - -So, you've decided to use npm to develop (and maybe publish/deploy) -your project. - -Fantastic! - -There are a few things that you need to do above the simple steps -that your users will do to install your program. - -## About These Documents - -These are man pages. If you install npm, you should be able to -then do `man npm-thing` to get the documentation on a particular -topic, or `npm help thing` to see the same information. - -## What is a `package` - -A package is: - -* a) a folder containing a program described by a package.json file -* b) a gzipped tarball containing (a) -* c) a url that resolves to (b) -* d) a `<name>@<version>` that is published on the registry with (c) -* e) a `<name>@<tag>` that points to (d) -* f) a `<name>` that has a "latest" tag satisfying (e) -* g) a `git` url that, when cloned, results in (a).
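For orientation, each of those package forms can be handed directly to `npm install`; a hedged sketch follows (every name, path, and URL here is a placeholder):

    npm install ./my-folder                         # (a) a folder described by a package.json
    npm install ./my-package-1.0.0.tgz              # (b) a gzipped tarball of (a)
    npm install https://example.com/my-package.tgz  # (c) a url that resolves to (b)
    npm install my-package@1.0.0                    # (d) a name@version published on the registry
    npm install my-package@beta                     # (e) a name@tag that points to (d)
    npm install my-package                          # (f) a bare name, resolved via its "latest" tag
    npm install git://github.com/user/project.git   # (g) a git url that clones to (a)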
- -Even if you never publish your package, you can still get a lot of -benefits of using npm if you just want to write a node program (a), and -perhaps if you also want to be able to easily install it elsewhere -after packing it up into a tarball (b). - -Git urls can be of the form: - - git://github.com/user/project.git#commit-ish - git+ssh://user@hostname:project.git#commit-ish - git+http://user@hostname/project/blah.git#commit-ish - git+https://user@hostname/project/blah.git#commit-ish - -The `commit-ish` can be any tag, sha, or branch which can be supplied as -an argument to `git checkout`. The default is `master`. - -## The package.json File - -You need to have a `package.json` file in the root of your project to do -much of anything with npm. That is basically the whole interface. - -See `package.json(5)` for details about what goes in that file. At the very -least, you need: - -* name: - This should be a string that identifies your project. Please do not - use the name to specify that it runs on node, or is in JavaScript. - You can use the "engines" field to explicitly state the versions of - node (or whatever else) that your program requires, and it's pretty - well assumed that it's JavaScript. - - It does not necessarily need to match your github repository name. - - So, `node-foo` and `bar-js` are bad names. `foo` or `bar` are better. - -* version: - A semver-compatible version. - -* engines: - Specify the versions of node (or whatever else) that your program - runs on. The node API changes a lot, and there may be bugs or new - functionality that you depend on. Be explicit. - -* author: - Take some credit. - -* scripts: - If you have a special compilation or installation script, then you - should put it in the `scripts` object. You should definitely have at - least a basic smoke-test command as the "scripts.test" field. - See npm-scripts(7). - -* main: - If you have a single module that serves as the entry point to your - program (like what the "foo" package gives you at require("foo")), - then you need to specify that in the "main" field. - -* directories: - This is an object mapping names to folders. The best ones to include are - "lib" and "doc", but if you use "man" to specify a folder full of man pages, - they'll get installed just like these ones. - -You can use `npm init` in the root of your package in order to get you -started with a pretty basic package.json file. See `npm-init(1)` for -more info. - -## Keeping files *out* of your package - -Use a `.npmignore` file to keep stuff out of your package. If there's -no `.npmignore` file, but there *is* a `.gitignore` file, then npm will -ignore the stuff matched by the `.gitignore` file. If you *want* to -include something that is excluded by your `.gitignore` file, you can -create an empty `.npmignore` file to override it. Like `git`, `npm` looks -for `.npmignore` and `.gitignore` files in all subdirectories of your -package, not only the root directory. - -`.npmignore` files follow the [same pattern rules](https://git-scm.com/book/en/v2/Git-Basics-Recording-Changes-to-the-Repository#Ignoring-Files) -as `.gitignore` files: - -* Blank lines or lines starting with `#` are ignored. -* Standard glob patterns work. -* You can end patterns with a forward slash `/` to specify a directory. -* You can negate a pattern by starting it with an exclamation point `!`. 
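A hypothetical `.npmignore` applying those rules might look like this (the file and directory names are made up for illustration):

    # blank lines and lines starting with "#" are ignored

    # standard glob patterns work
    *.log

    # a trailing slash matches a directory
    coverage/

    # a leading "!" negates a pattern and keeps the file
    !important.log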
- -By default, the following paths and files are ignored, so there's no -need to add them to `.npmignore` explicitly: - -* `.*.swp` -* `._*` -* `.DS_Store` -* `.git` -* `.hg` -* `.npmrc` -* `.lock-wscript` -* `.svn` -* `.wafpickle-*` -* `config.gypi` -* `CVS` -* `npm-debug.log` - -Additionally, everything in `node_modules` is ignored, except for -bundled dependencies. npm automatically handles this for you, so don't -bother adding `node_modules` to `.npmignore`. - -The following paths and files are never ignored, so adding them to -`.npmignore` is pointless: - -* `package.json` -* `README` (and its variants) -* `CHANGELOG` (and its variants) -* `LICENSE` / `LICENCE` - -If, given the structure of your project, you find `.npmignore` to be a -maintenance headache, you might instead try populating the `files` -property of `package.json`, which is an array of file or directory names -that should be included in your package. Sometimes a whitelist is easier -to manage than a blacklist. - -### Testing whether your `.npmignore` or `files` config works - -If you want to double check that your package will include only the files -you intend it to when published, you can run the `npm pack` command locally -which will generate a tarball in the working directory, the same way it -does for publishing. - -## Link Packages - -`npm link` is designed to install a development package and see the -changes in real time without having to keep re-installing it. (You do -need to either re-link or `npm rebuild -g` to update compiled packages, -of course.) - -More info at `npm-link(1)`. - -## Before Publishing: Make Sure Your Package Installs and Works - -**This is important.** - -If you can not install it locally, you'll have -problems trying to publish it. Or, worse yet, you'll be able to -publish it, but you'll be publishing a broken or pointless package. -So don't do that. - -In the root of your package, do this: - - npm install . -g - -That'll show you that it's working. If you'd rather just create a symlink -package that points to your working directory, then do this: - - npm link - -Use `npm ls -g` to see if it's there. - -To test a local install, go into some other folder, and then do: - - cd ../some-other-folder - npm install ../my-package - -to install it locally into the node_modules folder in that other place. - -Then go into the node-repl, and try using require("my-thing") to -bring in your module's main module. - -## Create a User Account - -Create a user with the adduser command. It works like this: - - npm adduser - -and then follow the prompts. - -This is documented better in npm-adduser(1). - -## Publish your package - -This part's easy. In the root of your folder, do this: - - npm publish - -You can give publish a url to a tarball, or a filename of a tarball, -or a path to a folder. - -Note that pretty much **everything in that folder will be exposed** -by default. So, if you have secret stuff in there, use a -`.npmignore` file to list out the globs to ignore, or publish -from a fresh checkout. - -## Brag about it - -Send emails, write blogs, blab in IRC. - -Tell the world how easy it is to install your program! 
- -## SEE ALSO - -* npm(1) -* npm-init(1) -* package.json(5) -* npm-scripts(7) -* npm-publish(1) -* npm-adduser(1) -* npm-registry(7) diff --git a/doc/misc/npm-disputes.md b/doc/misc/npm-disputes.md deleted file mode 100644 index 8c9f0489f9238..0000000000000 --- a/doc/misc/npm-disputes.md +++ /dev/null @@ -1,130 +0,0 @@ -npm-disputes(7) -- Handling Module Name Disputes -================================================ - -This document describes the steps that you should take to resolve module name -disputes with other npm publishers. It also describes special steps you should -take about names you think infringe your trademarks. - -This document is a clarification of the acceptable behavior outlined in the -[npm Code of Conduct](https://www.npmjs.com/policies/conduct), and nothing in -this document should be interpreted to contradict any aspect of the npm Code of -Conduct. - -## TL;DR - -1. Get the author email with `npm owner ls ` -2. Email the author, CC -3. After a few weeks, if there's no resolution, we'll sort it out. - -Don't squat on package names. Publish code or move out of the way. - -## DESCRIPTION - -There sometimes arise cases where a user publishes a module, and then later, -some other user wants to use that name. Here are some common ways that happens -(each of these is based on actual events.) - -1. Alice writes a JavaScript module `foo`, which is not node-specific. Alice - doesn't use node at all. Yusuf wants to use `foo` in node, so he wraps it in - an npm module. Some time later, Alice starts using node, and wants to take - over management of her program. -2. Yusuf writes an npm module `foo`, and publishes it. Perhaps much later, Alice - finds a bug in `foo`, and fixes it. She sends a pull request to Yusuf, but - Yusuf doesn't have the time to deal with it, because he has a new job and a - new baby and is focused on his new Erlang project, and kind of not involved - with node any more. Alice would like to publish a new `foo`, but can't, - because the name is taken. -3. Yusuf writes a 10-line flow-control library, and calls it `foo`, and - publishes it to the npm registry. Being a simple little thing, it never - really has to be updated. Alice works for Foo Inc, the makers of the - critically acclaimed and widely-marketed `foo` JavaScript toolkit framework. - They publish it to npm as `foojs`, but people are routinely confused when - `npm install foo` is some different thing. -4. Yusuf writes a parser for the widely-known `foo` file format, because he - needs it for work. Then, he gets a new job, and never updates the prototype. - Later on, Alice writes a much more complete `foo` parser, but can't publish, - because Yusuf's `foo` is in the way. - -1. `npm owner ls foo`. This will tell Alice the email address of the owner - (Yusuf). -2. Alice emails Yusuf, explaining the situation **as respectfully as possible**, - and what she would like to do with the module name. She adds the npm support - staff to the CC list of the email. Mention in the email - that Yusuf can run npm owner `add alice foo` to add Alice as an owner of the - foo package. -3. After a reasonable amount of time, if Yusuf has not responded, or if Yusuf - and Alice can't come to any sort of resolution, email support - and we'll sort it out. ("Reasonable" is usually at least - 4 weeks.) - -## REASONING - -In almost every case so far, the parties involved have been able to reach an -amicable resolution without any major intervention. 
Most people really do want -to be reasonable, and are probably not even aware that they're in your way. - -Module ecosystems are most vibrant and powerful when they are as self-directed -as possible. If an admin one day deletes something you had worked on, then that -is going to make most people quite upset, regardless of the justification. When -humans solve their problems by talking to other humans with respect, everyone -has the chance to end up feeling good about the interaction. - -## EXCEPTIONS - -Some things are not allowed, and will be removed without discussion if they are -brought to the attention of the npm registry admins, including but not limited -to: - -1. Malware (that is, a package designed to exploit or harm the machine on which - it is installed). -2. Violations of copyright or licenses (for example, cloning an MIT-licensed - program, and then removing or changing the copyright and license statement). -3. Illegal content. -4. "Squatting" on a package name that you plan to use, but aren't actually - using. Sorry, I don't care how great the name is, or how perfect a fit it is - for the thing that someday might happen. If someone wants to use it today, - and you're just taking up space with an empty tarball, you're going to be - evicted. -5. Putting empty packages in the registry. Packages must have SOME - functionality. It can be silly, but it can't be nothing. (See also: - squatting.) -6. Doing weird things with the registry, like using it as your own personal - application database or otherwise putting non-packagey things into it. -7. Other things forbidden by the npm - [Code of Conduct](https://www.npmjs.com/policies/conduct) such as hateful - language, pornographic content, or harassment. - -If you see bad behavior like this, please report it to right -away. **You are never expected to resolve abusive behavior on your own. We are -here to help.** - -## TRADEMARKS - -If you think another npm publisher is infringing your trademark, such as by -using a confusingly similar package name, email with a link to -the package or user account on [https://www.npmjs.com/](https://www.npmjs.com/). -Attach a copy of your trademark registration certificate. - -If we see that the package's publisher is intentionally misleading others by -misusing your registered mark without permission, we will transfer the package -name to you. Otherwise, we will contact the package publisher and ask them to -clear up any confusion with changes to their package's `README` file or -metadata. - -## CHANGES - -This is a living document and may be updated from time to time. Please refer to -the [git history for this document](https://github.com/npm/cli/commits/latest/doc/misc/npm-disputes.md) -to view the changes. - -## LICENSE - -Copyright (C) npm, Inc., All rights reserved - -This document may be reused under a Creative Commons Attribution-ShareAlike -License. - -## SEE ALSO - -* npm-registry(7) -* npm-owner(1) diff --git a/doc/misc/npm-orgs.md b/doc/misc/npm-orgs.md deleted file mode 100644 index 3db22f8c7eab4..0000000000000 --- a/doc/misc/npm-orgs.md +++ /dev/null @@ -1,90 +0,0 @@ -npm-orgs(7) -- Working with Teams & Orgs -======================================== - -## DESCRIPTION - -There are three levels of org users: - -1. Super admin, controls billing & adding people to the org. -2. Team admin, manages team membership & package access. -3. Developer, works on packages they are given access to. - -The super admin is the only person who can add users to the org because it impacts the monthly bill. 
The super admin will use the website to manage membership. Every org has a `developers` team that all users are automatically added to. - -The team admin is the person who manages team creation, team membership, and package access for teams. The team admin grants package access to teams, not individuals. - -The developer will be able to access packages based on the teams they are on. Access is either read-write or read-only. - -There are two main commands: - -1. `npm team` see npm-team(1) for more details -2. `npm access` see npm-access(1) for more details - -## Team Admins create teams - -* Check who you’ve added to your org: - -``` -npm team ls :developers -``` - -* Each org is automatically given a `developers` team, so you can see the whole list of team members in your org. This team automatically gets read-write access to all packages, but you can change that with the `access` command. - -* Create a new team: - -``` -npm team create -``` - -* Add members to that team: - -``` -npm team add -``` - -## Publish a package and adjust package access - -* In package directory, run - -``` -npm init --scope= -``` -to scope it for your org & publish as usual - -* Grant access: - -``` -npm access grant [] -``` - -* Revoke access: - -``` -npm access revoke [] -``` - -## Monitor your package access - -* See what org packages a team member can access: - -``` -npm access ls-packages -``` - -* See packages available to a specific team: - -``` -npm access ls-packages -``` - -* Check which teams are collaborating on a package: - -``` -npm access ls-collaborators -``` - -## SEE ALSO - -* npm-team(1) -* npm-access(1) -* npm-scope(7) diff --git a/doc/misc/npm-registry.md b/doc/misc/npm-registry.md deleted file mode 100644 index 03966007cad63..0000000000000 --- a/doc/misc/npm-registry.md +++ /dev/null @@ -1,100 +0,0 @@ -npm-registry(7) -- The JavaScript Package Registry -================================================== - -## DESCRIPTION - -To resolve packages by name and version, npm talks to a registry website -that implements the CommonJS Package Registry specification for reading -package info. - -npm is configured to use npm, Inc.'s public registry at - by default. Use of the npm public registry is -subject to terms of use available at . - -You can configure npm to use any compatible registry you like, and even run -your own registry. Use of someone else's registry may be governed by their -terms of use. - -npm's package registry implementation supports several -write APIs as well, to allow for publishing packages and managing user -account information. - -The npm public registry is powered by a CouchDB database, -of which there is a public mirror at -. The code for the couchapp is -available at . - -The registry URL used is determined by the scope of the package (see -`npm-scope(7)`). If no scope is specified, the default registry is used, which is -supplied by the `registry` config parameter. See `npm-config(1)`, -`npmrc(5)`, and `npm-config(7)` for more on managing npm's configuration. - -## Does npm send any information about me back to the registry? - -Yes. - -When making requests of the registry npm adds two headers with information -about your environment: - -* `Npm-Scope` – If your project is scoped, this header will contain its - scope. In the future npm hopes to build registry features that use this - information to allow you to customize your experience for your - organization. 
-* `Npm-In-CI` – Set to "true" if npm believes this install is running in a - continuous integration environment, "false" otherwise. This is detected by - looking for the following environment variables: `CI`, `TDDIUM`, - `JENKINS_URL`, `bamboo.buildKey`. If you'd like to learn more you may find - the [original PR](https://github.com/npm/npm-registry-client/pull/129) - interesting. - This is used to gather better metrics on how npm is used by humans, versus - build farms. - -The npm registry does not try to correlate the information in these headers -with any authenticated accounts that may be used in the same requests. - -## Can I run my own private registry? - -Yes! - -The easiest way is to replicate the couch database, and use the same (or -similar) design doc to implement the APIs. - -If you set up continuous replication from the official CouchDB, and then -set your internal CouchDB as the registry config, then you'll be able -to read any published packages, in addition to your private ones, and by -default will only publish internally. - -If you then want to publish a package for the whole world to see, you can -simply override the `--registry` option for that `publish` command. - -## I don't want my package published in the official registry. It's private. - -Set `"private": true` in your package.json to prevent it from being -published at all, or -`"publishConfig":{"registry":"http://my-internal-registry.local"}` -to force it to be published only to your internal registry. - -See `package.json(5)` for more info on what goes in the package.json file. - -## Will you replicate from my registry into the public one? - -No. If you want things to be public, then publish them into the public -registry using npm. What little security there is would be for nought -otherwise. - -## Do I have to use couchdb to build a registry that npm can talk to? - -No, but it's way easier. Basically, yes, you do, or you have to -effectively implement the entire CouchDB API anyway. - -## Is there a website or something to see package docs and such? - -Yes, head over to - -## SEE ALSO - -* npm-config(1) -* npm-config(7) -* npmrc(5) -* npm-developers(7) -* npm-disputes(7) diff --git a/doc/misc/npm-scope.md b/doc/misc/npm-scope.md deleted file mode 100644 index a65af92bcdcb6..0000000000000 --- a/doc/misc/npm-scope.md +++ /dev/null @@ -1,113 +0,0 @@ -npm-scope(7) -- Scoped packages -=============================== - -## DESCRIPTION - -All npm packages have a name. Some package names also have a scope. A scope -follows the usual rules for package names (URL-safe characters, no leading dots -or underscores). When used in package names, scopes are preceded by an `@` symbol -and followed by a slash, e.g. - - @somescope/somepackagename - -Scopes are a way of grouping related packages together, and also affect a few -things about the way npm treats the package. - -Each npm user/organization has their own scope, and only you can add packages -in your scope. This means you don't have to worry about someone taking your -package name ahead of you. Thus it is also a good way to signal official packages -for organizations. - -Scoped packages can be published and installed as of `npm@2` and are supported -by the primary npm registry. Unscoped packages can depend on scoped packages and -vice versa. The npm client is backwards-compatible with unscoped registries, -so it can be used to work with scoped and unscoped registries at the same time. 
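For example, a minimal sketch of starting a package under your own scope (the scope name `@myusername` is illustrative):

```bash
# Initialize a new package under your own scope
npm init --scope=@myusername
# package.json then carries a scoped name such as "@myusername/mypackage"
```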
- -## Installing scoped packages - -Scoped packages are installed to a sub-folder of the regular installation -folder, e.g. if your other packages are installed in `node_modules/packagename`, -scoped modules will be installed in `node_modules/@myorg/packagename`. The scope -folder (`@myorg`) is simply the name of the scope preceded by an `@` symbol, and can -contain any number of scoped packages. - -A scoped package is installed by referencing it by name, preceded by an -`@` symbol, in `npm install`: - - npm install @myorg/mypackage - -Or in `package.json`: - - "dependencies": { - "@myorg/mypackage": "^1.3.0" - } - -Note that if the `@` symbol is omitted, in either case, npm will instead attempt to -install from GitHub; see `npm-install(1)`. - -## Requiring scoped packages - -Because scoped packages are installed into a scope folder, you have to -include the name of the scope when requiring them in your code, e.g. - - require('@myorg/mypackage') - -There is nothing special about the way Node treats scope folders. This -simply requires the `mypackage` module in the folder named `@myorg`. - -## Publishing scoped packages - -Scoped packages can be published from the CLI as of `npm@2` and can be -published to any registry that supports them, including the primary npm -registry. - -(As of 2015-04-19, and with npm 2.0 or better, the primary npm registry -**does** support scoped packages.) - -If you wish, you may associate a scope with a registry; see below. - -### Publishing public scoped packages to the primary npm registry - -To publish a public scoped package, you must specify `--access public` with -the initial publication. This will publish the package and set access -to `public` as if you had run `npm access public` after publishing. - -### Publishing private scoped packages to the npm registry - -To publish a private scoped package to the npm registry, you must have -an [npm Private Modules](https://docs.npmjs.com/private-modules/intro) -account. - -You can then publish the module with `npm publish` or `npm publish ---access restricted`, and it will be present in the npm registry, with -restricted access. You can then change the access permissions, if -desired, with `npm access` or on the npmjs.com website. - -## Associating a scope with a registry - -Scopes can be associated with a separate registry. This allows you to -seamlessly use a mix of packages from the primary npm registry and one or more -private registries, such as npm Enterprise. - -You can associate a scope with a registry at login, e.g. - - npm login --registry=http://reg.example.com --scope=@myco - -Scopes have a many-to-one relationship with registries: one registry can -host multiple scopes, but a scope only ever points to one registry. - -You can also associate a scope with a registry using `npm config`: - - npm config set @myco:registry http://reg.example.com - -Once a scope is associated with a registry, any `npm install` for a package -with that scope will request packages from that registry instead. Any -`npm publish` for a package name that contains the scope will be published to -that registry instead. 
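As a quick check (reusing the illustrative `@myco` scope and registry URL from the examples above), you can read the association back and exercise it:

```bash
# Read back the scope-to-registry association
npm config get @myco:registry
# http://reg.example.com

# Installs of packages in that scope now go to the associated registry
npm install @myco/some-private-package
```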
- -## SEE ALSO - -* npm-install(1) -* npm-publish(1) -* npm-access(1) -* npm-registry(7) diff --git a/doc/misc/npm-scripts.md b/doc/misc/npm-scripts.md deleted file mode 100644 index 22ded49001632..0000000000000 --- a/doc/misc/npm-scripts.md +++ /dev/null @@ -1,268 +0,0 @@ -npm-scripts(7) -- How npm handles the "scripts" field -===================================================== - -## DESCRIPTION - -npm supports the "scripts" property of the package.json file, for the -following scripts: - -* prepublish: - Run BEFORE the package is packed and published, as well as on local `npm - install` without any arguments. (See below) -* prepare: - Run both BEFORE the package is packed and published, on local `npm - install` without any arguments, and when installing git dependencies (See - below). This is run AFTER `prepublish`, but BEFORE `prepublishOnly`. -* prepublishOnly: - Run BEFORE the package is prepared and packed, ONLY on `npm publish`. (See - below.) -* prepack: - run BEFORE a tarball is packed (on `npm pack`, `npm publish`, and when - installing git dependencies) -* postpack: - Run AFTER the tarball has been generated and moved to its final destination. -* publish, postpublish: - Run AFTER the package is published. -* preinstall: - Run BEFORE the package is installed -* install, postinstall: - Run AFTER the package is installed. -* preuninstall, uninstall: - Run BEFORE the package is uninstalled. -* postuninstall: - Run AFTER the package is uninstalled. -* preversion: - Run BEFORE bumping the package version. -* version: - Run AFTER bumping the package version, but BEFORE commit. -* postversion: - Run AFTER bumping the package version, and AFTER commit. -* pretest, test, posttest: - Run by the `npm test` command. -* prestop, stop, poststop: - Run by the `npm stop` command. -* prestart, start, poststart: - Run by the `npm start` command. -* prerestart, restart, postrestart: - Run by the `npm restart` command. Note: `npm restart` will run the - stop and start scripts if no `restart` script is provided. -* preshrinkwrap, shrinkwrap, postshrinkwrap: - Run by the `npm shrinkwrap` command. - -Additionally, arbitrary scripts can be executed by running `npm -run-script `. *Pre* and *post* commands with matching -names will be run for those as well (e.g. `premyscript`, `myscript`, -`postmyscript`). Scripts from dependencies can be run with -`npm explore -- npm run `. - -## PREPUBLISH AND PREPARE - -### DEPRECATION NOTE - -Since `npm@1.1.71`, the npm CLI has run the `prepublish` script for both `npm -publish` and `npm install`, because it's a convenient way to prepare a package -for use (some common use cases are described in the section below). It has -also turned out to be, in practice, [very -confusing](https://github.com/npm/npm/issues/10074). As of `npm@4.0.0`, a new -event has been introduced, `prepare`, that preserves this existing behavior. A -_new_ event, `prepublishOnly` has been added as a transitional strategy to -allow users to avoid the confusing behavior of existing npm versions and only -run on `npm publish` (for instance, running the tests one last time to ensure -they're in good shape). - -See for a much lengthier -justification, with further reading, for this change. - -### USE CASES - -If you need to perform operations on your package before it is used, in a way -that is not dependent on the operating system or architecture of the -target system, use a `prepublish` script. This includes -tasks such as: - -* Compiling CoffeeScript source code into JavaScript. 
-* Creating minified versions of JavaScript source code. -* Fetching remote resources that your package will use. - -The advantage of doing these things at `prepublish` time is that they can be done once, in a -single place, thus reducing complexity and variability. -Additionally, this means that: - -* You can depend on `coffee-script` as a `devDependency`, and thus - your users don't need to have it installed. -* You don't need to include minifiers in your package, reducing - the size for your users. -* You don't need to rely on your users having `curl` or `wget` or - other system tools on the target machines. - -## DEFAULT VALUES - -npm will default some script values based on package contents. - -* `"start": "node server.js"`: - - If there is a `server.js` file in the root of your package, then npm - will default the `start` command to `node server.js`. - -* `"install": "node-gyp rebuild"`: - - If there is a `binding.gyp` file in the root of your package and you - haven't defined your own `install` or `preinstall` scripts, npm will - default the `install` command to compile using node-gyp. - -## USER - -If npm was invoked with root privileges, then it will change the uid -to the user account or uid specified by the `user` config, which -defaults to `nobody`. Set the `unsafe-perm` flag to run scripts with -root privileges. - -## ENVIRONMENT - -Package scripts run in an environment where many pieces of information -are made available regarding the setup of npm and the current state of -the process. - - -### path - -If you depend on modules that define executable scripts, like test -suites, then those executables will be added to the `PATH` for -executing the scripts. So, if your package.json has this: - - { "name" : "foo" - , "dependencies" : { "bar" : "0.1.x" } - , "scripts": { "start" : "bar ./test" } } - -then you could run `npm start` to execute the `bar` script, which is -exported into the `node_modules/.bin` directory on `npm install`. - -### package.json vars - -The package.json fields are tacked onto the `npm_package_` prefix. So, -for instance, if you had `{"name":"foo", "version":"1.2.5"}` in your -package.json file, then your package scripts would have the -`npm_package_name` environment variable set to "foo", and the -`npm_package_version` set to "1.2.5". You can access these variables -in your code with `process.env.npm_package_name` and -`process.env.npm_package_version`, and so on for other fields. - -### configuration - -Configuration parameters are put in the environment with the -`npm_config_` prefix. For instance, you can view the effective `root` -config by checking the `npm_config_root` environment variable. - -### Special: package.json "config" object - -The package.json "config" keys are overwritten in the environment if -there is a config param of `[@]:`. For example, -if the package.json has this: - - { "name" : "foo" - , "config" : { "port" : "8080" } - , "scripts" : { "start" : "node server.js" } } - -and the server.js is this: - - http.createServer(...).listen(process.env.npm_package_config_port) - -then the user could change the behavior by doing: - - npm config set foo:port 80 - -### current lifecycle event - -Lastly, the `npm_lifecycle_event` environment variable is set to -whichever stage of the cycle is being executed. So, you could have a -single script used for different parts of the process which switches -based on what's currently happening. 
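For instance, a minimal sketch of such a script (the file name and messages are hypothetical) can branch on `npm_lifecycle_event`:

```bash
#!/bin/sh
# scripts/lifecycle.sh (hypothetical), referenced from several "scripts" entries
case "$npm_lifecycle_event" in
  preinstall)  echo "about to install" ;;
  postinstall) echo "finished installing" ;;
  test)        echo "running the test suite" ;;
  *)           echo "running for event: $npm_lifecycle_event" ;;
esac
```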
- -Objects are flattened following this format, so if you had -`{"scripts":{"install":"foo.js"}}` in your package.json, then you'd -see this in the script: - - process.env.npm_package_scripts_install === "foo.js" - -## EXAMPLES - -For example, if your package.json contains this: - - { "scripts" : - { "install" : "scripts/install.js" - , "postinstall" : "scripts/install.js" - , "uninstall" : "scripts/uninstall.js" - } - } - -then `scripts/install.js` will be called for the install -and post-install stages of the lifecycle, and `scripts/uninstall.js` -will be called when the package is uninstalled. Since -`scripts/install.js` is running for two different phases, it would -be wise in this case to look at the `npm_lifecycle_event` environment -variable. - -If you want to run a make command, you can do so. This works just -fine: - - { "scripts" : - { "preinstall" : "./configure" - , "install" : "make && make install" - , "test" : "make test" - } - } - -## EXITING - -Scripts are run by passing the line as a script argument to `sh`. - -If the script exits with a code other than 0, then this will abort the -process. - -Note that these script files don't have to be nodejs or even -javascript programs. They just have to be some kind of executable -file. - -## HOOK SCRIPTS - -If you want to run a specific script at a specific lifecycle event for -ALL packages, then you can use a hook script. - -Place an executable file at `node_modules/.hooks/{eventname}`, and -it'll get run for all packages when they are going through that point -in the package lifecycle for any packages installed in that root. - -Hook scripts are run exactly the same way as package.json scripts. -That is, they are in a separate child process, with the env described -above. - -## BEST PRACTICES - -* Don't exit with a non-zero error code unless you *really* mean it. - Except for uninstall scripts, this will cause the npm action to - fail, and potentially be rolled back. If the failure is minor or - only will prevent some optional features, then it's better to just - print a warning and exit successfully. -* Try not to use scripts to do what npm can do for you. Read through - `package.json(5)` to see all the things that you can specify and enable - by simply describing your package appropriately. In general, this - will lead to a more robust and consistent state. -* Inspect the env to determine where to put things. For instance, if - the `npm_config_binroot` environment variable is set to `/home/user/bin`, then - don't try to install executables into `/usr/local/bin`. The user - probably set it up that way for a reason. -* Don't prefix your script commands with "sudo". If root permissions - are required for some reason, then it'll fail with that error, and - the user will sudo the npm command in question. -* Don't use `install`. Use a `.gyp` file for compilation, and `prepublish` - for anything else. You should almost never have to explicitly set a - preinstall or install script. If you are doing this, please consider if - there is another option. The only valid use of `install` or `preinstall` - scripts is for compilation which must be done on the target architecture. 
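As an illustration of the first point above, a script can warn and exit successfully instead of failing the whole install when an optional feature is unavailable. This is only a sketch; `optional-minifier` is a hypothetical tool:

```bash
#!/bin/sh
# Hypothetical optional build step: prefer a warning over a hard failure
if ! command -v optional-minifier >/dev/null 2>&1; then
  echo "warning: optional-minifier not found; skipping minification" >&2
  exit 0
fi
optional-minifier ./lib
```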
- -## SEE ALSO - -* npm-run-script(1) -* package.json(5) -* npm-developers(7) -* npm-install(1) diff --git a/doc/misc/removing-npm.md b/doc/misc/removing-npm.md deleted file mode 100644 index 84274522c22fe..0000000000000 --- a/doc/misc/removing-npm.md +++ /dev/null @@ -1,54 +0,0 @@ -npm-removal(1) -- Cleaning the Slate -==================================== - -## SYNOPSIS - -So sad to see you go. - - sudo npm uninstall npm -g - -Or, if that fails, get the npm source code, and do: - - sudo make uninstall - -## More Severe Uninstalling - -Usually, the above instructions are sufficient. That will remove -npm, but leave behind anything you've installed. - -If that doesn't work, or if you require more drastic measures, -continue reading. - -Note that this is only necessary for globally-installed packages. Local -installs are completely contained within a project's `node_modules` -folder. Delete that folder, and everything is gone (unless a package's -install script is particularly ill-behaved). - -This assumes that you installed node and npm in the default place. If -you configured node with a different `--prefix`, or installed npm with a -different prefix setting, then adjust the paths accordingly, replacing -`/usr/local` with your install prefix. - -To remove everything npm-related manually: - - rm -rf /usr/local/{lib/node{,/.npm,_modules},bin,share/man}/npm* - -If you installed things *with* npm, then your best bet is to uninstall -them with npm first, and then install them again once you have a -proper install. This can help find any symlinks that are lying -around: - - ls -laF /usr/local/{lib/node{,/.npm},bin,share/man} | grep npm - -Prior to version 0.3, npm used shim files for executables and node -modules. To track those down, you can do the following: - - find /usr/local/{lib/node,bin} -exec grep -l npm \{\} \; ; - -(This is also in the README file.) - -## SEE ALSO - -* README -* npm-uninstall(1) -* npm-prune(1) diff --git a/doc/spec/file-specifiers.md b/doc/spec/file-specifiers.md deleted file mode 100644 index e737909db5257..0000000000000 --- a/doc/spec/file-specifiers.md +++ /dev/null @@ -1,151 +0,0 @@ -# `file:` specifiers - -`specifier` refers to the value part of the `package.json`'s `dependencies` -object. This is a semver expression for registry dependencies and -URLs and URL-like strings for other types. - -### Dependency Specifiers - -* A `file:` specifier is either an absolute path (eg `/path/to/thing`, `d:\path\to\thing`): - * An absolute `file:///absolute/path` with any number of leading slashes - being treated as a single slash. That is, `file:/foo/bar` and - `file:///foo/bar` reference the same package. -* … or a relative path (eg `../path/to/thing`, `path\to\subdir`). Leading - slashes on a file specifier will be removed, that is 'file://../foo/bar` - references the same package as same as `file:../foo/bar`. The latter is - considered canonical. -* Attempting to install a specifier that has a windows drive letter will - produce an error on non-Windows systems. -* A valid `file:` specifier points is: - * a valid package file. That is, a `.tar`, `.tar.gz` or `.tgz` containing - `/package.json`. - * OR, a directory that contains a `package.json` - -Relative specifiers are relative to the file they were found in, or, if -provided on the command line, the CWD that the command was run from. - -An absolute specifier found in a `package.json` or `npm-shrinkwrap.json` is -probably an error as it's unlikely to be portable between computers and -should warn. 
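To illustrate the forms above (the paths are hypothetical):

```bash
# Relative specifier: the canonical form to record in package.json
npm install file:../foo/bar

# Equivalent absolute forms: extra leading slashes collapse to a single slash
npm install file:/foo/bar
npm install file:///foo/bar
```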
- -A specifier provided as a command line argument that is on a different drive -is an error. That is, `npm install file:d:/foo/bar` is an error if the -current drive is `c`. The point of this rule is that if we can't produce a -relative path then it's an error. - -### Specifier Disambiguation - -On the command line, plain paths are allowed. These paths can be ambiguous -as they could be a path, a plain package name or a github shortcut. This -ambiguity is resolved by checking to see if either a directory exists that -contains a `package.json`. If either is the case then the specifier is a -file specifier, otherwise it's a registry or github specifier. - -### Specifier Matching - -A specifier is considered to match a dependency on disk when the `realpath` -of the fully resolved specifier matches the `realpath` of the package on disk. - -### Saving File Specifiers - -When saving to both `package.json` and `npm-shrinkwrap.json` they will be -saved using the `file:../relative/path` form, and the relative path will be -relative to the project's root folder. This is particularly important to -note for the `npm-shrinkwrap.json` as it means the specifier there will -be different then the original `package.json` (where it was relative to that -`package.json`). - -When shrinkwrapping file specifiers, the contents of the destination -package's `node_modules` WILL NOT be included in the shrinkwrap. If you want to lock -down the destination package's `node_modules` you should create a shrinkwrap for it -separately. - -This is necessary to support the mono repo use case where many projects file -to the same package. If each project included its own `npm-shrinkwrap.json` -then they would each have their own distinct set of transitive dependencies -and they'd step on each other any time you ran an install in one or the other. - -NOTE: This should not have an effect on shrinkwrapping of other sorts of -shrinkwrapped packages. - -### Installation - -#### File type specifiers pointing at tarballs - -File-type specifiers pointing at a `.tgz` or `.tar.gz` or `.tar` file will -install it as a package file in the same way we would a remote tarball. The -checksum of the package file should be recorded so that we can check for updates. - -#### File type specifers pointing at directories - -File-type specifiers that point at directories will necessarily not do -anything for `fetch` and `extract` phases. - -The symlink should be created during the `finalize` phase. - -The `preinstall` for file-type specifiers MUST be run AFTER the -`finalize` phase as the symlink may be a relative path reaching outside the -current project root and a symlink that resolves in `.staging` won't resolve -in the package's final resting place. - -If the module is inside the package root that we're running the install for then -dependencies of the linked package will be hoisted to the top level as usual. - -If the module is outside the package root then dependencies will be installed inside -the linked module's `node_modules` folder. - -### Removal - -Removal should remove the symlink. - -Removal MUST NOT remove the transitive dependencies IF they're installed in -the linked module's `node_modules` folder. - -### Listing - -In listings they should not include a version as the version is not -something `npm` is concerned about. This also makes them easily -distinguishable from symlinks of packages that have other dependency -specifiers. 
- -If you had run: - -``` -npm install --save file:../a -``` - -And then run: -``` -npm ls -``` - -You would see: - -``` -example-package@1.0.0 /path/to/example-package -└── a → file:../a -``` - -``` -example-package@1.0.0 /path/to/example-package -+-- a -> file:../a -``` - -Of note here: No version is included as the relevant detail is WHERE the -package came from, not what version happened to be in that path. - -### Outdated - -Local specifiers should only show up in `npm outdated` if they're missing -and when they do, they should be reported as: - -``` -Package Current Wanted Latest Location -a MISSING LOCAL LOCAL example-package -``` - -### Updating - -If a dependency with a local specifier is already installed then `npm -update` shouldn't do anything. If one is missing then it should be -installed as if you ran `npm install`. diff --git a/doc/spec/package-lock.md b/doc/spec/package-lock.md deleted file mode 100644 index 9da5d8c03b45a..0000000000000 --- a/doc/spec/package-lock.md +++ /dev/null @@ -1,276 +0,0 @@ -# package-lock and npm-shrinkwrap - -`npm` can have one of two different lock files: - -* `package-lock.json`, which is ordinarily always present and is never published. -* `npm-shrinkwrap.json`, which is created with `npm shrinkwrap` and usually published. - -You can only have one of them and in the event that you have both, -`npm-shrinkwrap.json` takes precedence. The files are exactly the same -format and in fact all the `npm shrinkwrap` command does is rename your -`package-lock.json`. - -Through the rest of this document we will refer to the package-lock and -`package-lock.json` but everything also applies to `npm-shrinkwrap.json`. - -## File Format - -### name - -The name of the package this is a package-lock for. This must match what's in `package.json`. - -### version - -The version of the package this is a package-lock for. This must match what's in `package.json`. - -### lockfileVersion *(new)* - -An integer version, starting at `1` with the version number of this document -whose semantics were used when generating this `package-lock.json`. - -### preserveSymlinks *(new)* - -Indicates that the install was done with the environment variable -`NODE_PRESERVE_SYMLINKS` enabled. The installer should insist that the value of this -property match that environment variable. - -### dependencies - -These are the modules installed in the `node_modules`. Some of these are -dependencies some of these are transitive dependencies (that is, -dependencies of our dependencies). - -This is a mapping of package name to dependency object. Dependency objects have the -following properties: - -#### version *(changed)* - -This is a specifier that uniquely identifies this package and should be -usable in fetching a new copy of it. - -* bundled dependencies: Regardless of source, this is a version number that is purely for informational purposes. -* registry sources: This is a version number. (eg, `1.2.3`) -* git sources: This is a git specifier with resolved committish. (eg, `git+https://example.com/foo/bar#115311855adb0789a0466714ed48a1499ffea97e`) -* http tarball sources: This is the URL of the tarball. (eg, `https://example.com/example-1.3.0.tgz`) -* local tarball sources: This is the file URL of the tarball. (eg `file:///opt/storage/example-1.3.0.tgz`) -* local link sources: This is the file URL of the link. (eg `file:libs/our-module`) - -#### integrity *(new)* - -This is a [Standard Subresource -Integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) for -this resource. 
- -* For bundled dependencies this is not included, regardless of source. -* For registry sources, this is the `integrity` that the registry provided, or if one wasn't provided the SHA1 in `shasum`. -* For git sources this is the specific commit hash we cloned from. -* For remote tarball sources this is an integrity based on a SHA512 of - the file. -* For local tarball sources: This is an integrity field based on the SHA512 of the file. - -#### resolved - -* For bundled dependencies this is not included, regardless of source. -* For registry sources this is path of the tarball relative to the registry - URL. If the tarball URL isn't on the same server as the registry URL then - this is a complete URL. - -#### link *(new)* - -If this module was symlinked in development but had semver in the -`package.json` then this is the relative path of that link. - -Discussion of the semantics of this will go in the symlinks RFC. - -Implementation note: To be implemented post npm@5. - -#### bundled *(new)* - -If true, this is the bundled dependency and will be installed by the parent -module. When installing, this module will be extracted from the parent -module during the extract phase, not installed as a separate dependency. - -#### dev - -If true then this dependency is either a development dependency ONLY of the -top level module or a transitive dependency of one. This is false for -dependencies that are both a development dependency of the top level and a -transitive dependency of a non-development dependency of the top level. - -#### optional - -If true then this dependency is either an optional dependency ONLY of the -top level module or a transitive dependency of one. This is false for -dependencies that are both an optional dependency of the top level and a -transitive dependency of a non-optional dependency of the top level. - -All optional dependencies should be included even if they're uninstallable -on the current platform. - -#### from - -This is a record of what specifier was used to originally install this -package. This should be used only for git dependencies. - -#### requires - -This is a mapping of module name to version. This is a list of everything -this module requires, regardless of where it will be installed. The version -should match via normal matching rules a dependency either in our -`dependencies` or in a level higher than us. - -#### dependencies - -Exactly like `dependencies` at the top level, this is a list of modules to -install in the `node_modules` of this module. - -## Generating - -### `npm init` - -If neither a `package-lock.json` nor an `npm-shrinkwrap.json` exist then -`npm init` will create a `package-lock.json`. This is functionally -equivalent to running `npm shrinkwrap` after the current init completes and -renaming the result to `package-lock.json`. - -### `npm install --save` - -If either an `npm-shrinkwrap.json` or a `package-lock.json` exists then it -will be updated. - -If neither exist then a `package-lock.json` should be generated. - -If a `package.json` does not exist, it should be generated. The generated -`package.json` should be empty, as in: - -``` -{ - "dependencies": { - } -} -``` - -If the user wants to get a default package name/version added they can run `npm init`. - -### `npm shrinkwrap` - -If a `package-lock.json` exists, rename it to `npm-shrinkwrap.json`. -Refresh the data from the installer's ideal tree. - -The top level `name` and `version` come from the `package.json`. It is an -error if either are missing or invalid. 
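Concretely, the flow described above amounts to something like this sketch, assuming a project that already has a `package.json`:

```bash
npm install      # creates or updates package-lock.json
npm shrinkwrap   # renames it to npm-shrinkwrap.json and refreshes its data
ls *.json        # package-lock.json is gone; npm-shrinkwrap.json takes its place
```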
- -#### dependencies.dev - -This is `true` if this dependency is ONLY installed to fulfill either a top -level development dependency, or one of its transitive dependencies. - -Given: -``` -B (Dev) → C -``` - -Then both B and C would be `dev: true`. - -Given: -``` -A → B → C -B (Dev) -> C -``` - -Then all dependencies would be `dev: false`. - -#### dependencies.optional - -This is `true` if this dependency is ONLY ever either an optional dependency -or a transitive dependency of optional dependencies. - -Given: -``` -A (Opt) → B → C -``` - -Then all three of A, B and C would be flagged as optional. - -Given: -``` -A (Opt) → B → C -D → C -``` - -Then A and B would be flagged as optional, but C would not be. - -Given: -``` -A (Opt) → B → C -D → A -``` - -Then none would be flagged as optional. - -## Installing - -If the `packageIntegrity` in the `package-lock.json` differs from the one -computed from the `package.json` then places where the `package.json` is -incompatible with the `package-lock.json` a new module should be installed. -That is, while the `package-lock.json` ordinarily defines the state of your -project, if your `package.json` is edited independently it will take -precedence. - -The `package-lock.json` describes the exact tree that `npm` should create. -Any deviation between the `package.json` and the shrinkwrap/lock should -result in a warning be issued. This includes: - -* Modules in `package.json` but missing from the `package-lock.json` -* Modules in the `package-lock.json` but missing from the `package.json`. -* Modules in `package.json` whose specifiers don't match the version in `package-lock.json`. - -Warn if the `lockfileVersion` in the `package-lock.json` is for a different -major version than we implement. - -Module resolution from package-lock data works as such: - -* If install was run with `--resolve-links` and a dependency has a `link` - property then a symlink is made using that. If the version of the - destination can not be matched to the package-lock and/or the package.json - then a warning will be issued. - -* Otherwise, if a `integrity` is available then we try to install it from the cache using it. - -If `integrity` is unavailable or we are unable to locate a module from the `integrity` then: - -* If `lockfileVersion` is set: - * Install using the value of `version` and validate the result against the - `integrity`. -* Otherwise, try these in turn and validate the result against the `integrity`: - * `resolved`, then `from`, then `version. - * `from` can be either `package@specifier` or just `specifier`. - -Regardless of how the module is installed the metadata in the installed -module should be identical to what it would have been if the module were -installed w/o a package-lock. - -## Implied Changes To Other Commands - -### `npm rm --save` - -Currently if you ask to remove a package that's both a direct and a -transitive dependency, we'll remove the package from `node_modules` even if -this results in a broken tree. This was chosen at the time because we felt -that users would expect `npm rm pkgname` to be equivalent of -`rm -rf node_modules/pkgname`. - -As you are no longer going to be allowed to put your `node_modules` in a -state that's not a valid package-lock, this means this behavior is no longer -valid. Instead we should follow normal rules, removing it from the -dependencies for the top level but only removing the module on disk if -nothing requires it any more. 
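As a sketch of the behavior described above (package names `a` and `b` are hypothetical), where `b` is both a direct dependency and a dependency of `a`:

```bash
npm rm --save b
# b is removed from the top-level "dependencies" in package.json,
# but it stays on disk in node_modules for as long as a still requires it
```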
- -## Additional fields / Adding new fields - -Installers should ignore any field they aren't aware of. It's not an error -to have additional properties in the package-lock or lock file. - -Installers that want to add new fields should either have one added via RFC -in the npm issue tracker and an accompanying documentation PR, or should prefix -it with the name of their project. diff --git a/docs/LICENSE b/docs/LICENSE new file mode 100644 index 0000000000000..5169a5e4135e9 --- /dev/null +++ b/docs/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 gatsbyjs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/docs/content/.DS_Store b/docs/content/.DS_Store new file mode 100644 index 0000000000000..257478af794e9 Binary files /dev/null and b/docs/content/.DS_Store differ diff --git a/docs/content/cli-commands/npm-access.md b/docs/content/cli-commands/npm-access.md new file mode 100644 index 0000000000000..0fbce9c0759d1 --- /dev/null +++ b/docs/content/cli-commands/npm-access.md @@ -0,0 +1,93 @@ +--- +section: cli-commands +title: npm-access +description: Set access level on published packages +--- + +# npm-access(1) + +## Set access level on published packages + +### Synopsis + +```bash +npm access public [] +npm access restricted [] + +npm access grant [] +npm access revoke [] + +npm access 2fa-required [] +npm access 2fa-not-required [] + +npm access ls-packages [||] +npm access ls-collaborators [ []] +npm access edit [] +``` + +### Description + +Used to set access controls on private packages. + +For all of the subcommands, `npm access` will perform actions on the packages +in the current working directory if no package name is passed to the +subcommand. + +* public / restricted: + Set a package to be either publicly accessible or restricted. + +* grant / revoke: + Add or remove the ability of users and teams to have read-only or read-write + access to a package. + +* 2fa-required / 2fa-not-required: + Configure whether a package requires that anyone publishing it have two-factor + authentication enabled on their account. + +* ls-packages: + Show all of the packages a user or a team is able to access, along with the + access level, except for read-only public packages (it won't print the whole + registry listing) + +* ls-collaborators: + Show all of the access privileges for a package. Will only show permissions + for packages to which you have at least read access. If `` is passed in, + the list is filtered only to teams _that_ user happens to belong to. 
+ +* edit: + Set the access privileges for a package at once using `$EDITOR`. + +### Details + +`npm access` always operates directly on the current registry, configurable +from the command line using `--registry=`. + +Unscoped packages are *always public*. + +Scoped packages *default to restricted*, but you can either publish them as +public using `npm publish --access=public`, or set their access as public using +`npm access public` after the initial publish. + +You must have privileges to set the access of a package: + +* You are an owner of an unscoped or scoped package. +* You are a member of the team that owns a scope. +* You have been given read-write privileges for a package, either as a member + of a team or directly as an owner. + +If you have two-factor authentication enabled then you'll have to pass in an +otp with `--otp` when making access changes. + +If your account is not paid, then attempts to publish scoped packages will fail +with an HTTP 402 status code (logically enough), unless you use +`--access=public`. + +Management of teams and team memberships is done with the `npm team` command. + +### See Also + +* [`libnpmaccess`](https://npm.im/libnpmaccess) +* [npm team](/cli-commands/npm-team) +* [npm publish](/cli-commands/npm-publish) +* [npm config](/cli-commands/npm-config) +* [npm registry](/using-npm/registry) diff --git a/docs/content/cli-commands/npm-adduser.md b/docs/content/cli-commands/npm-adduser.md new file mode 100644 index 0000000000000..2df35e45fc550 --- /dev/null +++ b/docs/content/cli-commands/npm-adduser.md @@ -0,0 +1,95 @@ + --- +section: cli-commands +title: npm-adduser +description: Set access level on published packages +--- + +# npm-adduser(1) + +## Add a registry user account + +### Synopsis + +```bash +npm adduser [--registry=url] [--scope=@orgname] [--always-auth] [--auth-type=legacy] + +aliases: login, add-user +``` + +### Description + +Create or verify a user named `` in the specified registry, and +save the credentials to the `.npmrc` file. If no registry is specified, +the default registry will be used (see [`config`](/using-npm/config)). + +The username, password, and email are read in from prompts. + +To reset your password, go to + +To change your email address, go to + +You may use this command multiple times with the same user account to +authorize on a new machine. When authenticating on a new machine, +the username, password and email address must all match with +your existing record. + +`npm login` is an alias to `adduser` and behaves exactly the same way. + +### Configuration + +#### registry + +Default: https://registry.npmjs.org/ + +The base URL of the npm package registry. If `scope` is also specified, +this registry will only be used for packages with that scope. `scope` defaults +to the scope of the project directory you're currently in, if any. See [`scope`](/using-npm/scope). + +#### scope + +Default: none + +If specified, the user and login credentials given will be associated +with the specified scope. See [`scope`](/using-npm/scope). You can use both at the same time, +e.g. + +```bash + npm adduser --registry=http://myregistry.example.com --scope=@myco +``` + +This will set a registry for the given scope and login or create a user for +that registry at the same time. + +#### always-auth + +Default: false + +If specified, save configuration indicating that all requests to the given +registry should include authorization information. Useful for private +registries. Can be used with `--registry` and / or `--scope`, e.g. 
+ +```bash + npm adduser --registry=http://private-registry.example.com --always-auth +``` + +This will ensure that all requests to that registry (including for tarballs) +include an authorization header. This setting may be necessary for use with +private registries where metadata and package tarballs are stored on hosts with +different hostnames. See `always-auth` in [`config`](/using-npm/config) for more details on always-auth. Registry-specific configuration of `always-auth` takes precedence over any global configuration. + +#### auth-type + +* Default: `'legacy'` +* Type: `'legacy'`, `'sso'`, `'saml'`, `'oauth'` + +What authentication strategy to use with `adduser`/`login`. Some npm registries +(for example, npmE) might support alternative auth strategies besides classic +username/password entry in legacy npm. + +### See Also + +* [npm registry](/using-npm/registry) +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) +* [npm owner](/cli-commands/npm-owner) +* [npm whoami](/cli-commands/npm-whoami) diff --git a/docs/content/cli-commands/npm-audit.md b/docs/content/cli-commands/npm-audit.md new file mode 100644 index 0000000000000..0aba874f96df8 --- /dev/null +++ b/docs/content/cli-commands/npm-audit.md @@ -0,0 +1,136 @@ +--- +section: cli-commands +title: npm-audit +description: Run a security audit +--- + +# npm-audit(1) + +## Run a security audit + +### Synopsis + +```bash +npm audit [--json|--parseable|--audit-level=(low|moderate|high|critical)] +npm audit fix [--force|--package-lock-only|--dry-run] + +common options: [--production] [--only=(dev|prod)] +``` + +### Examples + +Scan your project for vulnerabilities and automatically install any compatible +updates to vulnerable dependencies: +```bash +$ npm audit fix +``` + +Run `audit fix` without modifying `node_modules`, but still updating the +pkglock: +```bash +$ npm audit fix --package-lock-only +``` + +Skip updating `devDependencies`: +```bash +$ npm audit fix --only=prod +``` + +Have `audit fix` install semver-major updates to toplevel dependencies, not just +semver-compatible ones: +```bash +$ npm audit fix --force +``` + +Do a dry run to get an idea of what `audit fix` will do, and _also_ output +install information in JSON format: +```bash +$ npm audit fix --dry-run --json +``` + +Scan your project for vulnerabilities and just show the details, without fixing +anything: +```bash +$ npm audit +``` + +Get the detailed audit report in JSON format: +```bash +$ npm audit --json +``` + +Get the detailed audit report in plain text result, separated by tab characters, allowing for +future reuse in scripting or command line post processing, like for example, selecting +some of the columns printed: +```bash +$ npm audit --parseable +``` + +To parse columns, you can use for example `awk`, and just print some of them: +```bash +$ npm audit --parseable | awk -F $'\t' '{print $1,$4}' +``` + +Fail an audit only if the results include a vulnerability with a level of moderate or higher: +```bash +$ npm audit --audit-level=moderate +``` + +### Description + +The audit command submits a description of the dependencies configured in +your project to your default registry and asks for a report of known +vulnerabilities. The report returned includes instructions on how to act on +this information. The command will exit with a 0 exit code if no +vulnerabilities were found. + +You can also have npm automatically fix the vulnerabilities by running `npm +audit fix`. 
Note that some vulnerabilities cannot be fixed automatically and +will require manual intervention or review. Also note that since `npm audit fix` +runs a full-fledged `npm install` under the hood, all configs that apply to the +installer will also apply to `npm install` -- so things like `npm audit fix +--package-lock-only` will work as expected. + +By default, the audit command will exit with a non-zero code if any vulnerability +is found. It may be useful in CI environments to include the `--audit-level` parameter +to specify the minimum vulnerability level that will cause the command to fail. This +option does not filter the report output, it simply changes the command's failure +threshold. + +### Content Submitted + +* npm_version +* node_version +* platform +* node_env +* A scrubbed version of your package-lock.json or npm-shrinkwrap.json + +#### Scrubbing + +In order to ensure that potentially sensitive information is not included in +the audit data bundle, some dependencies may have their names (and sometimes +versions) replaced with opaque non-reversible identifiers. It is done for +the following dependency types: + +* Any module referencing a scope that is configured for a non-default + registry has its name scrubbed. (That is, a scope you did a `npm login --scope=@ourscope` for.) +* All git dependencies have their names and specifiers scrubbed. +* All remote tarball dependencies have their names and specifiers scrubbed. +* All local directory and tarball dependencies have their names and specifiers scrubbed. + +The non-reversible identifiers are a sha256 of a session-specific UUID and the +value being replaced, ensuring a consistent value within the payload that is +different between runs. + +### Exit Code + +The `npm audit` command will exit with a 0 exit code if no vulnerabilities were found. + +If vulnerabilities were found the exit code will depend on the `audit-level` +configuration setting. + +### See Also + +* [npm install](/cli-commands/npm-install) +* [package-locks](/configuring-npm/package-locks) +* [config](/using-npm/config) diff --git a/docs/content/cli-commands/npm-bin.md b/docs/content/cli-commands/npm-bin.md new file mode 100644 index 0000000000000..6c7ce0eee54e2 --- /dev/null +++ b/docs/content/cli-commands/npm-bin.md @@ -0,0 +1,26 @@ +--- +section: cli-commands +title: npm-bin +description: Display npm bin folder +--- + +# npm-bin(1) + +## Display npm bin folder + +### Synopsis +```bash +npm bin [-g|--global] +``` + +### Description + +Print the folder where npm will install executables. + +### See Also + +* [npm prefix](/cli-commands/npm-prefix) +* [npm root](/cli-commands/npm-root) +* [npm folders](/configuring-npm/folders) +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) diff --git a/docs/content/cli-commands/npm-bugs.md b/docs/content/cli-commands/npm-bugs.md new file mode 100644 index 0000000000000..dcc9c358df14d --- /dev/null +++ b/docs/content/cli-commands/npm-bugs.md @@ -0,0 +1,50 @@ +--- +section: cli-commands +title: npm-bugs +description: Bugs for a package in a web browser maybe +--- + +# npm-bugs(1) + +## Bugs for a package in a web browser maybe + +### Synopsis +```bash +npm bugs [] + +aliases: issues +``` + +### Description + +This command tries to guess at the likely location of a package's +bug tracker URL, and then tries to open it using the `--browser` +config param. If no package name is provided, it will search for +a `package.json` in the current folder and use the `name` property. 
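For example (the package name is illustrative):

```bash
# Open the bug tracker for a published package
npm bugs express

# Or, from inside a project directory, open the current package's tracker
npm bugs
```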
+ +### Configuration + +#### browser + +* Default: OS X: `"open"`, Windows: `"start"`, Others: `"xdg-open"` +* Type: String + +The browser that is called by the `npm bugs` command to open websites. + +#### registry + +* Default: https://registry.npmjs.org/ +* Type: url + +The base URL of the npm package registry. + + +### See Also + +* [npm docs](/cli-commands/npm-docs) +* [npm view](/cli-commands/npm-view) +* [npm publish](/cli-commands/npm-publish) +* [npm registry](/using-npm/registry) +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) +* [package.json](/configuring-npm/package-json) diff --git a/docs/content/cli-commands/npm-build.md b/docs/content/cli-commands/npm-build.md new file mode 100644 index 0000000000000..b657129787663 --- /dev/null +++ b/docs/content/cli-commands/npm-build.md @@ -0,0 +1,34 @@ +--- +section: cli-commands +title: npm-build +description: Build a package +--- + +# npm-build(1) + +## Build a package + +### Synopsis +```shell +npm build [] +``` + +* ``: + A folder containing a `package.json` file in its root. + +### Description + +This is the plumbing command called by `npm link` and `npm install`. + +It should generally be called during installation, but if you need to run it +directly, run: +```bash + npm run-script build +``` + +### See Also + +* [npm install](/cli-commands/npm-install) +* [npm link](/cli-commands/npm-link) +* [npm scripts](/using-npm/scripts) +* [package.json](/configuring-npm/package-json) diff --git a/docs/content/cli-commands/npm-bundle.md b/docs/content/cli-commands/npm-bundle.md new file mode 100644 index 0000000000000..76417ac8b0b0c --- /dev/null +++ b/docs/content/cli-commands/npm-bundle.md @@ -0,0 +1,21 @@ +--- +section: cli-commands +title: npm-bundle +description: REMOVED +--- + +# npm-bundle(1) + +## REMOVED + +### Description + +The `npm bundle` command has been removed in 1.0, for the simple reason +that it is no longer necessary, as the default behavior is now to +install packages into the local space. + +Just use `npm install` now to do what `npm bundle` used to do. + +### See Also + +* [npm install](/cli-commands/npm-install) diff --git a/doc/cli/npm-cache.md b/docs/content/cli-commands/npm-cache.md similarity index 75% rename from doc/cli/npm-cache.md rename to docs/content/cli-commands/npm-cache.md index 92a6236c0c9e9..ed31a320421b0 100644 --- a/doc/cli/npm-cache.md +++ b/docs/content/cli-commands/npm-cache.md @@ -1,19 +1,28 @@ -npm-cache(1) -- Manipulates packages cache -========================================== +--- +section: cli-commands +title: npm-cache +description: Manipulates packages cache +--- -## SYNOPSIS +# npm-cache(1) - npm cache add - npm cache add - npm cache add - npm cache add @ +## Manipulates packages cache - npm cache clean [] - aliases: npm cache clear, npm cache rm +### Synopsis - npm cache verify +```bash +npm cache add +npm cache add +npm cache add +npm cache add @ -## DESCRIPTION +npm cache clean [] +aliases: npm cache clear, npm cache rm + +npm cache verify +``` + +### Description Used to add, list, or clean the npm cache folder. @@ -29,7 +38,7 @@ Used to add, list, or clean the npm cache folder. Verify the contents of the cache folder, garbage collecting any unneeded data, and verifying the integrity of the cache index and all cached data. -## DETAILS +### Details npm stores cache data in an opaque directory within the configured `cache`, named `_cacache`. This directory is a `cacache`-based content-addressable cache @@ -51,7 +60,7 @@ directly. 
npm will not remove data by itself: the cache will grow as new packages are installed. -## A NOTE ABOUT THE CACHE'S DESIGN +### A note about the cache's design The npm cache is strictly a cache: it should not be relied upon as a persistent and reliable data store for package data. npm makes no guarantee that a @@ -62,22 +71,21 @@ if it does return data, that data will be exactly the data that was inserted. To run an offline verification of existing cache contents, use `npm cache verify`. -## CONFIGURATION +### Configuration -### cache +#### cache Default: `~/.npm` on Posix, or `%AppData%/npm-cache` on Windows. The root cache folder. -## SEE ALSO +### See Also -* npm-folders(5) -* npm-config(1) -* npm-config(7) -* npmrc(5) -* npm-install(1) -* npm-publish(1) -* npm-pack(1) +* [npm folders](/configuring-npm/folders) +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) +* [npm install](/cli-commands/npm-install) +* [npm publish](/cli-commands/npm-publish) +* [npm pack](/cli-commands/npm-pack) * https://npm.im/cacache * https://npm.im/pacote diff --git a/doc/cli/npm-ci.md b/docs/content/cli-commands/npm-ci.md similarity index 77% rename from doc/cli/npm-ci.md rename to docs/content/cli-commands/npm-ci.md index 289bb7c195a9e..357ba16cf6ade 100644 --- a/doc/cli/npm-ci.md +++ b/docs/content/cli-commands/npm-ci.md @@ -1,15 +1,23 @@ -npm-ci(1) -- Install a project with a clean slate -=================================== +--- +section: cli-commands +title: npm-ci +description: Install a project with a clean slate +--- -## SYNOPSIS +# npm-ci(1) - npm ci +## Install a project with a clean slate -## EXAMPLE +### Synopsis +```bash +npm ci +``` + +### Example Make sure you have a package-lock and an up-to-date install: -``` +```bash $ cd ./my/npm/project $ npm install added 154 packages in 10s @@ -18,14 +26,14 @@ $ ls | grep package-lock Run `npm ci` in that project -``` +```bash $ npm ci added 154 packages in 5s ``` Configure Travis to build using `npm ci` instead of `npm install`: -``` +```bash # .travis.yml install: - npm ci @@ -35,9 +43,9 @@ cache: - "$HOME/.npm" ``` -## DESCRIPTION +### Description -This command is similar to `npm-install(1)`, except it's meant to be used in +This command is similar to [`npm install`](/cli-commands/npm-install), except it's meant to be used in automated environments such as test platforms, continuous integration, and deployment -- or any situation where you want to make sure you're doing a clean install of your dependencies. It can be significantly faster than a regular npm @@ -53,7 +61,7 @@ In short, the main differences between using `npm install` and `npm ci` are: * If a `node_modules` is already present, it will be automatically removed before `npm ci` begins its install. * It will never write to `package.json` or any of the package-locks: installs are essentially frozen. -## SEE ALSO +### See Also -* npm-install(1) -* npm-package-locks(5) +* [npm install](/cli-commands/npm-install) +* [package-locks](/configuring-npm/package-locks) diff --git a/docs/content/cli-commands/npm-completion.md b/docs/content/cli-commands/npm-completion.md new file mode 100644 index 0000000000000..59bfca503e084 --- /dev/null +++ b/docs/content/cli-commands/npm-completion.md @@ -0,0 +1,42 @@ +--- +section: cli-commands +title: npm-completion +description: Tab Completion for npm +--- + +# npm-completion(1) + +## Tab Completion for npm + +### Synopsis +```bash +source <(npm completion) +``` + +### Description + +Enables tab-completion in all npm commands. 
+ +The synopsis above +loads the completions into your current shell. Adding it to +your ~/.bashrc or ~/.zshrc will make the completions available +everywhere: + +```bash +npm completion >> ~/.bashrc +npm completion >> ~/.zshrc +``` + +You may of course also pipe the output of `npm completion` to a file +such as `/usr/local/etc/bash_completion.d/npm` or +`/etc/bash_completion.d/npm` if you have a system that will read +that file for you. + +When `COMP_CWORD`, `COMP_LINE`, and `COMP_POINT` are defined in the +environment, `npm completion` acts in "plumbing mode", and outputs +completions based on the arguments. + +### See Also + +* [npm developers](/using-npm/developers) +* [npm](/cli-commands/npm) diff --git a/docs/content/cli-commands/npm-config.md b/docs/content/cli-commands/npm-config.md new file mode 100644 index 0000000000000..c2f2033b066d2 --- /dev/null +++ b/docs/content/cli-commands/npm-config.md @@ -0,0 +1,85 @@ +--- +section: cli-commands +title: npm-config +description: Manage the npm configuration files +--- + +# npm-config(1) + +## Manage the npm configuration files + +### Synopsis +```bash +npm config set [-g|--global] +npm config get +npm config delete +npm config list [-l] [--json] +npm config edit +npm get +npm set [-g|--global] + +aliases: c +``` + +### Description + +npm gets its config settings from the command line, environment +variables, `npmrc` files, and in some cases, the `package.json` file. + +See [npmrc](/configuring-npm/npmrc) for more information about the npmrc files. + +See [config](/using-npm/config) for a more thorough discussion of the mechanisms +involved. + +The `npm config` command can be used to update and edit the contents +of the user and global npmrc files. + +### Sub-commands + +Config supports the following sub-commands: + +#### set +```bash +npm config set key value +``` +Sets the config key to the value. + +If value is omitted, then it sets it to "true". + +#### get +```bash +npm config get key +``` + +Echo the config value to stdout. + +#### list +```bash +npm config list +``` + +Show all the config settings. Use `-l` to also show defaults. Use `--json` +to show the settings in json format. + +#### delete +```bash +npm config delete key +``` + +Deletes the key from all configuration files. + +#### edit +```bash +npm config edit +``` + +Opens the config file in an editor. Use the `--global` flag to edit the +global config. + +### See Also + +* [npm folders](/configuring-npm/folders) +* [npm config](/cli-commands/npm-config) +* [package.json](/configuring-npm/package-json) +* [npmrc](/configuring-npm/npmrc) +* [npm](/cli-commands/npm) diff --git a/docs/content/cli-commands/npm-dedupe.md b/docs/content/cli-commands/npm-dedupe.md new file mode 100644 index 0000000000000..e15a12ba7c18a --- /dev/null +++ b/docs/content/cli-commands/npm-dedupe.md @@ -0,0 +1,67 @@ +--- +section: cli-commands +title: npm-dedupe +description: Reduce duplication +--- + +# npm-dedupe(1) + +## Reduce duplication + +### Synopsis +```bash +npm dedupe +npm ddp + +aliases: find-dupes, ddp +``` + +### Description + +Searches the local package tree and attempts to simplify the overall +structure by moving dependencies further up the tree, where they can +be more effectively shared by multiple dependent packages. 
+ +For example, consider this dependency graph: + +```bash +a ++-- b <-- depends on c@1.0.x +| `-- c@1.0.3 +`-- d <-- depends on c@~1.0.9 + `-- c@1.0.10 +``` + +In this case, `npm dedupe` will transform the tree to: + +```bash +a ++-- b ++-- d +`-- c@1.0.10 +``` + +Because of the hierarchical nature of node's module lookup, b and d +will both get their dependency met by the single c package at the root +level of the tree. + +The deduplication algorithm walks the tree, moving each dependency as far +up in the tree as possible, even if duplicates are not found. This will +result in both a flat and deduplicated tree. + +If a suitable version exists at the target location in the tree +already, then it will be left untouched, but the other duplicates will +be deleted. + +Arguments are ignored. Dedupe always acts on the entire tree. + +Modules + +Note that this operation transforms the dependency tree, but will never +result in new modules being installed. + +### See Also + +* [npm ls](/cli-commands/npm-ls) +* [npm update](/cli-commands/npm-update) +* [npm install](/cli-commands/npm-install) diff --git a/docs/content/cli-commands/npm-deprecate.md b/docs/content/cli-commands/npm-deprecate.md new file mode 100644 index 0000000000000..d2d9613f653c6 --- /dev/null +++ b/docs/content/cli-commands/npm-deprecate.md @@ -0,0 +1,36 @@ +--- +section: cli-commands +title: npm-deprecate +description: Deprecate a version of a package +--- +# npm-deprecate(1) + +## Deprecate a version of a package + +### Synopsis +```bash +npm deprecate [@] +``` + +### Description + +This command will update the npm registry entry for a package, providing +a deprecation warning to all who attempt to install it. + +It works on [version ranges](https://semver.npmjs.com/) as well as specific +versions, so you can do something like this: +```bash +npm deprecate my-thing@"< 0.2.3" "critical bug fixed in v0.2.3" +``` + +Note that you must be the package owner to deprecate something. See the +`owner` and `adduser` help topics. + +To un-deprecate a package, specify an empty string (`""`) for the `message` +argument. Note that you must use double quotes with no space between them to +format an empty string. + +### See Also + +* [npm publish](/cli-commands/npm-publish) +* [npm registry](/using-npm/registry) diff --git a/doc/cli/npm-dist-tag.md b/docs/content/cli-commands/npm-dist-tag.md similarity index 75% rename from doc/cli/npm-dist-tag.md rename to docs/content/cli-commands/npm-dist-tag.md index 1a69d1b6ce42b..c7921c7f739d7 100644 --- a/doc/cli/npm-dist-tag.md +++ b/docs/content/cli-commands/npm-dist-tag.md @@ -1,15 +1,24 @@ -npm-dist-tag(1) -- Modify package distribution tags -=================================================== + --- +section: cli-commands +title: npm-dist-tag +description: Modify package distribution tags +--- -## SYNOPSIS +# npm-dist-tag(1) - npm dist-tag add @ [] - npm dist-tag rm - npm dist-tag ls [] +## Modify package distribution tags - aliases: dist-tags -## DESCRIPTION +### Synopsis +```bash +npm dist-tag add @ [] +npm dist-tag rm +npm dist-tag ls [] + +aliases: dist-tags +``` + +### Description Add, remove, and enumerate distribution tags on a package: @@ -24,16 +33,20 @@ Add, remove, and enumerate distribution tags on a package: * ls: Show all of the dist-tags for a package, defaulting to the package in - the current prefix. + the current prefix. This is the default action if none is specified. 
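+
+For example (package and tag names are hypothetical):
+
+```bash
+npm dist-tag add my-pkg@1.4.0 beta   # point the "beta" tag at version 1.4.0
+npm dist-tag ls my-pkg               # list all dist-tags for my-pkg
+npm dist-tag rm my-pkg beta          # remove the "beta" tag
+```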
A tag can be used when installing packages as a reference to a version instead of using a specific version number: - npm install @ +```bash +npm install @ +``` When installing dependencies, a preferred tagged version may be specified: - npm install --tag +```bash +npm install --tag +``` This also applies to `npm dedupe`. @@ -43,7 +56,7 @@ Publishing a package sets the `latest` tag to the published version unless the By default, `npm install ` (without any `@` or `@` specifier) installs the `latest` tag. -## PURPOSE +### Purpose Tags can be used to provide an alias instead of version numbers. @@ -62,7 +75,7 @@ The `next` tag is used by some projects to identify the upcoming version. By default, other than `latest`, no tag has any special significance to npm itself. -## CAVEATS +### Caveats This command used to be known as `npm tag`, which only created new tags, and so had a different syntax. @@ -77,12 +90,11 @@ example, `v1.4` cannot be used as a tag, because it is interpreted by semver as The simplest way to avoid semver problems with tags is to use tags that do not begin with a number or the letter `v`. -## SEE ALSO +### See Also -* npm-publish(1) -* npm-install(1) -* npm-dedupe(1) -* npm-registry(7) -* npm-config(1) -* npm-config(7) -* npmrc(5) +* [npm publish](/cli-commands/npm-publish) +* [npm install](/cli-commands/npm-install) +* [npm dedupe](/cli-commands/npm-dedupe) +* [npm registry](/using-npm/registry) +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) diff --git a/docs/content/cli-commands/npm-docs.md b/docs/content/cli-commands/npm-docs.md new file mode 100644 index 0000000000000..46f5cd0d900a4 --- /dev/null +++ b/docs/content/cli-commands/npm-docs.md @@ -0,0 +1,52 @@ +--- +section: cli-commands +title: npm-docs +description: Docs for a package in a web browser maybe +--- + +# npm-docs(1) + +## Docs for a package in a web browser maybe + +### Synopsis + +```bash +npm docs [ [ ...]] +npm docs . +npm home [ [ ...]] +npm home . +``` + +### Description + +This command tries to guess at the likely location of a package's +documentation URL, and then tries to open it using the `--browser` +config param. You can pass multiple package names at once. If no +package name is provided, it will search for a `package.json` in +the current folder and use the `name` property. + +### Configuration + +#### browser + +* Default: OS X: `"open"`, Windows: `"start"`, Others: `"xdg-open"` +* Type: String + +The browser that is called by the `npm docs` command to open websites. + +#### registry + +* Default: https://registry.npmjs.org/ +* Type: url + +The base URL of the npm package registry. 
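+
+A couple of hedged usage examples (the package names are only illustrative); the last line overrides the `browser` config for a single run:
+
+```bash
+npm docs lodash express           # open the docs pages for several packages
+npm docs .                        # use the name field from ./package.json
+npm docs --browser=firefox lodash
+```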
+ + +### See Also + +* [npm view](/cli-commands/npm-view) +* [npm publish](/cli-commands/npm-publish) +* [npm registry](/using-npm/registry) +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) +* [package.json](/configuring-npm/package-json) diff --git a/doc/cli/npm-doctor.md b/docs/content/cli-commands/npm-doctor.md similarity index 90% rename from doc/cli/npm-doctor.md rename to docs/content/cli-commands/npm-doctor.md index 1a5738505cefe..7cec349e5a814 100644 --- a/doc/cli/npm-doctor.md +++ b/docs/content/cli-commands/npm-doctor.md @@ -1,11 +1,20 @@ -npm-doctor(1) -- Check your environments -======================================================== +--- +section: cli-commands +title: npm-doctor +description: Check your environments +--- -## SYNOPSIS +# npm-doctor(1) - npm doctor +## Check your environments -## DESCRIPTION +### Synopsis + +```bash +npm doctor +``` + +### Description `npm doctor` runs a set of checks to ensure that your npm installation has what it needs to manage your JavaScript packages. npm is mostly a standalone tool, but it does @@ -29,7 +38,7 @@ better than an old version. `npm doctor` verifies the following items in your environment, and if there are any recommended changes, it will display them. -### `npm ping` +#### `npm ping` By default, npm installs from the primary npm registry, `registry.npmjs.org`. `npm doctor` hits a special ping endpoint within the registry. This can also be @@ -42,7 +51,7 @@ what that is by running `npm config get registry`), and if you're using a private registry that doesn't support the `/whoami` endpoint supported by the primary registry, this check may fail. -### `npm -v` +#### `npm -v` While Node.js may come bundled with a particular version of npm, it's the policy of the CLI team that we recommend all users run `npm@latest` if they @@ -52,7 +61,7 @@ releases typically only receive critical security and regression fixes. The team believes that the latest tested version of npm is almost always likely to be the most functional and defect-free version of npm. -### `node -v` +#### `node -v` For most users, in most circumstances, the best version of Node will be the latest long-term support (LTS) release. Those of you who want access to new @@ -61,7 +70,7 @@ running a newer version, and some of you may be required to run an older version of Node because of enterprise change control policies. That's OK! But in general, the npm team recommends that most users run Node.js LTS. -### `npm config get registry` +#### `npm config get registry` Some of you may be installing from private package registries for your project or company. That's great! Others of you may be following tutorials or @@ -70,7 +79,7 @@ having. Sometimes, this may entail changing the registry you're pointing at. This part of `npm doctor` just lets you, and maybe whoever's helping you with support, know that you're not using the default registry. -### `which git` +#### `which git` While it's documented in the README, it may not be obvious that npm needs Git installed to do many of the things that it does. Also, in some cases @@ -78,14 +87,14 @@ installed to do many of the things that it does. Also, in some cases accessible via your `PATH` so that npm can find it. This check ensures that Git is available. -### Permissions checks +#### Permissions checks * Your cache must be readable and writable by the user running npm. * Global package binaries must be writable by the user running npm. 
* Your local `node_modules` path, if you're running `npm doctor` with a project directory, must be readable and writable by the user running npm. -### Validate the checksums of cached packages +#### Validate the checksums of cached packages When an npm package is published, the publishing process generates a checksum that npm uses at install time to verify that the package didn't get corrupted @@ -95,8 +104,8 @@ get cache`, and see what's in that cache with `npm cache ls` – probably more than you were expecting!). In the event that there are corrupt packages in your cache, you should probably run `npm cache clean` and reset the cache. -## SEE ALSO +### See Also -* npm-bugs(1) -* npm-help(1) -* npm-ping(1) +* [npm bugs](/cli-commands/npm-bugs) +* [npm help](/cli-commands/npm-help) +* [npm ping](/cli-commands/npm-ping) diff --git a/docs/content/cli-commands/npm-edit.md b/docs/content/cli-commands/npm-edit.md new file mode 100644 index 0000000000000..94b6a087fa102 --- /dev/null +++ b/docs/content/cli-commands/npm-edit.md @@ -0,0 +1,47 @@ +--- +section: cli-commands +title: npm-edit +description: Edit an installed package +--- + +# npm-edit(1) + +## Edit an installed package + +### Synopsis + +```bash +npm edit [/...] +``` + +### Description + +Selects a (sub)dependency in the current +working directory and opens the package folder in the default editor +(or whatever you've configured as the npm `editor` config -- see +[`npm-config`](npm-config).) + +After it has been edited, the package is rebuilt so as to pick up any +changes in compiled packages. + +For instance, you can do `npm install connect` to install connect +into your package, and then `npm edit connect` to make a few +changes to your locally installed copy. + +### Configuration + +#### editor + +* Default: `EDITOR` environment variable if set, or `"vi"` on Posix, + or `"notepad"` on Windows. +* Type: path + +The command to run for `npm edit` or `npm config edit`. + +### See Also + +* [npm folders](/configuring-npm/folders) +* [npm explore](/cli-commands/npm-explore) +* [npm install](/cli-commands/npm-install) +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) diff --git a/docs/content/cli-commands/npm-explore.md b/docs/content/cli-commands/npm-explore.md new file mode 100644 index 0000000000000..8ded96d40984f --- /dev/null +++ b/docs/content/cli-commands/npm-explore.md @@ -0,0 +1,50 @@ + --- +section: cli-commands +title: npm-explore +description: Browse an installed package +--- + +# npm-explore(1) + +## Browse an installed package + +### Synopsis + +```bash +npm explore [ -- ] +``` + +### Description + +Spawn a subshell in the directory of the installed package specified. + +If a command is specified, then it is run in the subshell, which then +immediately terminates. + +This is particularly handy in the case of git submodules in the +`node_modules` folder: + +```bash +npm explore some-dependency -- git pull origin master +``` + +Note that the package is *not* automatically rebuilt afterwards, so be +sure to use `npm rebuild ` if you make any changes. + +### Configuration + +#### shell + +* Default: SHELL environment variable, or "bash" on Posix, or "cmd" on + Windows +* Type: path + +The shell to run for the `npm explore` command. 
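+
+Since the package is not rebuilt for you afterwards, a typical interactive session might look like this sketch (the package name is hypothetical):
+
+```bash
+npm explore some-dependency     # spawn a subshell inside the installed package's folder
+# ...edit files, then exit the subshell...
+npm rebuild some-dependency     # rebuild so any changes are picked up
+```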
+ +### See Also + +* [npm folders](/configuring-npm/folders) +* [npm edit](/cli-commands/npm-edit) +* [npm rebuild](/cli-commands/npm-rebuild) +* [npm build](/cli-commands/npm-build) +* [npm install](/cli-commands/npm-install) diff --git a/docs/content/cli-commands/npm-fund.md b/docs/content/cli-commands/npm-fund.md new file mode 100644 index 0000000000000..5a751eec46687 --- /dev/null +++ b/docs/content/cli-commands/npm-fund.md @@ -0,0 +1,68 @@ +--- +section: cli-commands +title: npm-fund +description: Retrieve funding information +--- + +# npm-fund(1) + +## Retrieve funding information + +### Synopsis + +```bash + npm fund [] +``` + +### Description + +This command retrieves information on how to fund the dependencies of +a given project. If no package name is provided, it will list all +dependencies that are looking for funding in a tree-structure in which +are listed the type of funding and the url to visit. If a package name +is provided then it tries to open its funding url using the `--browser` +config param; if there are multiple funding sources for the package, the +user will be instructed to pass the `--which` command to disambiguate. + +The list will avoid duplicated entries and will stack all packages +that share the same type/url as a single entry. Given this nature the +list is not going to have the same shape of the output from `npm ls`. + +### Configuration + +#### browser + +* Default: OS X: `"open"`, Windows: `"start"`, Others: `"xdg-open"` +* Type: String + +The browser that is called by the `npm fund` command to open websites. + +#### json + +* Type: Boolean +* Default: false + +Show information in JSON format. + +#### unicode + +* Type: Boolean +* Default: true + +Whether to represent the tree structure using unicode characters. +Set it to `false` in order to use all-ansi output. + +#### which + +* Type: Number +* Default: undefined + +If there are multiple funding sources, which 1-indexed source URL to open. + +## See Also + +* [npm docs](/cli-commands/npm-docs) +* [npm config](/cli-commands/npm-config) +* [npm install](/cli-commands/npm-install) +* [npm ls](/cli-commands/npm-ls) + diff --git a/docs/content/cli-commands/npm-help-search.md b/docs/content/cli-commands/npm-help-search.md new file mode 100644 index 0000000000000..69d005cb160ed --- /dev/null +++ b/docs/content/cli-commands/npm-help-search.md @@ -0,0 +1,43 @@ +--- +section: cli-commands +title: npm-help-search +description: Search npm help documentation +--- + +# npm-help-search(1) + +## Search npm help documentation + +### Synopsis + +```bash +npm help-search +``` + +### Description + +This command will search the npm markdown documentation files for the +terms provided, and then list the results, sorted by relevance. + +If only one result is found, then it will show that help topic. + +If the argument to `npm help` is not a known help topic, then it will +call `help-search`. It is rarely if ever necessary to call this +command directly. + +### Configuration + +#### long + +* Type: Boolean +* Default: false + +If true, the "long" flag will cause help-search to output context around +where the terms were found in the documentation. + +If false, then help-search will just list out the help topics found. 
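+
+For example (the search terms are arbitrary):
+
+```bash
+npm help-search exit codes          # list help topics matching the terms
+npm help-search --long exit codes   # also show context around each match
+```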
+ +### See Also + +* [npm](/cli-commands/npm) +* [npm help](/cli-commands/npm-help) diff --git a/docs/content/cli-commands/npm-help.md b/docs/content/cli-commands/npm-help.md new file mode 100644 index 0000000000000..c47676ffcac1d --- /dev/null +++ b/docs/content/cli-commands/npm-help.md @@ -0,0 +1,44 @@ +--- +section: cli-commands +title: npm-help +description: Get help on npm +--- + +# npm-help(1) + +## Get help on npm + +### Synopsis + +```bash +npm help [] +``` + +### Description + +If supplied a topic, then show the appropriate documentation page. + +If the topic does not exist, or if multiple terms are provided, then run +the `help-search` command to find a match. Note that, if `help-search` +finds a single subject, then it will run `help` on that topic, so unique +matches are equivalent to specifying a topic name. + +### Configuration + +#### viewer + +* Default: "man" on Posix, "browser" on Windows +* Type: path + +The program to use to view help content. + +Set to `"browser"` to view html help content in the default web browser. + +### See Also + +* [npm](/cli-commands/npm) +* [npm folders](/configuring-npm/folders) +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) +* [package.json](/configuring-npm/package-json) +* [npm help-search](/cli-commands/npm-help-search) diff --git a/doc/cli/npm-hook.md b/docs/content/cli-commands/npm-hook.md similarity index 75% rename from doc/cli/npm-hook.md rename to docs/content/cli-commands/npm-hook.md index 519287242574a..ce4bbccb3593b 100644 --- a/doc/cli/npm-hook.md +++ b/docs/content/cli-commands/npm-hook.md @@ -1,54 +1,62 @@ -npm-hook(1) -- Manage registry hooks -=================================== +--- +section: cli-commands +title: npm-hook +description: Manage registry hooks +--- -## SYNOPSIS +# npm-hook(1) - npm hook ls [pkg] - npm hook add - npm hook update [secret] - npm hook rm +## Manage registry hooks -## EXAMPLE +### Synopsis -Add a hook to watch a package for changes: +```bash +npm hook ls [pkg] +npm hook add +npm hook update [secret] +npm hook rm ``` + +### Example + +Add a hook to watch a package for changes: +```bash $ npm hook add lodash https://example.com/ my-shared-secret ``` Add a hook to watch packages belonging to the user `substack`: -``` +```bash $ npm hook add ~substack https://example.com/ my-shared-secret ``` Add a hook to watch packages in the scope `@npm` -``` +```bash $ npm hook add @npm https://example.com/ my-shared-secret ``` List all your active hooks: -``` +```bash $ npm hook ls ``` List your active hooks for the `lodash` package: -``` +```bash $ npm hook ls lodash ``` Update an existing hook's url: -``` +```bash $ npm hook update id-deadbeef https://my-new-website.here/ ``` Remove a hook: -``` +```bash $ npm hook rm id-deadbeef ``` -## DESCRIPTION +### Description -Allows you to manage [npm -hooks](https://blog.npmjs.org/post/145260155635/introducing-hooks-get-notifications-of-npm), +Allows you to manage [npm hooks](https://blog.npmjs.org/post/145260155635/introducing-hooks-get-notifications-of-npm), including adding, removing, listing, and updating. Hooks allow you to configure URL endpoints that will be notified whenever a @@ -67,6 +75,6 @@ that particular hook. The shared secret will be sent along to the URL endpoint so you can verify the request came from your own configured hook. 
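+
+As a minimal sketch of how an endpoint might verify a delivery, assuming the registry signs the raw request body with HMAC-SHA256 using the shared secret and sends the result in an `x-npm-signature` header (both the header name and the signing scheme are assumptions to confirm against the hooks documentation):
+
+```bash
+# BODY is the raw request body, SECRET the shared secret configured for this hook,
+# RECEIVED is the value of the (assumed) x-npm-signature request header.
+EXPECTED="sha256=$(printf '%s' "$BODY" | openssl dgst -sha256 -hmac "$SECRET" | awk '{print $NF}')"
+[ "$EXPECTED" = "$RECEIVED" ] && echo "signature ok" || echo "signature mismatch"
+```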
-## SEE ALSO +### See Also * ["Introducing Hooks" blog post](https://blog.npmjs.org/post/145260155635/introducing-hooks-get-notifications-of-npm) diff --git a/docs/content/cli-commands/npm-init.md b/docs/content/cli-commands/npm-init.md new file mode 100644 index 0000000000000..73ad74b23ea99 --- /dev/null +++ b/docs/content/cli-commands/npm-init.md @@ -0,0 +1,74 @@ +--- +section: cli-commands +title: npm-init +description: create a package.json file +--- + +# npm-init(1) + +## create a package.json file + +### Synopsis +```bash +npm init [--force|-f|--yes|-y|--scope] +npm init <@scope> (same as `npx <@scope>/create`) +npm init [<@scope>/] (same as `npx [<@scope>/]create-`) +``` + +### Examples + +Create a new React-based project using [`create-react-app`](https://npm.im/create-react-app): +```bash +$ npm init react-app ./my-react-app +``` + +Create a new `esm`-compatible package using [`create-esm`](https://npm.im/create-esm): +```bash +$ mkdir my-esm-lib && cd my-esm-lib +$ npm init esm --yes +``` + +Generate a plain old package.json using legacy init: +```bash +$ mkdir my-npm-pkg && cd my-npm-pkg +$ git init +$ npm init +``` + +Generate it without having it ask any questions: +```bash +$ npm init -y +``` + +### Description + +`npm init ` can be used to set up a new or existing npm package. + +`initializer` in this case is an npm package named `create-`, which +will be installed by [`npx`](https://npm.im/npx), and then have its main bin +executed -- presumably creating or updating `package.json` and running any other +initialization-related operations. + +The init command is transformed to a corresponding `npx` operation as follows: + +* `npm init foo` -> `npx create-foo` +* `npm init @usr/foo` -> `npx @usr/create-foo` +* `npm init @usr` -> `npx @usr/create` + +Any additional options will be passed directly to the command, so `npm init foo +--hello` will map to `npx create-foo --hello`. + +If the initializer is omitted (by just calling `npm init`), init will fall back +to legacy init behavior. It will ask you a bunch of questions, and then write a +package.json for you. It will attempt to make reasonable guesses based on +existing fields, dependencies, and options selected. It is strictly additive, so +it will keep any fields and values that were already set. You can also use +`-y`/`--yes` to skip the questionnaire altogether. If you pass `--scope`, it +will create a scoped package. + +### See Also + +* +* [package.json](/configuring-npm/package-json) +* [npm version](/cli-commands/npm-version) +* [npm scope](/using-npm/scope) diff --git a/docs/content/cli-commands/npm-install-ci-test.md b/docs/content/cli-commands/npm-install-ci-test.md new file mode 100644 index 0000000000000..98e40f4b27a77 --- /dev/null +++ b/docs/content/cli-commands/npm-install-ci-test.md @@ -0,0 +1,26 @@ +--- +section: cli-commands +title: npm-install-ci-test +description: Install a project with a clean slate and run tests +--- + +# npm install-ci-test(1) + +## Install a project with a clean slate and run tests + +### Synopsis + +```bash +npm install-ci-test + +alias: npm cit +``` + +### Description + +This command runs an `npm ci` followed immediately by an `npm test`. 
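+
+In other words, it is shorthand for:
+
+```bash
+npm ci && npm test
+```
+
+or, using the alias from the synopsis, simply `npm cit`.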
+ +### See Also + +* [npm ci](/cli-commands/npm-ci) +* [npm test](/cli-commands/npm-test) diff --git a/docs/content/cli-commands/npm-install-test.md b/docs/content/cli-commands/npm-install-test.md new file mode 100644 index 0000000000000..b86a5199117c4 --- /dev/null +++ b/docs/content/cli-commands/npm-install-test.md @@ -0,0 +1,35 @@ +--- +section: cli-commands +title: npm-install-test +description: Install package(s) and run tests +--- + +# npm install-test(1) + +## Install package(s) and run tests + +### Synopsis + +```bash +npm install-test (with no args, in package dir) +npm install-test [<@scope>/] +npm install-test [<@scope>/]@ +npm install-test [<@scope>/]@ +npm install-test [<@scope>/]@ +npm install-test +npm install-test +npm install-test + +alias: npm it +common options: [--save|--save-dev|--save-optional] [--save-exact] [--dry-run] +``` + +### Description + +This command runs an `npm install` followed immediately by an `npm test`. It +takes exactly the same arguments as `npm install`. + +### See Also + +* [npm install](/cli-commands/npm-install) +* [npm test](/cli-commands/npm-test) diff --git a/docs/content/cli-commands/npm-install.md b/docs/content/cli-commands/npm-install.md new file mode 100644 index 0000000000000..8e661bf643f39 --- /dev/null +++ b/docs/content/cli-commands/npm-install.md @@ -0,0 +1,519 @@ +--- +section: cli-commands +title: npm-install +description: Install a package +--- + +# npm-install(1) + +## Install a package + +### Synopsis + +```bash +npm install (with no args, in package dir) +npm install [<@scope>/] +npm install [<@scope>/]@ +npm install [<@scope>/]@ +npm install [<@scope>/]@ +npm install @npm: +npm install :/ +npm install +npm install +npm install +npm install + +aliases: npm i, npm add +common options: [-P|--save-prod|-D|--save-dev|-O|--save-optional] [-E|--save-exact] [-B|--save-bundle] [--no-save] [--dry-run] +``` + +### Description + +This command installs a package, and any packages that it depends on. If the +package has a package-lock or shrinkwrap file, the installation of dependencies +will be driven by that, with an `npm-shrinkwrap.json` taking precedence if both +files exist. See [package-lock.json](/configuring-npm/package-lock-json) and [`npm shrinkwrap`](/cli-commands/npm-shrinkwrap). + +A `package` is: + +* a) a folder containing a program described by a [`package.json`](/configuring-npm/package-json) file +* b) a gzipped tarball containing (a) +* c) a url that resolves to (b) +* d) a `@` that is published on the registry (see [`registry`](/using-npm/registry)) with (c) +* e) a `@` (see [`npm dist-tag`](/cli-commands/npm-dist-tag)) that points to (d) +* f) a `` that has a "latest" tag satisfying (e) +* g) a `` that resolves to (a) + +Even if you never publish your package, you can still get a lot of +benefits of using npm if you just want to write a node program (a), and +perhaps if you also want to be able to easily install it elsewhere +after packing it up into a tarball (b). + + +* `npm install` (in package directory, no arguments): + + Install the dependencies in the local node_modules folder. + + In global mode (ie, with `-g` or `--global` appended to the command), + it installs the current package context (ie, the current working + directory) as a global package. + + By default, `npm install` will install all modules listed as dependencies + in [`package.json`](/configuring-npm/package-json). 
+ + With the `--production` flag (or when the `NODE_ENV` environment variable + is set to `production`), npm will not install modules listed in + `devDependencies`. To install all modules listed in both `dependencies` + and `devDependencies` when `NODE_ENV` environment variable is set to `production`, + you can use `--production=false`. + + > NOTE: The `--production` flag has no particular meaning when adding a + dependency to a project. + +* `npm install `: + + Install the package in the directory as a symlink in the current project. + Its dependencies will be installed before it's linked. If `` sits + inside the root of your project, its dependencies may be hoisted to the + toplevel `node_modules` as they would for other types of dependencies. + +* `npm install `: + + Install a package that is sitting on the filesystem. Note: if you just want + to link a dev directory into your npm root, you can do this more easily by + using `npm link`. + + Tarball requirements: + * The filename *must* use `.tar`, `.tar.gz`, or `.tgz` as + the extension. + * The package contents should reside in a subfolder inside the tarball (usually it is called `package/`). npm strips one directory layer when installing the package (an equivalent of `tar x --strip-components=1` is run). + * The package must contain a `package.json` file with `name` and `version` properties. + + Example: + + npm install ./package.tgz + +* `npm install `: + + Fetch the tarball url, and then install it. In order to distinguish between + this and other options, the argument must start with "http://" or "https://" + + Example: + + npm install https://github.com/indexzero/forever/tarball/v0.5.6 + +* `npm install [<@scope>/]`: + + Do a `@` install, where `` is the "tag" config. (See + [`config`](/using-npm/config). The config's default value is `latest`.) + + In most cases, this will install the version of the modules tagged as + `latest` on the npm registry. + + Example: + + npm install sax + +* `npm install @npm:`: + + Install a package under a custom alias. Allows multiple versions of + a same-name package side-by-side, more convenient import names for + packages with otherwise long ones and using git forks replacements + or forked npm packages as replacements. Aliasing works only on your + project and does not rename packages in transitive dependencies. + Aliases should follow the naming conventions stated in + [`validate-npm-package-name`](https://www.npmjs.com/package/validate-npm-package-name#naming-rules). + + Examples: + + npm install my-react@npm:react + npm install jquery2@npm:jquery@2 + npm install jquery3@npm:jquery@3 + npm install npa@npm:npm-package-arg + + + `npm install` saves any specified packages into `dependencies` by default. + Additionally, you can control where and how they get saved with some + additional flags: + + * `-P, --save-prod`: Package will appear in your `dependencies`. This is the + default unless `-D` or `-O` are present. + + * `-D, --save-dev`: Package will appear in your `devDependencies`. + + * `-O, --save-optional`: Package will appear in your `optionalDependencies`. + + * `--no-save`: Prevents saving to `dependencies`. + + When using any of the above options to save dependencies to your + package.json, there are two additional, optional flags: + + * `-E, --save-exact`: Saved dependencies will be configured with an + exact version rather than using npm's default semver range + operator. + + * `-B, --save-bundle`: Saved dependencies will also be added to your `bundleDependencies` list. 
+ + Further, if you have an `npm-shrinkwrap.json` or `package-lock.json` then it + will be updated as well. + + `` is optional. The package will be downloaded from the registry + associated with the specified scope. If no registry is associated with + the given scope the default registry is assumed. See [`scope`](/using-npm/scope). + + Note: if you do not include the @-symbol on your scope name, npm will + interpret this as a GitHub repository instead, see below. Scopes names + must also be followed by a slash. + + Examples: + + ```bash + npm install sax + npm install githubname/reponame + npm install @myorg/privatepackage + npm install node-tap --save-dev + npm install dtrace-provider --save-optional + npm install readable-stream --save-exact + npm install ansi-regex --save-bundle + ``` + + **Note**: If there is a file or folder named `` in the current + working directory, then it will try to install that, and only try to + fetch the package by name if it is not valid. + +* `npm install [<@scope>/]@`: + + Install the version of the package that is referenced by the specified tag. + If the tag does not exist in the registry data for that package, then this + will fail. + + Example: + + ```bash + npm install sax@latest + npm install @myorg/mypackage@latest + ``` + +* `npm install [<@scope>/]@`: + + Install the specified version of the package. This will fail if the + version has not been published to the registry. + + Example: + + ```bash + npm install sax@0.1.1 + npm install @myorg/privatepackage@1.5.0 + ``` + +* `npm install [<@scope>/]@`: + + Install a version of the package matching the specified version range. This + will follow the same rules for resolving dependencies described in [`package.json`](/configuring-npm/package-json). + + Note that most version ranges must be put in quotes so that your shell will + treat it as a single argument. + + Example: + ```bash + npm install sax@">=0.1.0 <0.2.0" + npm install @myorg/privatepackage@">=0.1.0 <0.2.0" + ``` + +* `npm install `: + + Installs the package from the hosted git provider, cloning it with `git`. + For a full git remote url, only that URL will be attempted. + + ```bash + ://[[:]@][:][:][/][# | #semver:] + ``` + + `` is one of `git`, `git+ssh`, `git+http`, `git+https`, or + `git+file`. + + If `#` is provided, it will be used to clone exactly that + commit. If the commit-ish has the format `#semver:`, `` can + be any valid semver range or exact version, and npm will look for any tags + or refs matching that range in the remote repository, much as it would for a + registry dependency. If neither `#` or `#semver:` is + specified, then the default branch of the repository is used. + + If the repository makes use of submodules, those submodules will be cloned + as well. + + If the package being installed contains a `prepare` script, its + `dependencies` and `devDependencies` will be installed, and the prepare + script will be run, before the package is packaged and installed. + + The following git environment variables are recognized by npm and will be + added to the environment when running git: + + * `GIT_ASKPASS` + * `GIT_EXEC_PATH` + * `GIT_PROXY_COMMAND` + * `GIT_SSH` + * `GIT_SSH_COMMAND` + * `GIT_SSL_CAINFO` + * `GIT_SSL_NO_VERIFY` + + See the git man page for details. 
+ + Examples: + + ```bash + npm install git+ssh://git@github.com:npm/cli.git#v1.0.27 + npm install git+ssh://git@github.com:npm/cli#semver:^5.0 + npm install git+https://isaacs@github.com/npm/cli.git + npm install git://github.com/npm/cli.git#v1.0.27 + GIT_SSH_COMMAND='ssh -i ~/.ssh/custom_ident' npm install git+ssh://git@github.com:npm/cli.git + ``` + +* `npm install /[#]`: +* `npm install github:/[#]`: + + Install the package at `https://github.com/githubname/githubrepo` by + attempting to clone it using `git`. + + If `#` is provided, it will be used to clone exactly that + commit. If the commit-ish has the format `#semver:`, `` can + be any valid semver range or exact version, and npm will look for any tags + or refs matching that range in the remote repository, much as it would for a + registry dependency. If neither `#` or `#semver:` is + specified, then `master` is used. + + As with regular git dependencies, `dependencies` and `devDependencies` will + be installed if the package has a `prepare` script, before the package is + done installing. + + Examples: + + ```bash + npm install mygithubuser/myproject + npm install github:mygithubuser/myproject + ``` + +* `npm install gist:[/][#|#semver:]`: + + Install the package at `https://gist.github.com/gistID` by attempting to + clone it using `git`. The GitHub username associated with the gist is + optional and will not be saved in `package.json`. + + As with regular git dependencies, `dependencies` and `devDependencies` will + be installed if the package has a `prepare` script, before the package is + done installing. + + Example: + + ```bash + npm install gist:101a11beef + ``` + +* `npm install bitbucket:/[#]`: + + Install the package at `https://bitbucket.org/bitbucketname/bitbucketrepo` + by attempting to clone it using `git`. + + If `#` is provided, it will be used to clone exactly that + commit. If the commit-ish has the format `#semver:`, `` can + be any valid semver range or exact version, and npm will look for any tags + or refs matching that range in the remote repository, much as it would for a + registry dependency. If neither `#` or `#semver:` is + specified, then `master` is used. + + As with regular git dependencies, `dependencies` and `devDependencies` will + be installed if the package has a `prepare` script, before the package is + done installing. + + Example: + + ```bash + npm install bitbucket:mybitbucketuser/myproject + ``` + +* `npm install gitlab:/[#]`: + + Install the package at `https://gitlab.com/gitlabname/gitlabrepo` + by attempting to clone it using `git`. + + If `#` is provided, it will be used to clone exactly that + commit. If the commit-ish has the format `#semver:`, `` can + be any valid semver range or exact version, and npm will look for any tags + or refs matching that range in the remote repository, much as it would for a + registry dependency. If neither `#` or `#semver:` is + specified, then `master` is used. + + As with regular git dependencies, `dependencies` and `devDependencies` will + be installed if the package has a `prepare` script, before the package is + done installing. + + Example: + + ```bash + npm install gitlab:mygitlabuser/myproject + npm install gitlab:myusr/myproj#semver:^5.0 + ``` + +You may combine multiple arguments, and even multiple types of arguments. +For example: + +```bash +npm install sax@">=0.1.0 <0.2.0" bench supervisor +``` + +The `--tag` argument will apply to all of the specified install targets. 
If a +tag with the given name exists, the tagged version is preferred over newer +versions. + +The `--dry-run` argument will report in the usual way what the install would +have done without actually installing anything. + +The `--package-lock-only` argument will only update the `package-lock.json`, +instead of checking `node_modules` and downloading dependencies. + +The `-f` or `--force` argument will force npm to fetch remote resources even if a +local copy exists on disk. + +```bash +npm install sax --force +``` + +The `--no-fund` argument will hide the message displayed at the end of each +install that acknowledges the number of dependencies looking for funding. +See `npm-fund(1)` + +The `-g` or `--global` argument will cause npm to install the package globally +rather than locally. See [folders](/configuring-npm/folders). + +The `--global-style` argument will cause npm to install the package into +your local `node_modules` folder with the same layout it uses with the +global `node_modules` folder. Only your direct dependencies will show in +`node_modules` and everything they depend on will be flattened in their +`node_modules` folders. This obviously will eliminate some deduping. + +The `--ignore-scripts` argument will cause npm to not execute any +scripts defined in the package.json. See [`scripts`](/using-npm/scripts). + +The `--legacy-bundling` argument will cause npm to install the package such +that versions of npm prior to 1.4, such as the one included with node 0.8, +can install the package. This eliminates all automatic deduping. + +The `--link` argument will cause npm to link global installs into the +local space in some cases. + +The `--no-bin-links` argument will prevent npm from creating symlinks for +any binaries the package might contain. + +The `--no-optional` argument will prevent optional dependencies from +being installed. + +The `--no-shrinkwrap` argument, which will ignore an available +package lock or shrinkwrap file and use the package.json instead. + +The `--no-package-lock` argument will prevent npm from creating a +`package-lock.json` file. When running with package-lock's disabled npm +will not automatically prune your node modules when installing. + +The `--nodedir=/path/to/node/source` argument will allow npm to find the +node source code so that npm can compile native modules. + +The `--only={prod[uction]|dev[elopment]}` argument will cause either only +`devDependencies` or only non-`devDependencies` to be installed regardless of the `NODE_ENV`. + +The `--no-audit` argument can be used to disable sending of audit reports to +the configured registries. See [`npm-audit`](npm-audit) for details on what is sent. + +See [`config`](/using-npm/config). Many of the configuration params have some +effect on installation, since that's most of what npm does. 
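+
+To make the flags above concrete, here are a few hedged combinations (the package name is hypothetical):
+
+```bash
+npm install --only=prod --no-optional --no-audit   # skip devDependencies, optional deps, and audit reports
+npm install --package-lock-only                    # update package-lock.json without touching node_modules
+npm install -g some-cli-tool                       # install a package globally instead of locally
+```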
+ +#### Algorithm + +To install a package, npm uses the following algorithm: +```bash +load the existing node_modules tree from disk +clone the tree +fetch the package.json and assorted metadata and add it to the clone +walk the clone and add any missing dependencies + dependencies will be added as close to the top as is possible + without breaking any other modules +compare the original tree with the cloned tree and make a list of +actions to take to convert one to the other +execute all of the actions, deepest first + kinds of actions are install, update, remove and move +``` + +For this `package{dep}` structure: `A{B,C}, B{C}, C{D}`, +this algorithm produces: + +```bash +A ++-- B ++-- C ++-- D +``` + +That is, the dependency from B to C is satisfied by the fact that A +already caused C to be installed at a higher level. D is still installed +at the top level because nothing conflicts with it. + +For `A{B,C}, B{C,D@1}, C{D@2}`, this algorithm produces: + +```bash +A ++-- B ++-- C + `-- D@2 ++-- D@1 +``` + +Because B's D@1 will be installed in the top level, C now has to install D@2 +privately for itself. This algorithm is deterministic, but different trees may +be produced if two dependencies are requested for installation in a different +order. + +See [folders](/configuring-npm/folders) for a more detailed description of the specific folder structures that npm creates. + +### Limitations of npm's Install Algorithm + +npm will refuse to install any package with an identical name to the +current package. This can be overridden with the `--force` flag, but in +most cases can simply be addressed by changing the local package name. + +There are some very rare and pathological edge-cases where a cycle can +cause npm to try to install a never-ending tree of packages. Here is +the simplest case: + +```bash +A -> B -> A' -> B' -> A -> B -> A' -> B' -> A -> ... +``` + +where `A` is some version of a package, and `A'` is a different version +of the same package. Because `B` depends on a different version of `A` +than the one that is already in the tree, it must install a separate +copy. The same is true of `A'`, which must install `B'`. Because `B'` +depends on the original version of `A`, which has been overridden, the +cycle falls into infinite regress. + +To avoid this situation, npm flat-out refuses to install any +`name@version` that is already present anywhere in the tree of package +folder ancestors. A more correct, but more complex, solution would be +to symlink the existing version into the new location. If this ever +affects a real use-case, it will be investigated. 
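+
+To preview what this algorithm would do for your project without changing anything on disk, combine a normal install with the `--dry-run` flag described earlier:
+
+```bash
+npm install --dry-run   # report what the install would have done, without installing anything
+```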
+ +### See Also + +* [npm folders](/configuring-npm/folders) +* [npm update](/cli-commands/npm-update) +* [npm audit](/cli-commands/npm-audit) +* [npm fund](/cli-commands/npm-fund) +* [npm link](/cli-commands/npm-link) +* [npm rebuild](/cli-commands/npm-rebuild) +* [npm scripts](/using-npm/scripts) +* [npm build](/cli-commands/npm-build) +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) +* [npm registry](/using-npm/registry) +* [npm dist-tag](/cli-commands/npm-dist-tag) +* [npm uninstall](/cli-commands/npm-uninstall) +* [npm shrinkwrap](/cli-commands/npm-shrinkwrap) +* [package.json](/configuring-npm/package-json) diff --git a/docs/content/cli-commands/npm-link.md b/docs/content/cli-commands/npm-link.md new file mode 100644 index 0000000000000..a6d2908c4a32a --- /dev/null +++ b/docs/content/cli-commands/npm-link.md @@ -0,0 +1,92 @@ +--- +section: cli-commands +title: npm-link +description: Symlink a package folder +--- + +# npm-link(1) + +## Symlink a package folder + +### Synopsis + +```bash +npm link (in package dir) +npm link [<@scope>/][@] + +alias: npm ln +``` + +### Description + +Package linking is a two-step process. + +First, `npm link` in a package folder will create a symlink in the global folder +`{prefix}/lib/node_modules/` that links to the package where the `npm +link` command was executed. It will also link any bins in the package to `{prefix}/bin/{name}`. +Note that `npm link` uses the global prefix (see `npm prefix -g` for its value). + +Next, in some other location, `npm link package-name` will create a +symbolic link from globally-installed `package-name` to `node_modules/` +of the current folder. + +Note that `package-name` is taken from `package.json`, +not from directory name. + +The package name can be optionally prefixed with a scope. See [`scope`](/using-npm/npm-scope). +The scope must be preceded by an @-symbol and followed by a slash. + +When creating tarballs for `npm publish`, the linked packages are +"snapshotted" to their current state by resolving the symbolic links. + +This is handy for installing your own stuff, so that you can work on it and +test it iteratively without having to continually rebuild. + +For example: + +```bash + cd ~/projects/node-redis # go into the package directory + npm link # creates global link + cd ~/projects/node-bloggy # go into some other package directory. + npm link redis # link-install the package +``` + +Now, any changes to ~/projects/node-redis will be reflected in +~/projects/node-bloggy/node_modules/node-redis/. Note that the link should +be to the package name, not the directory name for that package. + +You may also shortcut the two steps in one. For example, to do the +above use-case in a shorter way: + +```bash +cd ~/projects/node-bloggy # go into the dir of your main project +npm link ../node-redis # link the dir of your dependency +``` + +The second line is the equivalent of doing: + +```bash +(cd ../node-redis; npm link) +npm link redis +``` + +That is, it first creates a global link, and then links the global +installation target into your project's `node_modules` folder. + +Note that in this case, you are referring to the directory name, `node-redis`, +rather than the package name `redis`. + +If your linked package is scoped (see [`scope`](/using-npm/npm-scope)) your link command must include that scope, e.g. 
+ +```bash +npm link @myorg/privatepackage +``` + +### See Also + +* [npm developers](/using-npm/developers) +* [package.json](/configuring-npm/package-json) +* [npm- nstall](/cli-commands/npm-install) +* [npm folders](/configuring-npm/folders) +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) diff --git a/docs/content/cli-commands/npm-logout.md b/docs/content/cli-commands/npm-logout.md new file mode 100644 index 0000000000000..ca7d86f2d1960 --- /dev/null +++ b/docs/content/cli-commands/npm-logout.md @@ -0,0 +1,54 @@ +--- +section: cli-commands +title: npm-logout +description: Log out of the registry +--- + +# npm-logout(1) + +## Log out of the registry + +### Synopsis + +```bash +npm logout [--registry=] [--scope=<@scope>] +``` + +### Description + +When logged into a registry that supports token-based authentication, tell the +server to end this token's session. This will invalidate the token everywhere +you're using it, not just for the current environment. + +When logged into a legacy registry that uses username and password authentication, this will +clear the credentials in your user configuration. In this case, it will _only_ affect +the current environment. + +If `--scope` is provided, this will find the credentials for the registry +connected to that scope, if set. + +### Configuration + +#### registry + +Default: https://registry.npmjs.org/ + +The base URL of the npm package registry. If `scope` is also specified, +it takes precedence. + +#### scope + +Default: The scope of your current project, if any, otherwise none. + +If specified, you will be logged out of the specified scope. See [`scope`](/using-npm/npm-scope). + +```bash +npm logout --scope=@myco +``` + +### See Also + +* [npm adduser](/cli-commands/npm-adduser) +* [npm registry](/using-npm/registry) +* [npm config](/cli-commands/npm-config) +* [npm whoami](/cli-commands/npm-whoami) diff --git a/docs/content/cli-commands/npm-ls.md b/docs/content/cli-commands/npm-ls.md new file mode 100644 index 0000000000000..64a399155ff60 --- /dev/null +++ b/docs/content/cli-commands/npm-ls.md @@ -0,0 +1,129 @@ +--- +section: cli-commands +title: npm-ls +description: List installed packages +--- + +# npm-ls(1) + +## List installed packages + +### Synopsis + +```bash +npm ls [[<@scope>/] ...] + +aliases: list, la, ll +``` + +### Description + +This command will print to stdout all the versions of packages that are +installed, as well as their dependencies, in a tree-structure. + +Positional arguments are `name@version-range` identifiers, which will +limit the results to only the paths to the packages named. Note that +nested packages will *also* show the paths to the specified packages. +For example, running `npm ls promzard` in npm's source tree will show: + +```bash + npm@@VERSION@ /path/to/npm + └─┬ init-package-json@0.0.4 + └── promzard@0.1.5 +``` + +It will print out extraneous, missing, and invalid packages. + +If a project specifies git urls for dependencies these are shown +in parentheses after the name@version to make it easier for users to +recognize potential forks of a project. + +The tree shown is the logical dependency tree, based on package +dependencies, not the physical layout of your node_modules folder. + +When run as `ll` or `la`, it shows extended information by default. + +### Configuration + +#### json + +* Default: false +* Type: Boolean + +Show information in JSON format. + +#### long + +* Default: false +* Type: Boolean + +Show extended information. 
+ +#### parseable + +* Default: false +* Type: Boolean + +Show parseable output instead of tree view. + +#### global + +* Default: false +* Type: Boolean + +List packages in the global install prefix instead of in the current +project. + +#### depth + +* Type: Int + +Max display depth of the dependency tree. + +#### prod / production + +* Type: Boolean +* Default: false + +Display only the dependency tree for packages in `dependencies`. + +#### dev / development + +* Type: Boolean +* Default: false + +Display only the dependency tree for packages in `devDependencies`. + +#### only + +* Type: String + +When "dev" or "development", is an alias to `dev`. + +When "prod" or "production", is an alias to `production`. + +#### link + +* Type: Boolean +* Default: false + +Display only dependencies which are linked + +#### unicode + +* Type: Boolean +* Default: true + +Whether to represent the tree structure using unicode characters. +Set it to false in order to use all-ansi output. + +### See Also + +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) +* [npm folders](/configuring-npm/folders) +* [npm install](/cli-commands/npm-install) +* [npm link](/cli-commands/npm-link) +* [npm prune](/cli-commands/npm-prune) +* [npm outdated](/cli-commands/npm-outdated) +* [npm update](/cli-commands/npm-update) diff --git a/docs/content/cli-commands/npm-org.md b/docs/content/cli-commands/npm-org.md new file mode 100644 index 0000000000000..33db38d0f82a5 --- /dev/null +++ b/docs/content/cli-commands/npm-org.md @@ -0,0 +1,65 @@ +--- +section: cli-commands +title: npm-org +description: Manage orgs +--- + +# npm-org(1) + +## Manage orgs + +### Synopsis + +```bash +npm org set [developer | admin | owner] +npm org rm +npm org ls [] +``` + +### Example + +Add a new developer to an org: + +```bash +$ npm org set my-org @mx-smith +``` + +Add a new admin to an org (or change a developer to an admin): + +```bash +$ npm org set my-org @mx-santos admin +``` + +Remove a user from an org: + +```bash +$ npm org rm my-org mx-santos +``` + +List all users in an org: + +```bash +$ npm org ls my-org +``` + +List all users in JSON format: + +```bash +$ npm org ls my-org --json +``` + +See what role a user has in an org: + +```bash +$ npm org ls my-org @mx-santos +``` + +### Description + +You can use the `npm org` commands to manage and view users of an organization. +It supports adding and removing users, changing their roles, listing them, and +finding specific ones and their roles. + +### See Also + +* [Documentation on npm Orgs](https://docs.npmjs.com/orgs/) diff --git a/doc/cli/npm-outdated.md b/docs/content/cli-commands/npm-outdated.md similarity index 85% rename from doc/cli/npm-outdated.md rename to docs/content/cli-commands/npm-outdated.md index 045586a40a728..c7934109dca37 100644 --- a/doc/cli/npm-outdated.md +++ b/docs/content/cli-commands/npm-outdated.md @@ -1,11 +1,20 @@ -npm-outdated(1) -- Check for outdated packages -============================================== +--- +section: cli-commands +title: npm-outdated +description: Check for outdated packages +--- -## SYNOPSIS +# npm-outdated(1) - npm outdated [[<@scope>/] ...] +## Check for outdated packages -## DESCRIPTION +### Synopsis + +```bash +npm outdated [[<@scope>/] ...] +``` + +### Description This command will check the registry to see if any (or, specific) installed packages are currently outdated. @@ -20,7 +29,7 @@ In the output: Running `npm publish` with no special configuration will publish the package with a dist-tag of `latest`. 
This may or may not be the maximum version of the package, or the most-recently published version of the package, depending - on how the package's developer manages the latest dist-tag(1). + on how the package's developer manages the latest [dist-tag](npm-dist-tag). * `location` is where in the dependency tree the package is located. Note that `npm outdated` defaults to a depth of 0, so unless you override that, you'll always be seeing only top-level dependencies that are outdated. @@ -33,7 +42,7 @@ In the output: ### An example -``` +```bash $ npm outdated Package Current Wanted Latest Location glob 5.0.15 5.0.15 6.0.1 test-outdated-output @@ -69,30 +78,30 @@ A few things to note: * `once` is just plain out of date. Reinstalling `node_modules` from scratch or running `npm update` will bring it up to spec. -## CONFIGURATION +### Configuration -### json +#### json * Default: false * Type: Boolean Show information in JSON format. -### long +#### long * Default: false * Type: Boolean Show extended information. -### parseable +#### parseable * Default: false * Type: Boolean Show parseable output instead of tree view. -### global +#### global * Default: false * Type: Boolean @@ -100,16 +109,16 @@ Show parseable output instead of tree view. Check packages in the global install prefix instead of in the current project. -### depth +#### depth * Default: 0 * Type: Int Max depth for checking dependency tree. -## SEE ALSO +### See Also -* npm-update(1) -* npm-dist-tag(1) -* npm-registry(7) -* npm-folders(5) +* [npm update](/cli-commands/npm-update) +* [npm dist-tag](/cli-commands/npm-dist-tag) +* [npm registry](/using-npm/registry) +* [npm folders](/configuring-npm/folders) diff --git a/docs/content/cli-commands/npm-owner.md b/docs/content/cli-commands/npm-owner.md new file mode 100644 index 0000000000000..bc2fbc82fb280 --- /dev/null +++ b/docs/content/cli-commands/npm-owner.md @@ -0,0 +1,47 @@ +--- +section: cli-commands +title: npm-owner +description: Manage package owners +--- + +# npm-owner(1) +## Manage package owners + +### Synopsis + +```bash +npm owner add [<@scope>/] +npm owner rm [<@scope>/] +npm owner ls [<@scope>/] + +aliases: author +``` + +### Description + +Manage ownership of published packages. + +* ls: + List all the users who have access to modify a package and push new versions. + Handy when you need to know who to bug for help. +* add: + Add a new user as a maintainer of a package. This user is enabled to modify + metadata, publish new versions, and add other owners. +* rm: + Remove a user from the package owner list. This immediately revokes their + privileges. + +Note that there is only one level of access. Either you can modify a package, +or you can't. Future versions may contain more fine-grained access levels, but +that is not implemented at this time. + +If you have two-factor authentication enabled with `auth-and-writes` then +you'll need to include an otp on the command line when changing ownership +with `--otp`. 
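+
+For example (user, package, and one-time password are all hypothetical):
+
+```bash
+npm owner ls my-pkg                           # see who can currently modify and publish my-pkg
+npm owner add some-user my-pkg --otp=123456   # add an owner when 2FA is set to auth-and-writes
+```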
+ +### See Also + +* [npm publish](/cli-commands/npm-publish) +* [npm registry](/using-npm/registry) +* [npm adduser](/cli-commands/npm-adduser) +* [npm disputes](/using-npm/disputes) diff --git a/docs/content/cli-commands/npm-pack.md b/docs/content/cli-commands/npm-pack.md new file mode 100644 index 0000000000000..acf18559c14e0 --- /dev/null +++ b/docs/content/cli-commands/npm-pack.md @@ -0,0 +1,38 @@ +--- +section: cli-commands +title: npm-pack +description: Create a tarball from a package +--- + +# npm-pack(1) + +## Create a tarball from a package + +### Synopsis + +```bash +npm pack [[<@scope>/]...] [--dry-run] +``` + +### Description + +For anything that's installable (that is, a package folder, tarball, +tarball url, name@tag, name@version, name, or scoped name), this +command will fetch it to the cache, and then copy the tarball to the +current working directory as `-.tgz`, and then write +the filenames out to stdout. + +If the same package is specified multiple times, then the file will be +overwritten the second time. + +If no arguments are supplied, then npm packs the current package folder. + +The `--dry-run` argument will do everything that pack usually does without +actually packing anything. Reports on what would have gone into the tarball. + +### See Also + +* [npm cache](/cli-commands/npm-cache) +* [npm publish](/cli-commands/npm-publish) +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) diff --git a/docs/content/cli-commands/npm-ping.md b/docs/content/cli-commands/npm-ping.md new file mode 100644 index 0000000000000..93d18b57f840b --- /dev/null +++ b/docs/content/cli-commands/npm-ping.md @@ -0,0 +1,33 @@ +--- +section: cli-commands +title: npm-ping +description: Ping npm registry +--- + +# npm-ping(1) + +## Ping npm registry + +### Synopsis + +```bash +npm ping [--registry ] +``` + +### Description + +Ping the configured or given npm registry and verify authentication. +If it works it will output something like: + +```bash +Ping success: {*Details about registry*} +``` +otherwise you will get: +```bash +Ping error: {*Detail about error} +``` + +### See Also + +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) diff --git a/docs/content/cli-commands/npm-prefix.md b/docs/content/cli-commands/npm-prefix.md new file mode 100644 index 0000000000000..b82fec663a147 --- /dev/null +++ b/docs/content/cli-commands/npm-prefix.md @@ -0,0 +1,32 @@ +--- +section: cli-commands +title: npm-prefix +description: Display prefix +--- + +# npm-prefix(1) + +## Display prefix + +### Synopsis + +```bash +npm prefix [-g] +``` + +### Description + +Print the local prefix to standard out. This is the closest parent directory +to contain a `package.json` file or `node_modules` directory, unless `-g` is +also specified. + +If `-g` is specified, this will be the value of the global prefix. See +[`npm config`](/cli-commands/npm-config) for more detail. 
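+
+As a quick illustration (the paths shown are hypothetical and will differ per machine):
+
+```bash
+# Inside a project, even from a subdirectory, the local prefix is the package root
+$ cd ~/projects/my-app/src
+$ npm prefix
+/home/user/projects/my-app
+
+# The global prefix
+$ npm prefix -g
+/usr/local
+```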
+ +### See Also + +* [npm root](/cli-commands/npm-root) +* [npm bin](/cli-commands/npm-bin) +* [npm folders](/configuring-npm/folders) +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) diff --git a/doc/cli/npm-profile.md b/docs/content/cli-commands/npm-profile.md similarity index 83% rename from doc/cli/npm-profile.md rename to docs/content/cli-commands/npm-profile.md index 16b5e11b60c65..9fe82cd2d3a95 100644 --- a/doc/cli/npm-profile.md +++ b/docs/content/cli-commands/npm-profile.md @@ -1,15 +1,23 @@ -npm-profile(1) -- Change settings on your registry profile -========================================================== +--- +section: cli-commands +title: npm-profile +description: Change settings on your registry profile +--- -## SYNOPSIS +# npm-profile(1) +## Change settings on your registry profile - npm profile get [--json|--parseable] [] - npm profile set [--json|--parseable] - npm profile set password - npm profile enable-2fa [auth-and-writes|auth-only] - npm profile disable-2fa +### Synopsis -## DESCRIPTION +```bash +npm profile get [--json|--parseable] [] +npm profile set [--json|--parseable] +npm profile set password +npm profile enable-2fa [auth-and-writes|auth-only] +npm profile disable-2fa +``` + +### Description Change your profile information on the registry. This not be available if you're using a non-npmjs registry. @@ -18,7 +26,7 @@ you're using a non-npmjs registry. Display all of the properties of your profile, or one or more specific properties. It looks like: -``` +```bash +-----------------+---------------------------+ | name | example | +-----------------+---------------------------+ @@ -63,12 +71,12 @@ you're using a non-npmjs registry. * `npm profile disable-2fa`: Disables two-factor authentication. -## DETAILS +### Details All of the `npm profile` subcommands accept `--json` and `--parseable` and will tailor their output based on those. Some of these commands may not be available on non npmjs.com registries. -## SEE ALSO +### See Also -* npm-config(7) +* [npm config](/cli-commands/npm-config) diff --git a/docs/content/cli-commands/npm-prune.md b/docs/content/cli-commands/npm-prune.md new file mode 100644 index 0000000000000..c6b61e62f828a --- /dev/null +++ b/docs/content/cli-commands/npm-prune.md @@ -0,0 +1,46 @@ +--- +section: cli-commands +title: npm-prune +description: Remove extraneous packages +--- + +# npm-prune(1) +## Remove extraneous packages + +### Synopsis + +```bash +npm prune [[<@scope>/]...] [--production] [--dry-run] [--json] +``` + +### Description + +This command removes "extraneous" packages. If a package name is +provided, then only packages matching one of the supplied names are +removed. + +Extraneous packages are packages that are not listed on the parent +package's dependencies list. + +If the `--production` flag is specified or the `NODE_ENV` environment +variable is set to `production`, this command will remove the packages +specified in your `devDependencies`. Setting `--no-production` will +negate `NODE_ENV` being set to `production`. + +If the `--dry-run` flag is used then no changes will actually be made. + +If the `--json` flag is used then the changes `npm prune` made (or would +have made with `--dry-run`) are printed as a JSON object. + +In normal operation with package-locks enabled, extraneous modules are +pruned automatically when modules are installed and you'll only need +this command with the `--production` flag. 
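+
+For example, in that normal mode of operation a typical invocation uses only the flags documented above:
+
+```bash
+# Preview what would be removed, as JSON, without touching anything
+npm prune --dry-run --json
+
+# Drop devDependencies for a production deployment
+npm prune --production
+```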
+ +If you've disabled package-locks then extraneous modules will not be removed +and it's up to you to run `npm prune` from time-to-time to remove them. + +### See Also + +* [npm uninstall](/cli-commands/npm-uninstall) +* [npm folders](/configuring-npm/folders) +* [npm ls](/cli-commands/npm-ls) diff --git a/docs/content/cli-commands/npm-publish.md b/docs/content/cli-commands/npm-publish.md new file mode 100644 index 0000000000000..44c36e0b7ac6c --- /dev/null +++ b/docs/content/cli-commands/npm-publish.md @@ -0,0 +1,81 @@ +--- +section: cli-commands +title: npm-publish +description: Publish a package +--- + +# npm-publish(1) + +## Publish a package + +### Synopsis +```bash +npm publish [|] [--tag ] [--access ] [--otp otpcode] [--dry-run] + +Publishes '.' if no argument supplied +Sets tag 'latest' if no --tag specified +``` + +### Description + +Publishes a package to the registry so that it can be installed by name. All +files in the package directory are included if no local `.gitignore` or +`.npmignore` file exists. If both files exist and a file is ignored by +`.gitignore` but not by `.npmignore` then it will be included. See +[`developers`](/using-npm/developers) for full details on what's included in the published package, as well as details on how the package is built. + +By default npm will publish to the public registry. This can be overridden by +specifying a different default registry or using a [`scope`](/using-npm/npm-scope) in the name (see [`package.json`](/configuring-npm/package-json)). + +* ``: + A folder containing a package.json file + +* ``: + A url or file path to a gzipped tar archive containing a single folder + with a package.json file inside. + +* `[--tag ]` + Registers the published package with the given tag, such that `npm install + @` will install this version. By default, `npm publish` updates + and `npm install` installs the `latest` tag. See [`npm-dist-tag`](npm-dist-tag) for + details about tags. + +* `[--access ]` + Tells the registry whether this package should be published as public or + restricted. Only applies to scoped packages, which default to `restricted`. + If you don't have a paid account, you must publish with `--access public` + to publish scoped packages. + +* `[--otp ]` + If you have two-factor authentication enabled in `auth-and-writes` mode + then you can provide a code from your authenticator with this. If you + don't include this and you're running from a TTY then you'll be prompted. + +* `[--dry-run]` + As of `npm@6`, does everything publish would do except actually publishing + to the registry. Reports the details of what would have been published. + +Fails if the package name and version combination already exists in +the specified registry. + +Once a package is published with a given name and version, that +specific name and version combination can never be used again, even if +it is removed with [`npm unpublish`](/cli-commands/npm-unpublish). + +As of `npm@5`, both a sha1sum and an integrity field with a sha512sum of the +tarball will be submitted to the registry during publication. Subsequent +installs will use the strongest supported algorithm to verify downloads. + +Similar to `--dry-run` see [`npm pack`](/cli-commands/npm-pack), which figures out the files to be +included and packs them into a tarball to be uploaded to the registry. 
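+
+A common sequence, using only the flags described above (the dist-tag name is illustrative):
+
+```bash
+# Check exactly what would be published
+npm publish --dry-run
+
+# Publish a scoped package publicly under a pre-release dist-tag
+npm publish --access public --tag next
+```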
+ +### See Also + +* [npm registry](/using-npm/registry) +* [npm scope](/using-npm/scope) +* [npm adduser](/cli-commands/adduser) +* [npm owner](/cli-commands/owner) +* [npm deprecate](/cli-commands/deprecate) +* [npm dist-tag](/cli-commands/dist-tag) +* [npm pack](/cli-commands/pack) +* [npm profile](/cli-commands/profile) diff --git a/docs/content/cli-commands/npm-rebuild.md b/docs/content/cli-commands/npm-rebuild.md new file mode 100644 index 0000000000000..414b9ca55a193 --- /dev/null +++ b/docs/content/cli-commands/npm-rebuild.md @@ -0,0 +1,26 @@ +--- +section: cli-commands +title: npm-rebuild +description: Rebuild a package +--- + +# npm-rebuild(1) + +## Rebuild a package + +### Synopsis + +```bash +npm rebuild [[<@scope>/]...] + +alias: npm rb +``` + +### Description + +This command runs the `npm build` command on the matched folders. This is useful when you install a new version of node, and must recompile all your C++ addons with the new binary. + +### See Also + +* [npm build](/cli-commands/npm-build) +* [npm install](/cli-commands/npm-install) diff --git a/docs/content/cli-commands/npm-repo.md b/docs/content/cli-commands/npm-repo.md new file mode 100644 index 0000000000000..ad41ea571253f --- /dev/null +++ b/docs/content/cli-commands/npm-repo.md @@ -0,0 +1,36 @@ +--- +section: cli-commands +title: npm-repo +description: Open package repository page in the browser +--- + +# npm-repo(1) + +## Open package repository page in the browser + +### Synopsis + +```bash +npm repo [] +``` + +### Description + +This command tries to guess at the likely location of a package's +repository URL, and then tries to open it using the `--browser` +config param. If no package name is provided, it will search for +a `package.json` in the current folder and use the `name` property. + +### Configuration + +#### browser + +* Default: OS X: `"open"`, Windows: `"start"`, Others: `"xdg-open"` +* Type: String + +The browser that is called by the `npm repo` command to open websites. + +### See Also + +* [npm docs](/cli-commands/npm-docs) +* [npm config](/cli-commands/npm-config) diff --git a/docs/content/cli-commands/npm-restart.md b/docs/content/cli-commands/npm-restart.md new file mode 100644 index 0000000000000..d6d388b224ab1 --- /dev/null +++ b/docs/content/cli-commands/npm-restart.md @@ -0,0 +1,49 @@ +--- +section: cli-commands +title: npm-restart +description: Restart a package +--- + +# npm-restart(1) + +## Restart a package + +### Synopsis + +```bash +npm restart [-- ] +``` + +### Description + +This restarts a package. + +This runs a package's "stop", "restart", and "start" scripts, and associated +pre- and post- scripts, in the order given below: + +1. prerestart +2. prestop +3. stop +4. poststop +5. restart +6. prestart +7. start +8. poststart +9. postrestart + +### Note + +Note that the "restart" script is run **in addition to** the "stop" +and "start" scripts, not instead of them. + +This is the behavior as of `npm` major version 2. 
A change in this +behavior will be accompanied by an increase in major version number + +### See Also + +* [npm run-script](/cli-commands/npm-run-script) +* [npm scripts](/using-npm/scripts) +* [npm test](/cli-commands/npm-test) +* [npm start](/cli-commands/npm-start) +* [npm stop](/cli-commands/npm-stop) +* [npm restart](/cli-commands/npm-restart) \ No newline at end of file diff --git a/docs/content/cli-commands/npm-root.md b/docs/content/cli-commands/npm-root.md new file mode 100644 index 0000000000000..2b27878af4c6d --- /dev/null +++ b/docs/content/cli-commands/npm-root.md @@ -0,0 +1,26 @@ +--- +section: cli-commands +title: npm-root +description: Display npm root +--- + +# npm-root(1) + +## Display npm root + +### Synopsis +```bash +npm root [-g] +``` + +### Description + +Print the effective `node_modules` folder to standard out. + +### See Also + +* [npm prefix](/cli-commands/npm-prefix) +* [npm bin](/cli-commands/npm-bin) +* [npm folders](/configuring-npm/folders) +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) diff --git a/doc/cli/npm-run-script.md b/docs/content/cli-commands/npm-run-script.md similarity index 81% rename from doc/cli/npm-run-script.md rename to docs/content/cli-commands/npm-run-script.md index ef73820d5b07e..51def74c3c4c3 100644 --- a/doc/cli/npm-run-script.md +++ b/docs/content/cli-commands/npm-run-script.md @@ -1,13 +1,22 @@ -npm-run-script(1) -- Run arbitrary package scripts -================================================== +--- +section: cli-commands +title: npm-run-script +description: Run arbitrary package scripts +--- -## SYNOPSIS +# npm-run-script(1) - npm run-script [--silent] [-- ...] +## Run arbitrary package scripts - alias: npm run +### Synopsis -## DESCRIPTION +```bash +npm run-script [--silent] [-- ...] + +alias: npm run +``` + +### Description This runs an arbitrary command from a package's `"scripts"` object. If no `"command"` is provided, it will list the available scripts. `run[-script]` is @@ -20,7 +29,9 @@ use custom arguments when executing scripts. The special option `--` is used by [getopt](https://goo.gl/KxMmtG) to delimit the end of the options. npm will pass all the arguments after the `--` directly to your script: - npm run test -- --grep="pattern" +```bash +npm run test -- --grep="pattern" +``` The arguments will only be passed to the script specified after ```npm run``` and not to any pre or post script. @@ -36,11 +47,15 @@ locally-installed dependencies can be used without the `node_modules/.bin` prefix. For example, if there is a `devDependency` on `tap` in your package, you should write: - "scripts": {"test": "tap test/\*.js"} +```bash +"scripts": {"test": "tap test/\*.js"} +``` instead of - "scripts": {"test": "node_modules/.bin/tap test/\*.js"} +```bash +"scripts": {"test": "node_modules/.bin/tap test/\*.js"} +``` to run your tests. @@ -72,11 +87,11 @@ You can use the `--if-present` flag to avoid exiting with a non-zero exit code when the script is undefined. This lets you run potentially undefined scripts without breaking the execution chain. 
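+
+For instance (the script names here are hypothetical and would come from your own `package.json`):
+
+```bash
+# Run the "lint" script only if it is defined, without breaking a command chain
+npm run lint --if-present
+
+# Suppress npm's own log output while a script runs
+npm run build --silent
+```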
-## SEE ALSO +### See Also -* npm-scripts(7) -* npm-test(1) -* npm-start(1) -* npm-restart(1) -* npm-stop(1) -* npm-config(7) +* [npm scripts](/using-npm/scripts) +* [npm test](/cli-commands/npm-test) +* [npm start](/cli-commands/npm-start) +* [npm restart](/cli-commands/npm-restart) +* [npm stop](/cli-commands/npm-stop) +* [npm config](/cli-commands/npm-config) diff --git a/doc/cli/npm-search.md b/docs/content/cli-commands/npm-search.md similarity index 80% rename from doc/cli/npm-search.md rename to docs/content/cli-commands/npm-search.md index c1107d79b735b..e066106faf71f 100644 --- a/doc/cli/npm-search.md +++ b/docs/content/cli-commands/npm-search.md @@ -1,13 +1,22 @@ -npm-search(1) -- Search for packages -==================================== +--- +section: cli-commands +title: npm-search +description: Search for packages +--- -## SYNOPSIS +# npm-search(1) - npm search [-l|--long] [--json] [--parseable] [--no-description] [search terms ...] +## Search for packages - aliases: s, se, find +### Synopsis -## DESCRIPTION +```bash +npm search [-l|--long] [--json] [--parseable] [--no-description] [search terms ...] + +aliases: s, se, find +``` + +### Description Search the registry for packages matching the search terms. `npm search` performs a linear, incremental, lexically-ordered search through package @@ -31,9 +40,9 @@ quoted in most shells.) ### A Note on caching -## CONFIGURATION +### Configuration -### description +#### description * Default: true * Type: Boolean @@ -41,21 +50,21 @@ quoted in most shells.) Used as `--no-description`, disables search matching in package descriptions and suppresses display of that field in results. -### json +#### json * Default: false * Type: Boolean Output search results as a JSON array. -### parseable +#### parseable * Default: false * Type: Boolean Output search results as lines with tab-separated columns. -### long +#### long * Default: false * Type: Boolean @@ -65,28 +74,28 @@ lines. When disabled (default) search results are truncated to fit neatly on a single line. Modules with extremely long names will fall on multiple lines. -### searchopts +#### searchopts * Default: "" * Type: String Space-separated options that are always passed to search. -### searchexclude +#### searchexclude * Default: "" * Type: String Space-separated options that limit the results from search. -### searchstaleness +#### searchstaleness * Default: 900 (15 minutes) * Type: Number The age of the cache, in seconds, before another registry request is made. -### registry +#### registry * Default: https://registry.npmjs.org/ * Type: url @@ -97,10 +106,9 @@ repository, `npm search` will default to that registry when searching. Pass a different registry url such as the default above in order to override this setting. 
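+
+Putting a few of these options together (the search terms are arbitrary):
+
+```bash
+# Search names, descriptions and keywords
+npm search http proxy
+
+# Tab-separated output, without description matching
+npm search --parseable --no-description http proxy
+```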
-## SEE ALSO +### See Also -* npm-registry(7) -* npm-config(1) -* npm-config(7) -* npmrc(5) -* npm-view(1) +* [npm registry](/using-npm/registry) +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) +* [npm view](/cli-commands/npm-view) diff --git a/docs/content/cli-commands/npm-shrinkwrap.md b/docs/content/cli-commands/npm-shrinkwrap.md new file mode 100644 index 0000000000000..342fb001675ab --- /dev/null +++ b/docs/content/cli-commands/npm-shrinkwrap.md @@ -0,0 +1,34 @@ +--- +section: cli-commands +title: npm-shrinkwrap +description: Lock down dependency versions for publication +--- + +# npm-shrinkwrap(1) + +## Lock down dependency versions for publication + +### Synopsis + +```bash +npm shrinkwrap +``` + +### Description + +This command repurposes `package-lock.json` into a publishable +`npm-shrinkwrap.json` or simply creates a new one. The file created and updated +by this command will then take precedence over any other existing or future +`package-lock.json` files. For a detailed explanation of the design and purpose +of package locks in npm, see [package-locks](/configuring-npm/package-locks). + +### See Also + +* [npm install](/cli-commands/npm-install) +* [npm run-script](/cli-commands/npm-run-script) +* [npm scripts](/using-npm/scripts) +* [package.js](/configuring-npm/package-json) +* [package-locks](/configuring-npm/package-locks) +* [package-lock.json](/configuring-npm/package-lock-json) +* [shrinkwrap.json](/configuring-npm/shrinkwrap-json) +* [npm ls](/cli-commands/npm-ls) diff --git a/docs/content/cli-commands/npm-star.md b/docs/content/cli-commands/npm-star.md new file mode 100644 index 0000000000000..1912e9c654bc2 --- /dev/null +++ b/docs/content/cli-commands/npm-star.md @@ -0,0 +1,31 @@ +--- +section: cli-commands +title: npm-star +description: Mark your favorite packages +--- + +# npm-star(1) + +## Mark your favorite packages + +### Synopsis + +```bash +npm star [...] +npm unstar [...] +``` + +### Description + +"Starring" a package means that you have some interest in it. It's +a vaguely positive way to show that you care. + +"Unstarring" is the same thing, but in reverse. + +It's a boolean thing. Starring repeatedly has no additional effect. + +### See Also + +* [npm view](/cli-commands/npm-view) +* [npm whoami](/cli-commands/npm-whoami) +* [npm adduser](/cli-commands/npm-adduser) diff --git a/docs/content/cli-commands/npm-stars.md b/docs/content/cli-commands/npm-stars.md new file mode 100644 index 0000000000000..475547bb4b7f2 --- /dev/null +++ b/docs/content/cli-commands/npm-stars.md @@ -0,0 +1,29 @@ +--- +section: cli-commands +title: npm-stars +description: View packages marked as favorites +--- + +# npm-stars(1) + +## View packages marked as favorites + +### Synopsis +```bash +npm stars [] +``` + +### Description + +If you have starred a lot of neat things and want to find them again +quickly this command lets you do just that. + +You may also want to see your friend's favorite packages, in this case +you will most certainly enjoy this command. 
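+
+For example (the username is a placeholder):
+
+```bash
+# Packages you have starred
+npm stars
+
+# Packages starred by another user
+npm stars mx-santos
+```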
+ +### See Also + +* [npm star](/cli-commands/npm-star) +* [npm view](/cli-commands/npm-view) +* [npm whoami](/cli-commands/npm-whoami) +* [npm adduser](/cli-commands/npm-adduser) diff --git a/docs/content/cli-commands/npm-start.md b/docs/content/cli-commands/npm-start.md new file mode 100644 index 0000000000000..839528257b6d9 --- /dev/null +++ b/docs/content/cli-commands/npm-start.md @@ -0,0 +1,32 @@ +--- +section: cli-commands +title: npm-start +description: Start a package +--- + +# npm-start(1) + +## Start a package + +### Synopsis + +```bash +npm start [-- ] +``` + +### Description + +This runs an arbitrary command specified in the package's `"start"` property of +its `"scripts"` object. If no `"start"` property is specified on the +`"scripts"` object, it will run `node server.js`. + +As of [`npm@2.0.0`](https://blog.npmjs.org/post/98131109725/npm-2-0-0), you can +use custom arguments when executing scripts. Refer to [`npm run-script`](/cli-commands/npm-run-script) for more details. + +### See Also + +* [npm run-script](/cli-commands/npm-run-script) +* [npm scripts](/using-npm/scripts) +* [npm test](/cli-commands/npm-test) +* [npm restart](/cli-commands/npm-restart) +* [npm stop](/cli-commands/npm-stop) diff --git a/docs/content/cli-commands/npm-stop.md b/docs/content/cli-commands/npm-stop.md new file mode 100644 index 0000000000000..da759047cb19a --- /dev/null +++ b/docs/content/cli-commands/npm-stop.md @@ -0,0 +1,27 @@ +--- +section: cli-commands +title: npm-stop +description: Stop a package +--- + +# npm-stop(1) + +## Stop a package + +### Synopsis + +```bash +npm stop [-- ] +``` + +### Description + +This runs a package's "stop" script, if one was provided. + +### See Also + +* [npm run-script](/cli-commands/npm-run-script) +* [npm scripts](/using-npm/scripts) +* [npm test](/cli-commands/npm-test) +* [npm start](/cli-commands/npm-start) +* [npm restart](/cli-commands/npm-restart) diff --git a/docs/content/cli-commands/npm-team.md b/docs/content/cli-commands/npm-team.md new file mode 100644 index 0000000000000..9a63b10c26f32 --- /dev/null +++ b/docs/content/cli-commands/npm-team.md @@ -0,0 +1,64 @@ +--- +section: cli-commands +title: npm-team +description: Manage organization teams and team memberships +--- + +# npm-team(1) + +## Manage organization teams and team memberships + +### Synopsis + +```bash +npm team create +npm team destroy + +npm team add +npm team rm + +npm team ls | + +npm team edit +``` + +### Description + +Used to manage teams in organizations, and change team memberships. Does not +handle permissions for packages. + +Teams must always be fully qualified with the organization/scope they belong to +when operating on them, separated by a colon (`:`). That is, if you have a `wombats` team in a `wisdom` organization, you must always refer to that team as `wisdom:wombats` in these commands. + +* create / destroy: + Create a new team, or destroy an existing one. Note: You cannot remove the `developers` team, learn more. +* add / rm: + Add a user to an existing team, or remove a user from a team they belong to. + +* ls: + If performed on an organization name, will return a list of existing teams + under that organization. If performed on a team, it will instead return a list + of all users belonging to that particular team. + +* edit: + Edit a current team. + +### Details + +`npm team` always operates directly on the current registry, configurable from +the command line using `--registry=`. 
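+
+For example, managing the `wisdom:wombats` team described above (usernames are placeholders):
+
+```bash
+# Create the team under the wisdom org
+npm team create wisdom:wombats
+
+# Add a member, then list everyone on the team
+npm team add wisdom:wombats mx-santos
+npm team ls wisdom:wombats
+```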
+ +In order to create teams and manage team membership, you must be a *team admin* +under the given organization. Listing teams and team memberships may be done by +any member of the organizations. + +Organization creation and management of team admins and *organization* members +is done through the website, not the npm CLI. + +To use teams to manage permissions on packages belonging to your organization, +use the `npm access` command to grant or revoke the appropriate permissions. + +### See Also + +* [npm access](/cli-commands/npm-access) +* [npm registry](/using-npm/registry) diff --git a/docs/content/cli-commands/npm-test.md b/docs/content/cli-commands/npm-test.md new file mode 100644 index 0000000000000..99c027e3e057a --- /dev/null +++ b/docs/content/cli-commands/npm-test.md @@ -0,0 +1,29 @@ +--- +section: cli-commands +title: npm-test +description: Test a package +--- + +# npm-test(1) + +## Test a package + +### Synopsis + +```bash +npm test [-- ] + +aliases: t, tst +``` + +### Description + +This runs a package's "test" script, if one was provided. + +### See Also + +* [npm run-script](/cli-commands/npm-run-script) +* [npm scripts](/using-npm/scripts) +* [npm start](/cli-commands/npm-start) +* [npm restart](/cli-commands/npm-restart) +* [npm stop](/cli-commands/npm-stop) diff --git a/doc/cli/npm-token.md b/docs/content/cli-commands/npm-token.md similarity index 86% rename from doc/cli/npm-token.md rename to docs/content/cli-commands/npm-token.md index 0212dfe2ea85a..37a74083d2a12 100644 --- a/doc/cli/npm-token.md +++ b/docs/content/cli-commands/npm-token.md @@ -1,20 +1,29 @@ -npm-token(1) -- Manage your authentication tokens -================================================= +--- +section: cli-commands +title: npm-token +description: Manage your authentication tokens +--- -## SYNOPSIS +# npm-token(1) - npm token list [--json|--parseable] - npm token create [--read-only] [--cidr=1.1.1.1/24,2.2.2.2/16] - npm token revoke +## Manage your authentication tokens -## DESCRIPTION +### Synopsis +```bash + npm token list [--json|--parseable] + npm token create [--read-only] [--cidr=1.1.1.1/24,2.2.2.2/16] + npm token revoke + ``` -This list you list, create and revoke authentication tokens. +### Description + +This lets you list, create and revoke authentication tokens. * `npm token list`: Shows a table of all active authentication tokens. You can request this as JSON with `--json` or tab-separated values with `--parseable`. -``` + +```bash +--------+---------+------------+----------+----------------+ | id | token | created | read-only | CIDR whitelist | +--------+---------+------------+----------+----------------+ @@ -40,7 +49,7 @@ This list you list, create and revoke authentication tokens. limit use of this token to. This will prompt you for your password, and, if you have two-factor authentication enabled, an otp. -``` +```bash +----------------+--------------------------------------+ | token | a73c9572-f1b9-8983-983d-ba3ac3cc913d | +----------------+--------------------------------------+ diff --git a/docs/content/cli-commands/npm-uninstall.md b/docs/content/cli-commands/npm-uninstall.md new file mode 100644 index 0000000000000..96fdc4ebe05ba --- /dev/null +++ b/docs/content/cli-commands/npm-uninstall.md @@ -0,0 +1,64 @@ +--- +section: cli-commands +title: npm-uninstall +description: Remove a package +--- + +# npm-uninstall(1) + +## Remove a package + +### Synopsis + +```bash +npm uninstall [<@scope>/][@]... 
[-S|--save|-D|--save-dev|-O|--save-optional|--no-save] + +aliases: remove, rm, r, un, unlink +``` + +### Description + +This uninstalls a package, completely removing everything npm installed +on its behalf. + +Example: + +```bash +npm uninstall sax +``` + +In global mode (ie, with `-g` or `--global` appended to the command), +it uninstalls the current package context as a global package. + +`npm uninstall` takes 3 exclusive, optional flags which save or update +the package version in your main package.json: + +* `-S, --save`: Package will be removed from your `dependencies`. + +* `-D, --save-dev`: Package will be removed from your `devDependencies`. + +* `-O, --save-optional`: Package will be removed from your `optionalDependencies`. + +* `--no-save`: Package will not be removed from your `package.json` file. + +Further, if you have an `npm-shrinkwrap.json` then it will be updated as +well. + +Scope is optional and follows the usual rules for [`scope`](/using-npm/scope). + +Examples: +```bash +npm uninstall sax --save +npm uninstall @myorg/privatepackage --save +npm uninstall node-tap --save-dev +npm uninstall dtrace-provider --save-optional +npm uninstall lodash --no-save +``` + +### See Also + +* [npm prune](/cli-commands/npm-prune) +* [npm install](/cli-commands/npm-install) +* [npm folders](/configuring-npm/folders) +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) diff --git a/docs/content/cli-commands/npm-unpublish.md b/docs/content/cli-commands/npm-unpublish.md new file mode 100644 index 0000000000000..c40f480a8bcb8 --- /dev/null +++ b/docs/content/cli-commands/npm-unpublish.md @@ -0,0 +1,50 @@ +--- +section: cli-commands +title: npm-unpublish +description: Remove a package from the registry +--- + +# npm-unpublish(1) + +## Remove a package from the registry + +### Synopsis + +#### Unpublishing a single version of a package + +```bash +npm unpublish [<@scope>/]@ +``` + +#### Unpublishing an entire package + +```bash +npm unpublish [<@scope>/] --force +``` + +### Warning + +Consider using the `deprecate` command instead, if your intent is to encourage users to upgrade, or if you no longer want to maintain a package. + +### Description + +This removes a package version from the registry, deleting its +entry and removing the tarball. + +If no version is specified, or if all versions are removed then +the root package entry is removed from the registry entirely. + +Even if a package version is unpublished, that specific name and +version combination can never be reused. In order to publish the +package again, a new version number must be used. If you unpublish the entire package, you may not publish any new versions of that package until 24 hours have passed. + +To learn more about how unpublish is treated on the npm registry, see our unpublish policies. 
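+
+For instance (the package name and version are placeholders):
+
+```bash
+# Remove a single published version
+npm unpublish @myorg/somepackage@1.0.2
+
+# Remove the entire package from the registry
+npm unpublish @myorg/somepackage --force
+```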
+ + +### See Also + +* [npm deprecate](/cli-commands/npm-deprecate) +* [npm publish](/cli-commands/npm-publish) +* [npm registry](/using-npm/registry) +* [npm adduser](/cli-commands/npm-adduser) +* [npm owner](/cli-commands/npm-owner) diff --git a/doc/cli/npm-update.md b/docs/content/cli-commands/npm-update.md similarity index 80% rename from doc/cli/npm-update.md rename to docs/content/cli-commands/npm-update.md index ec02cbcc37632..f2e93b6dc33df 100644 --- a/doc/cli/npm-update.md +++ b/docs/content/cli-commands/npm-update.md @@ -1,13 +1,22 @@ -npm-update(1) -- Update a package -================================= +--- +section: cli-commands +title: npm-update +description: Update a package +--- -## SYNOPSIS +# npm-update(1) - npm update [-g] [...] +## Update a package - aliases: up, upgrade +### Synopsis -## DESCRIPTION +```bash +npm update [-g] [...] + +aliases: up, upgrade +``` + +### Description This command will update all the packages listed to the latest version (specified by the `tag` config), respecting semver. @@ -30,7 +39,7 @@ As of `npm@5.0.0`, the `npm update` will change `package.json` to save the new version as the minimum required dependency. To get the old behavior, use `npm update --no-save`. -## EXAMPLES +### Example IMPORTANT VERSION NOTE: these examples assume `npm@2.6.1` or later. For older versions of `npm`, you must specify `--depth 0` to get the behavior @@ -39,7 +48,7 @@ described below. For the examples below, assume that the current package is `app` and it depends on dependencies, `dep1` (`dep2`, .. etc.). The published versions of `dep1` are: -``` +```json { "dist-tags": { "latest": "1.2.2" }, "versions": [ @@ -56,11 +65,11 @@ on dependencies, `dep1` (`dep2`, .. etc.). The published versions of `dep1` are } ``` -### Caret Dependencies +#### Caret Dependencies If `app`'s `package.json` contains: -``` +```json "dependencies": { "dep1": "^1.1.1" } @@ -69,11 +78,11 @@ If `app`'s `package.json` contains: Then `npm update` will install `dep1@1.2.2`, because `1.2.2` is `latest` and `1.2.2` satisfies `^1.1.1`. -### Tilde Dependencies +#### Tilde Dependencies However, if `app`'s `package.json` contains: -``` +```json "dependencies": { "dep1": "~1.1.1" } @@ -84,11 +93,11 @@ tag points to `1.2.2`, this version does not satisfy `~1.1.1`, which is equivale to `>=1.1.1 <1.2.0`. So the highest-sorting version that satisfies `~1.1.1` is used, which is `1.1.2`. -### Caret Dependencies below 1.0.0 +#### Caret Dependencies below 1.0.0 Suppose `app` has a caret dependency on a version below `1.0.0`, for example: -``` +```json "dependencies": { "dep1": "^0.2.0" } @@ -99,7 +108,7 @@ versions which satisfy `^0.2.0`. If the dependence were on `^0.4.0`: -``` +```json "dependencies": { "dep1": "^0.4.0" } @@ -109,7 +118,7 @@ Then `npm update` will install `dep1@0.4.1`, because that is the highest-sorting version that satisfies `^0.4.0` (`>= 0.4.0 <0.5.0`) -### Updating Globally-Installed Packages +#### Updating Globally-Installed Packages `npm update -g` will apply the `update` action to each globally installed package that is `outdated` -- that is, has a version that is different from @@ -119,11 +128,11 @@ NOTE: If a package has been upgraded to a version newer than `latest`, it will be _downgraded_. 
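+
+The plain commands for the local and global cases described above are simply:
+
+```bash
+# Update everything the current project depends on,
+# within the semver ranges declared in package.json
+npm update
+
+# Update a single globally-installed package
+npm update -g npm
+```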
-## SEE ALSO +### See Also -* npm-install(1) -* npm-outdated(1) -* npm-shrinkwrap(1) -* npm-registry(7) -* npm-folders(5) -* npm-ls(1) +* [npm install](/cli-commands/npm-install) +* [npm outdated](/cli-commands/npm-outdated) +* [npm shrinkwrap](/cli-commands/npm-shrinkwrap) +* [npm registry](/using-npm/registry) +* [npm folders](/configuring-npm/folders) +* [npm ls](/cli-commands/npm-ls) diff --git a/doc/cli/npm-version.md b/docs/content/cli-commands/npm-version.md similarity index 75% rename from doc/cli/npm-version.md rename to docs/content/cli-commands/npm-version.md index a20f4a982a603..a47e9e33326ac 100644 --- a/doc/cli/npm-version.md +++ b/docs/content/cli-commands/npm-version.md @@ -1,15 +1,24 @@ -npm-version(1) -- Bump a package version -======================================== +--- +section: cli-commands +title: npm-version +description: Bump a package version +--- -## SYNOPSIS +# npm-version(1) - npm version [ | major | minor | patch | premajor | preminor | prepatch | prerelease [--preid=] | from-git] +## Bump a package version - 'npm [-v | --version]' to print npm version - 'npm view version' to view a package's published version - 'npm ls' to inspect current package/dependency versions +### Synopsis -## DESCRIPTION +```bash +npm version [ | major | minor | patch | premajor | preminor | prepatch | prerelease [--preid=] | from-git] + +'npm [-v | --version]' to print npm version +'npm view version' to view a package's published version +'npm ls' to inspect current package/dependency versions +``` + +### Description Run this in a package directory to bump the version and write the new data back to `package.json`, `package-lock.json`, and, if present, `npm-shrinkwrap.json`. @@ -31,20 +40,24 @@ use it as a commit message when creating a version commit. If the `message` config contains `%s` then that will be replaced with the resulting version number. For example: - npm version patch -m "Upgrade to %s for reasons" +```bash +npm version patch -m "Upgrade to %s for reasons" +``` If the `sign-git-tag` config is set, then the tag will be signed using the `-s` flag to git. Note that you must have a default GPG key set up in your git config for this to work properly. For example: - $ npm config set sign-git-tag true - $ npm version patch +```bash +$ npm config set sign-git-tag true +$ npm version patch - You need a passphrase to unlock the secret key for - user: "isaacs (http://blog.izs.me/) " - 2048-bit RSA key, ID 6C481CF6, created 2010-08-31 +You need a passphrase to unlock the secret key for +user: "isaacs (http://blog.izs.me/) " +2048-bit RSA key, ID 6C481CF6, created 2010-08-31 - Enter passphrase: +Enter passphrase: +``` If `preversion`, `version`, or `postversion` are in the `scripts` property of the package.json, they will be executed as part of running `npm version`. @@ -66,19 +79,21 @@ The exact order of execution is as follows: Take the following example: +```json "scripts": { "preversion": "npm test", "version": "npm run build && git add -A dist", "postversion": "git push && git push --tags && rm -rf build/temp" } +``` This runs all your tests, and proceeds only if they pass. Then runs your `build` script, and adds everything in the `dist` directory to the commit. After the commit, it pushes the new commit and tag up to the server, and deletes the `build/temp` directory. -## CONFIGURATION +### Configuration -### allow-same-version +#### allow-same-version * Default: false * Type: Boolean @@ -86,21 +101,21 @@ and tag up to the server, and deletes the `build/temp` directory. 
Prevents throwing an error when `npm version` is used to set the new version to the same value as the current version. -### git-tag-version +#### git-tag-version * Default: true * Type: Boolean Commit and tag the version change. -### commit-hooks +#### commit-hooks * Default: true * Type: Boolean Run git commit hooks when committing the version change. -### sign-git-tag +#### sign-git-tag * Default: false * Type: Boolean @@ -109,11 +124,11 @@ Pass the `-s` flag to git to sign the tag. Note that you must have a default GPG key set up in your git config for this to work properly. -## SEE ALSO +### See Also -* npm-init(1) -* npm-run-script(1) -* npm-scripts(7) -* package.json(5) -* semver(7) -* config(7) +* [npm init](/cli-commands/npm-init) +* [npm run-script](/cli-commands/npm-run-script) +* [npm scripts](/using-npm/scripts) +* [package.json](/configuring-npm/package-json) +* [semver](/using-npm/semver) +* [config](/using-npm/config) diff --git a/docs/content/cli-commands/npm-view.md b/docs/content/cli-commands/npm-view.md new file mode 100644 index 0000000000000..0c108e6f56615 --- /dev/null +++ b/docs/content/cli-commands/npm-view.md @@ -0,0 +1,124 @@ +--- +section: cli-commands +title: npm-view +description: View registry info +--- + +# npm-view(1) + +## View registry info + +### Synopsis + +```bash +npm view [<@scope>/][@] [[.]...] + +aliases: info, show, v +``` + +### Description + +This command shows data about a package and prints it to the stream +referenced by the `outfd` config, which defaults to stdout. + +To show the package registry entry for the `connect` package, you can do +this: + +```bash +npm view connect +``` + +The default version is "latest" if unspecified. + +Field names can be specified after the package descriptor. +For example, to show the dependencies of the `ronn` package at version +0.3.5, you could do the following: + +```bash +npm view ronn@0.3.5 dependencies +``` + +You can view child fields by separating them with a period. +To view the git repository URL for the latest version of npm, you could +do this: + +```bash +npm view npm repository.url +``` + +This makes it easy to view information about a dependency with a bit of +shell scripting. For example, to view all the data about the version of +opts that ronn depends on, you can do this: + +```bash +npm view opts@$(npm view ronn dependencies.opts) +``` + +For fields that are arrays, requesting a non-numeric field will return +all of the values from the objects in the list. For example, to get all +the contributor names for the "express" project, you can do this: + +```bash +npm view express contributors.email +``` + +You may also use numeric indices in square braces to specifically select +an item in an array field. To just get the email address of the first +contributor in the list, you can do this: + +```bash +npm view express contributors[0].email +``` + +Multiple fields may be specified, and will be printed one after another. +For example, to get all the contributor names and email addresses, you +can do this: + +```bash +npm view express contributors.name contributors.email +``` + +"Person" fields are shown as a string if they would be shown as an +object. So, for example, this will show the list of npm contributors in +the shortened string format. (See [`package.json`](/configuring-npm/package.json) for more on this.) + +```bash +npm view npm contributors +``` + +If a version range is provided, then data will be printed for every +matching version of the package. 
This will show which version of jsdom +was required by each matching version of yui3: + +```bash +npm view yui3@'>0.5.4' dependencies.jsdom +``` + +To show the `connect` package version history, you can do +this: + +```bash +npm view connect versions +``` + +### Output + +If only a single string field for a single version is output, then it +will not be colorized or quoted, so as to enable piping the output to +another command. If the field is an object, it will be output as a JavaScript object literal. + +If the --json flag is given, the outputted fields will be JSON. + +If the version range matches multiple versions, than each printed value +will be prefixed with the version it applies to. + +If multiple fields are requested, than each of them are prefixed with +the field name. + +### See Also + +* [npm search](/cli-commands/npm-search) +* [npm registry](/using-npm/registry) +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) +* [npm docs](/cli-commands/npm-docs) diff --git a/docs/content/cli-commands/npm-whoami.md b/docs/content/cli-commands/npm-whoami.md new file mode 100644 index 0000000000000..b0eda4e46aa61 --- /dev/null +++ b/docs/content/cli-commands/npm-whoami.md @@ -0,0 +1,24 @@ +--- +section: cli-commands +title: npm-whoami +description: Display npm username +--- + +# npm-whoami(1) +## Display npm username + +### Synopsis + +```bash +npm whoami [--registry ] +``` + +### Description + +Print the `username` config to standard output. + +### See Also + +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) +* [npm adduser](/cli-commands/npm-adduser) diff --git a/doc/cli/npm.md b/docs/content/cli-commands/npm.md similarity index 83% rename from doc/cli/npm.md rename to docs/content/cli-commands/npm.md index 32384547bcb0b..01a9308204d19 100644 --- a/doc/cli/npm.md +++ b/docs/content/cli-commands/npm.md @@ -1,15 +1,24 @@ -npm(1) -- javascript package manager -==================================== +--- +section: cli-commands +title: npm +description: javascript package manager +--- -## SYNOPSIS +# npm(1) - npm [args] +## javascript package manager -## VERSION +### Synopsis + +```bash +npm [args] +``` + +### Version @VERSION@ -## DESCRIPTION +### Description npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency @@ -21,7 +30,7 @@ programs. Run `npm help` to get a list of available commands. -## IMPORTANT +### Important npm is configured to use npm, Inc.'s public registry at https://registry.npmjs.org by default. Use of the npm public registry is @@ -31,17 +40,17 @@ You can configure npm to use any compatible registry you like, and even run your own registry. Use of someone else's registry may be governed by their terms of use. -## INTRODUCTION +### Introduction You probably got npm because you want to install stuff. Use `npm install blerg` to install the latest version of "blerg". Check out -`npm-install(1)` for more info. It can do a lot of stuff. +[`npm install`](/cli-commands/npm-install) for more info. It can do a lot of stuff. Use the `npm search` command to show everything that's available. Use `npm ls` to show everything you've installed. -## DEPENDENCIES +### Dependencies If a package references to another package with a git URL, npm depends on a preinstalled git. @@ -57,9 +66,9 @@ For more information visit [the node-gyp repository](https://github.com/TooTallNate/node-gyp) and the [node-gyp Wiki](https://github.com/TooTallNate/node-gyp/wiki). 
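+
+As an illustration of the first point, installing a dependency straight from a git URL (the repository here is hypothetical) requires a working `git` on your PATH:
+
+```bash
+npm install git+https://github.com/someuser/somepackage.git
+```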
-## DIRECTORIES +### Directories -See `npm-folders(5)` to learn about where npm puts stuff. +See [`folders`](/configuring-npm/folders) to learn about where npm puts stuff. In particular, npm has two modes of operation: @@ -74,13 +83,13 @@ In particular, npm has two modes of operation: Local mode is the default. Use `-g` or `--global` on any command to operate in global mode instead. -## DEVELOPER USAGE +### Developer Usage If you're using npm to develop and publish your code, check out the following help topics: * json: - Make a package.json file. See `package.json(5)`. + Make a package.json file. See [`package.json`](/configuring-npm/package.json). * link: For linking your current working code into Node's path, so that you don't have to reinstall every time you make a change. Use @@ -95,7 +104,7 @@ following help topics: * publish: Use the `npm publish` command to upload your code to the registry. -## CONFIGURATION +#### Configuration npm is extremely configurable. It reads its configuration options from 5 places. @@ -121,9 +130,9 @@ npm is extremely configurable. It reads its configuration options from npm's default configuration options are defined in lib/utils/config-defs.js. These must not be changed. -See `npm-config(7)` for much much more information. +See [`config`](/using-npm/config) for much much more information. -## CONTRIBUTIONS +### Contributions Patches welcome! @@ -131,10 +140,9 @@ If you would like to contribute, but don't know what to work on, read the contributing guidelines and check the issues list. * [CONTRIBUTING.md](https://github.com/npm/cli/blob/latest/CONTRIBUTING.md) -* [Bug tracker](https://npm.community/c/bugs) -* [Support tracker](https://npm.community/c/support) +* [Bug tracker](https://github.com/npm/cli/issues) -## BUGS +### Bugs When you find issues, please report them: @@ -146,20 +154,16 @@ for help in the [support forum](https://npm.community/c/support) if you're unsure if it's actually a bug or are having trouble coming up with a detailed reproduction to report. -## AUTHOR +### Author [Isaac Z. Schlueter](http://blog.izs.me/) :: [isaacs](https://github.com/isaacs/) :: [@izs](https://twitter.com/izs) :: -## SEE ALSO - -* npm-help(1) -* README -* package.json(5) -* npm-install(1) -* npm-config(1) -* npm-config(7) -* npmrc(5) -* npm-index(7) +### See Also +* [npm help](/cli-commands/npm-help) +* [package.json](/configuring-npm/package-json) +* [npm install](/cli-commands/npm-install) +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) diff --git a/docs/content/configuring-npm/folders.md b/docs/content/configuring-npm/folders.md new file mode 100644 index 0000000000000..96a8f4783d719 --- /dev/null +++ b/docs/content/configuring-npm/folders.md @@ -0,0 +1,223 @@ +--- +section: configuring-npm +title: folders +description: Folder Structures Used by npm +--- + +# folders(5) + +## Folder Structures Used by npm + +### Description + +npm puts various things on your computer. That's its job. + +This document will tell you what it puts where. + +#### tl;dr + +* Local install (default): puts stuff in `./node_modules` of the current + package root. +* Global install (with `-g`): puts stuff in /usr/local or wherever node + is installed. +* Install it **locally** if you're going to `require()` it. +* Install it **globally** if you're going to run it on the command line. +* If you need both, then install it in both places, or use `npm link`. + +#### prefix Configuration + +The `prefix` config defaults to the location where node is installed. 
+On most systems, this is `/usr/local`. On Windows, it's `%AppData%\npm`. +On Unix systems, it's one level up, since node is typically installed at +`{prefix}/bin/node` rather than `{prefix}/node.exe`. + +When the `global` flag is set, npm installs things into this prefix. +When it is not set, it uses the root of the current package, or the +current working directory if not in a package already. + +#### Node Modules + +Packages are dropped into the `node_modules` folder under the `prefix`. +When installing locally, this means that you can +`require("packagename")` to load its main module, or +`require("packagename/lib/path/to/sub/module")` to load other modules. + +Global installs on Unix systems go to `{prefix}/lib/node_modules`. +Global installs on Windows go to `{prefix}/node_modules` (that is, no +`lib` folder.) + +Scoped packages are installed the same way, except they are grouped together +in a sub-folder of the relevant `node_modules` folder with the name of that +scope prefix by the @ symbol, e.g. `npm install @myorg/package` would place +the package in `{prefix}/node_modules/@myorg/package`. See [`scope`](/using-npm/scope) for more details. + +If you wish to `require()` a package, then install it locally. + +#### Executables + +When in global mode, executables are linked into `{prefix}/bin` on Unix, +or directly into `{prefix}` on Windows. + +When in local mode, executables are linked into +`./node_modules/.bin` so that they can be made available to scripts run +through npm. (For example, so that a test runner will be in the path +when you run `npm test`.) + +#### Man Pages + +When in global mode, man pages are linked into `{prefix}/share/man`. + +When in local mode, man pages are not installed. + +Man pages are not installed on Windows systems. + +#### Cache + +See [`npm cache`](/cli-commands/npm-cache). Cache files are stored in `~/.npm` on Posix, or +`%AppData%/npm-cache` on Windows. + +This is controlled by the `cache` configuration param. + +#### Temp Files + +Temporary files are stored by default in the folder specified by the +`tmp` config, which defaults to the TMPDIR, TMP, or TEMP environment +variables, or `/tmp` on Unix and `c:\windows\temp` on Windows. + +Temp files are given a unique folder under this root for each run of the +program, and are deleted upon successful exit. + +### More Information + +When installing locally, npm first tries to find an appropriate +`prefix` folder. This is so that `npm install foo@1.2.3` will install +to the sensible root of your package, even if you happen to have `cd`ed +into some other folder. + +Starting at the $PWD, npm will walk up the folder tree checking for a +folder that contains either a `package.json` file, or a `node_modules` +folder. If such a thing is found, then that is treated as the effective +"current directory" for the purpose of running npm commands. (This +behavior is inspired by and similar to git's .git-folder seeking +logic when running git commands in a working dir.) + +If no package root is found, then the current folder is used. + +When you run `npm install foo@1.2.3`, then the package is loaded into +the cache, and then unpacked into `./node_modules/foo`. Then, any of +foo's dependencies are similarly unpacked into +`./node_modules/foo/node_modules/...`. + +Any bin files are symlinked to `./node_modules/.bin/`, so that they may +be found by npm scripts when necessary. + +#### Global Installation + +If the `global` configuration is set to true, then npm will +install packages "globally". 
+ +For global installation, packages are installed roughly the same way, +but using the folders described above. + +#### Cycles, Conflicts, and Folder Parsimony + +Cycles are handled using the property of node's module system that it +walks up the directories looking for `node_modules` folders. So, at every +stage, if a package is already installed in an ancestor `node_modules` +folder, then it is not installed at the current location. + +Consider the case above, where `foo -> bar -> baz`. Imagine if, in +addition to that, baz depended on bar, so you'd have: +`foo -> bar -> baz -> bar -> baz ...`. However, since the folder +structure is: `foo/node_modules/bar/node_modules/baz`, there's no need to +put another copy of bar into `.../baz/node_modules`, since when it calls +require("bar"), it will get the copy that is installed in +`foo/node_modules/bar`. + +This shortcut is only used if the exact same +version would be installed in multiple nested `node_modules` folders. It +is still possible to have `a/node_modules/b/node_modules/a` if the two +"a" packages are different versions. However, without repeating the +exact same package multiple times, an infinite regress will always be +prevented. + +Another optimization can be made by installing dependencies at the +highest level possible, below the localized "target" folder. + +#### Example + +Consider this dependency graph: + +```bash +foo ++-- blerg@1.2.5 ++-- bar@1.2.3 +| +-- blerg@1.x (latest=1.3.7) +| +-- baz@2.x +| | `-- quux@3.x +| | `-- bar@1.2.3 (cycle) +| `-- asdf@* +`-- baz@1.2.3 + `-- quux@3.x + `-- bar +``` + +In this case, we might expect a folder structure like this: + +```bash +foo ++-- node_modules + +-- blerg (1.2.5) <---[A] + +-- bar (1.2.3) <---[B] + | `-- node_modules + | +-- baz (2.0.2) <---[C] + | | `-- node_modules + | | `-- quux (3.2.0) + | `-- asdf (2.3.4) + `-- baz (1.2.3) <---[D] + `-- node_modules + `-- quux (3.2.0) <---[E] +``` + +Since foo depends directly on `bar@1.2.3` and `baz@1.2.3`, those are +installed in foo's `node_modules` folder. + +Even though the latest copy of blerg is 1.3.7, foo has a specific +dependency on version 1.2.5. So, that gets installed at [A]. Since the +parent installation of blerg satisfies bar's dependency on `blerg@1.x`, +it does not install another copy under [B]. + +Bar [B] also has dependencies on baz and asdf, so those are installed in +bar's `node_modules` folder. Because it depends on `baz@2.x`, it cannot +re-use the `baz@1.2.3` installed in the parent `node_modules` folder [D], +and must install its own copy [C]. + +Underneath bar, the `baz -> quux -> bar` dependency creates a cycle. +However, because bar is already in quux's ancestry [B], it does not +unpack another copy of bar into that folder. + +Underneath `foo -> baz` [D], quux's [E] folder tree is empty, because its +dependency on bar is satisfied by the parent folder copy installed at [B]. + +For a graphical breakdown of what is installed where, use `npm ls`. + +#### Publishing + +Upon publishing, npm will look in the `node_modules` folder. If any of +the items there are not in the `bundledDependencies` array, then they will +not be included in the package tarball. + +This allows a package maintainer to install all of their dependencies +(and dev dependencies) locally, but only re-publish those items that +cannot be found elsewhere. See [`package.json`](/configuring-npm/package.json) for more information. 
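+
+One way to sanity-check what will end up in the tarball is a dry-run pack, which reports the file list without publishing anything; see [`npm pack`](/cli-commands/npm-pack):
+
+```bash
+npm pack --dry-run
+```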
+ +### See also + +* [package.json](/configuring-npm/package-json) +* [npm install](/cli-commands/npm-install) +* [npm pack](/cli-commands/npm-pack) +* [npm cache](/cli-commands/npm-cache) +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) +* [config](/using-npm/config) +* [npm publish](/cli-commands/npm-publish) diff --git a/docs/content/configuring-npm/install.md b/docs/content/configuring-npm/install.md new file mode 100644 index 0000000000000..31a316f8e0630 --- /dev/null +++ b/docs/content/configuring-npm/install.md @@ -0,0 +1,70 @@ +--- +section: configuring-npm +title: install +description: Download and install node and npm +--- + +# install(5) + +## Download and Install npm + +### Description + +To publish and install packages to and from the public npm registry, you must install Node.js and the npm command line interface using either a Node version manager or a Node installer. **We strongly recommend using a Node version manager to install Node.js and npm.** We do not recommend using a Node installer, since the Node installation process installs npm in a directory with local permissions and can cause permissions errors when you run npm packages globally. + +### Overview + +- [Checking your version of npm and Node.js](#checking-your-version-of-npm-and-node-js) +- [Using a Node version manager to install Node.js and npm](#using-a-node-version-manager-to-install-node-js-and-npm) +- [Using a Node installer to install Node.js and npm](#using-a-node-installer-to-install-node-js-and-npm) + +### Checking your version of npm and Node.js + +To see if you already have Node.js and npm installed and check the installed version, run the following commands: + +``` +node -v +npm -v +``` + +### Using a Node version manager to install Node.js and npm + +Node version managers allow you to install and switch between multiple versions of Node.js and npm on your system so you can test your applications on multiple versions of npm to ensure they work for users on different versions. + +#### OSX or Linux Node version managers + +* [nvm](https://github.com/creationix/nvm) +* [n](https://github.com/tj/n) + +#### Windows Node version managers + +* [nodist](https://github.com/marcelklehr/nodist) +* [nvm-windows](https://github.com/coreybutler/nvm-windows) + +### Using a Node installer to install Node.js and npm + +If you are unable to use a Node version manager, you can use a Node installer to install both Node.js and npm on your system. + +* [Node.js installer](https://nodejs.org/en/download/) +* [NodeSource installer](https://github.com/nodesource/distributions). If you use Linux, we recommend that you use a NodeSource installer. + +#### OS X or Windows Node installers + +If you're using OS X or Windows, use one of the installers from the [Node.js download page](https://nodejs.org/en/download/). Be sure to install the version labeled **LTS**. Other versions have not yet been tested with npm. + +#### Linux or other operating systems Node installers + +If you're using Linux or another operating system, use one of the following installers: + +- [NodeSource installer](https://github.com/nodesource/distributions) (recommended) +- One of the installers on the [Node.js download page](https://nodejs.org/en/download/) + +Or see [this page](https://nodejs.org/en/download/package-manager/) to install npm for Linux in the way many Linux developers prefer. 
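+
+Whichever route you choose, the npm CLI that ships with Node.js can usually be updated afterwards with npm itself (a common pattern, independent of the installer):
+
+```bash
+npm install -g npm@latest
+```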
+ + +#### Less-common operating systems + +For more information on installing Node.js on a variety of operating systems, see [this page][pkg-mgr]. + + +[pkg-mgr]: https://nodejs.org/en/download/package-manager/ diff --git a/docs/content/configuring-npm/npmrc.md b/docs/content/configuring-npm/npmrc.md new file mode 100644 index 0000000000000..090ed5944fcce --- /dev/null +++ b/docs/content/configuring-npm/npmrc.md @@ -0,0 +1,103 @@ +--- +section: configuring-npm +title: npmrc +description: The npm config files +--- + +# npmrc(5) + +## The npm config files + +### Description + +npm gets its config settings from the command line, environment +variables, and `npmrc` files. + +The `npm config` command can be used to update and edit the contents +of the user and global npmrc files. + +For a list of available configuration options, see [config](/using-npm/config). + +### Files + +The four relevant files are: + +* per-project config file (/path/to/my/project/.npmrc) +* per-user config file (~/.npmrc) +* global config file ($PREFIX/etc/npmrc) +* npm builtin config file (/path/to/npm/npmrc) + +All npm config files are an ini-formatted list of `key = value` +parameters. Environment variables can be replaced using +`${VARIABLE_NAME}`. For example: + +```bash +prefix = ${HOME}/.npm-packages +``` + +Each of these files is loaded, and config options are resolved in +priority order. For example, a setting in the userconfig file would +override the setting in the globalconfig file. + +Array values are specified by adding "[]" after the key name. For +example: + +```bash +key[] = "first value" +key[] = "second value" +``` + +#### Comments + +Lines in `.npmrc` files are interpreted as comments when they begin with a `;` or `#` character. `.npmrc` files are parsed by [npm/ini](https://github.com/npm/ini), which specifies this comment syntax. + +For example: + +```bash +# last modified: 01 Jan 2016 +; Set a new registry for a scoped package +@myscope:registry=https://mycustomregistry.example.org +``` + +#### Per-project config file + +When working locally in a project, a `.npmrc` file in the root of the +project (ie, a sibling of `node_modules` and `package.json`) will set +config values specific to this project. + +Note that this only applies to the root of the project that you're +running npm in. It has no effect when your module is published. For +example, you can't publish a module that forces itself to install +globally, or in a different location. + +Additionally, this file is not read in global mode, such as when running +`npm install -g`. + +#### Per-user config file + +`$HOME/.npmrc` (or the `userconfig` param, if set in the environment +or on the command line) + +#### Global config file + +`$PREFIX/etc/npmrc` (or the `globalconfig` param, if set above): +This file is an ini-file formatted list of `key = value` parameters. +Environment variables can be replaced as above. + +#### Built-in config file + +`path/to/npm/itself/npmrc` + +This is an unchangeable "builtin" configuration file that npm keeps +consistent across updates. Set fields in here using the `./configure` +script that comes with npm. This is primarily for distribution +maintainers to override default configs in a standard and consistent +manner. 
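Rather than editing these files by hand, you can let the `npm config` command manage them. A few illustrative commands (the key and value here are only examples):

```bash
npm config set init-license "MIT"   # writes init-license=MIT to the per-user ~/.npmrc
npm config get init-license         # prints the effective value after all files are merged
npm config delete init-license      # removes the key again
npm config edit                     # opens the per-user npmrc in your editor
npm config edit --global            # opens the global npmrc instead
```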
+ +### See also + +* [npm folders](/configuring-npm/folders) +* [npm config](/cli-commands/npm-config) +* [config](/using-npm/config) +* [package.json](/configuring-npm/package-json) +* [npm](/cli-commands/npm) diff --git a/docs/content/configuring-npm/package-json.md b/docs/content/configuring-npm/package-json.md new file mode 100644 index 0000000000000..9af70ea0ab2cc --- /dev/null +++ b/docs/content/configuring-npm/package-json.md @@ -0,0 +1,919 @@ +--- +section: configuring-npm +title: package.json +description: Specifics of npm's package.json handling +--- + +# package.json(5) + +## Specifics of npm's package.json handling + +### Description + +This document is all you need to know about what's required in your package.json +file. It must be actual JSON, not just a JavaScript object literal. + +A lot of the behavior described in this document is affected by the config +settings described in [`config`](/using-npm/config). + +### name + +If you plan to publish your package, the *most* important things in your +package.json are the name and version fields as they will be required. The name +and version together form an identifier that is assumed to be completely unique. +Changes to the package should come along with changes to the version. If you don't +plan to publish your package, the name and version fields are optional. + +The name is what your thing is called. + +Some rules: + +* The name must be less than or equal to 214 characters. This includes the scope for + scoped packages. +* The names of scoped packages can begin with a dot or an underscore. This is not permitted without a scope. +* New packages must not have uppercase letters in the name. +* The name ends up being part of a URL, an argument on the command line, and a + folder name. Therefore, the name can't contain any non-URL-safe characters. + +Some tips: + +* Don't use the same name as a core Node module. +* Don't put "js" or "node" in the name. It's assumed that it's js, since you're + writing a package.json file, and you can specify the engine using the "engines" + field. (See below.) +* The name will probably be passed as an argument to require(), so it should + be something short, but also reasonably descriptive. +* You may want to check the npm registry to see if there's something by that name + already, before you get too attached to it. + +A name can be optionally prefixed by a scope, e.g. `@myorg/mypackage`. See +[`scope`](/using-npm/scope) for more detail. + +### version + +If you plan to publish your package, the *most* important things in your +package.json are the name and version fields as they will be required. The name +and version together form an identifier that is assumed to be completely unique. +Changes to the package should come along with changes to the version. If you don't +plan to publish your package, the name and version fields are optional. + +Version must be parseable by +[node-semver](https://github.com/isaacs/node-semver), which is bundled +with npm as a dependency. (`npm install semver` to use it yourself.) + +More on version numbers and ranges at [semver](/using-npm/semver). + +### description + +Put a description in it. It's a string. This helps people discover your +package, as it's listed in `npm search`. + +### keywords + +Put keywords in it. It's an array of strings. This helps people +discover your package as it's listed in `npm search`. + +### homepage + +The url to the project homepage. 
+ +Example: + +```json +"homepage": "https://github.com/owner/project#readme" +``` + +### bugs + +The url to your project's issue tracker and / or the email address to which +issues should be reported. These are helpful for people who encounter issues +with your package. + +It should look like this: + +```json +{ "url" : "https://github.com/owner/project/issues" +, "email" : "project@hostname.com" +} +``` + +You can specify either one or both values. If you want to provide only a url, +you can specify the value for "bugs" as a simple string instead of an object. + +If a url is provided, it will be used by the `npm bugs` command. + +### license + +You should specify a license for your package so that people know how they are +permitted to use it, and any restrictions you're placing on it. + +If you're using a common license such as BSD-2-Clause or MIT, add a +current SPDX license identifier for the license you're using, like this: + +```json +{ "license" : "BSD-3-Clause" } +``` + +You can check [the full list of SPDX license IDs](https://spdx.org/licenses/). +Ideally you should pick one that is +[OSI](https://opensource.org/licenses/alphabetical) approved. + +If your package is licensed under multiple common licenses, use an [SPDX license +expression syntax version 2.0 string](https://www.npmjs.com/package/spdx), like this: + +```json +{ "license" : "(ISC OR GPL-3.0)" } +``` +If you are using a license that hasn't been assigned an SPDX identifier, or if +you are using a custom license, use a string value like this one: + +```json +{ "license" : "SEE LICENSE IN " } +``` +Then include a file named `` at the top level of the package. + +Some old packages used license objects or a "licenses" property containing an +array of license objects: + +```json +// Not valid metadata +{ "license" : + { "type" : "ISC" + , "url" : "https://opensource.org/licenses/ISC" + } +} + +// Not valid metadata +{ "licenses" : + [ + { "type": "MIT" + , "url": "https://www.opensource.org/licenses/mit-license.php" + } + , { "type": "Apache-2.0" + , "url": "https://opensource.org/licenses/apache2.0.php" + } + ] +} +``` + +Those styles are now deprecated. Instead, use SPDX expressions, like this: + +```json +{ "license": "ISC" } + +{ "license": "(MIT OR Apache-2.0)" } +``` + +Finally, if you do not wish to grant others the right to use a private or +unpublished package under any terms: + +```json +{ "license": "UNLICENSED" } +``` +Consider also setting `"private": true` to prevent accidental publication. + +### people fields: author, contributors + +The "author" is one person. "contributors" is an array of people. A "person" +is an object with a "name" field and optionally "url" and "email", like this: + +```json +{ "name" : "Barney Rubble" +, "email" : "b@rubble.com" +, "url" : "http://barnyrubble.tumblr.com/" +} +``` + +Or you can shorten that all into a single string, and npm will parse it for you: + +```json +"Barney Rubble (http://barnyrubble.tumblr.com/)" +``` + +Both email and url are optional either way. + +npm also sets a top-level "maintainers" field with your npm user info. 
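Putting the two fields together, a hypothetical manifest might mix the object and string forms like this (the people listed are placeholders):

```json
{
  "author": {
    "name": "Barney Rubble",
    "email": "b@rubble.com",
    "url": "http://barnyrubble.tumblr.com/"
  },
  "contributors": [
    "Betty Rubble <betty@rubble.com>",
    { "name": "Bamm-Bamm Rubble" }
  ]
}
```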
+ +### funding + +You can specify an object containing an URL that provides up-to-date +information about ways to help fund development of your package, or +a string URL, or an array of these: + + "funding": { + "type" : "individual", + "url" : "http://example.com/donate" + } + + "funding": { + "type" : "patreon", + "url" : "https://www.patreon.com/my-account" + } + + "funding": "http://example.com/donate" + + "funding": [ + { + "type" : "individual", + "url" : "http://example.com/donate" + }, + "http://example.com/donateAlso", + { + "type" : "patreon", + "url" : "https://www.patreon.com/my-account" + } + ] + + +Users can use the `npm fund` subcommand to list the `funding` URLs of all +dependencies of their project, direct and indirect. A shortcut to visit each +funding url is also available when providing the project name such as: +`npm fund ` (when there are multiple URLs, the first one will be +visited) + +### files + +The optional `files` field is an array of file patterns that describes +the entries to be included when your package is installed as a +dependency. File patterns follow a similar syntax to `.gitignore`, but +reversed: including a file, directory, or glob pattern (`*`, `**/*`, and such) +will make it so that file is included in the tarball when it's packed. Omitting +the field will make it default to `["*"]`, which means it will include all files. + +Some special files and directories are also included or excluded regardless of +whether they exist in the `files` array (see below). + +You can also provide a `.npmignore` file in the root of your package or +in subdirectories, which will keep files from being included. At the +root of your package it will not override the "files" field, but in +subdirectories it will. The `.npmignore` file works just like a +`.gitignore`. If there is a `.gitignore` file, and `.npmignore` is +missing, `.gitignore`'s contents will be used instead. + +Files included with the "package.json#files" field _cannot_ be excluded +through `.npmignore` or `.gitignore`. + +Certain files are always included, regardless of settings: + +* `package.json` +* `README` +* `CHANGES` / `CHANGELOG` / `HISTORY` +* `LICENSE` / `LICENCE` +* `NOTICE` +* The file in the "main" field + +`README`, `CHANGES`, `LICENSE` & `NOTICE` can have any case and extension. + +Conversely, some files are always ignored: + +* `.git` +* `CVS` +* `.svn` +* `.hg` +* `.lock-wscript` +* `.wafpickle-N` +* `.*.swp` +* `.DS_Store` +* `._*` +* `npm-debug.log` +* `.npmrc` +* `node_modules` +* `config.gypi` +* `*.orig` +* `package-lock.json` (use shrinkwrap instead) + +### main + +The main field is a module ID that is the primary entry point to your program. +That is, if your package is named `foo`, and a user installs it, and then does +`require("foo")`, then your main module's exports object will be returned. + +This should be a module ID relative to the root of your package folder. + +For most modules, it makes the most sense to have a main script and often not +much else. + +### browser + +If your module is meant to be used client-side the browser field should be +used instead of the main field. This is helpful to hint users that it might +rely on primitives that aren't available in Node.js modules. (e.g. `window`) + +### bin + +A lot of packages have one or more executable files that they'd like to +install into the PATH. npm makes this pretty easy (in fact, it uses this +feature to install the "npm" executable.) 
+ +To use this, supply a `bin` field in your package.json which is a map of +command name to local file name. On install, npm will symlink that file into +`prefix/bin` for global installs, or `./node_modules/.bin/` for local +installs. + + +For example, myapp could have this: + +```json +{ "bin" : { "myapp" : "./cli.js" } } +``` + +So, when you install myapp, it'll create a symlink from the `cli.js` script to +`/usr/local/bin/myapp`. + +If you have a single executable, and its name should be the name +of the package, then you can just supply it as a string. For example: + +```json +{ "name": "my-program" +, "version": "1.2.5" +, "bin": "./path/to/program" } +``` + +would be the same as this: + +```json +{ "name": "my-program" +, "version": "1.2.5" +, "bin" : { "my-program" : "./path/to/program" } } +``` + +Please make sure that your file(s) referenced in `bin` starts with +`#!/usr/bin/env node`, otherwise the scripts are started without the node +executable! + +### man + +Specify either a single file or an array of filenames to put in place for the +`man` program to find. + +If only a single file is provided, then it's installed such that it is the +result from `man `, regardless of its actual filename. For example: + +```json +{ "name" : "foo" +, "version" : "1.2.3" +, "description" : "A packaged foo fooer for fooing foos" +, "main" : "foo.js" +, "man" : "./man/doc.1" +} +``` + +would link the `./man/doc.1` file in such that it is the target for `man foo` + +If the filename doesn't start with the package name, then it's prefixed. +So, this: + +```json +{ "name" : "foo" +, "version" : "1.2.3" +, "description" : "A packaged foo fooer for fooing foos" +, "main" : "foo.js" +, "man" : [ "./man/foo.1", "./man/bar.1" ] +} +``` + +will create files to do `man foo` and `man foo-bar`. + +Man files must end with a number, and optionally a `.gz` suffix if they are +compressed. The number dictates which man section the file is installed into. + +```json +{ "name" : "foo" +, "version" : "1.2.3" +, "description" : "A packaged foo fooer for fooing foos" +, "main" : "foo.js" +, "man" : [ "./man/foo.1", "./man/foo.2" ] +} +``` +will create entries for `man foo` and `man 2 foo` + +### directories + +The CommonJS [Packages](http://wiki.commonjs.org/wiki/Packages/1.0) spec details a +few ways that you can indicate the structure of your package using a `directories` +object. If you look at [npm's package.json](https://registry.npmjs.org/npm/latest), +you'll see that it has directories for doc, lib, and man. + +In the future, this information may be used in other creative ways. + +#### directories.lib + +Tell people where the bulk of your library is. Nothing special is done +with the lib folder in any way, but it's useful meta info. + +#### directories.bin + +If you specify a `bin` directory in `directories.bin`, all the files in +that folder will be added. + +Because of the way the `bin` directive works, specifying both a +`bin` path and setting `directories.bin` is an error. If you want to +specify individual files, use `bin`, and for all the files in an +existing `bin` directory, use `directories.bin`. + +#### directories.man + +A folder that is full of man pages. Sugar to generate a "man" array by +walking the folder. + +#### directories.doc + +Put markdown files in here. Eventually, these will be displayed nicely, +maybe, someday. + +#### directories.example + +Put example scripts in here. Someday, it might be exposed in some clever way. + +#### directories.test + +Put your tests in here. 
It is currently not exposed, but it might be in the
future.

### repository

Specify the place where your code lives. This is helpful for people who
want to contribute. If the git repo is on GitHub, then the `npm docs`
command will be able to find you.

Do it like this:

```json
"repository": {
  "type" : "git",
  "url" : "https://github.com/npm/cli.git"
}

"repository": {
  "type" : "svn",
  "url" : "https://v8.googlecode.com/svn/trunk/"
}
```

The URL should be a publicly available (perhaps read-only) url that can be handed
directly to a VCS program without any modification. It should not be a url to an
html project page that you put in your browser. It's for computers.

For GitHub, GitHub gist, Bitbucket, or GitLab repositories you can use the same
shortcut syntax you use for `npm install`:

```json
"repository": "npm/npm"

"repository": "github:user/repo"

"repository": "gist:11081aaa281"

"repository": "bitbucket:user/repo"

"repository": "gitlab:user/repo"
```

If the `package.json` for your package is not in the root directory (for example
if it is part of a monorepo), you can specify the directory in which it lives:

```json
"repository": {
  "type" : "git",
  "url" : "https://github.com/facebook/react.git",
  "directory": "packages/react-dom"
}
```

### scripts

The "scripts" property is a dictionary containing script commands that are run
at various times in the lifecycle of your package. The key is the lifecycle
event, and the value is the command to run at that point.

See [`scripts`](/using-npm/scripts) to find out more about writing package scripts.

### config

A "config" object can be used to set configuration parameters used in package
scripts that persist across upgrades. For instance, if a package had the
following:

```json
{ "name" : "foo"
, "config" : { "port" : "8080" } }
```

and then had a "start" command that referenced the
`npm_package_config_port` environment variable, then the user could
override that by doing `npm config set foo:port 8001`.

See [`config`](/using-npm/config) and [`scripts`](/using-npm/scripts) for more on package
configs.

### dependencies

Dependencies are specified in a simple object that maps a package name to a
version range. The version range is a string which has one or more
space-separated descriptors. Dependencies can also be identified with a
tarball or git URL.

**Please do not put test harnesses or transpilers in your
`dependencies` object.** See `devDependencies`, below.

See [semver](/using-npm/semver) for more details about specifying version ranges.

* `version` Must match `version` exactly
* `>version` Must be greater than `version`
* `>=version` etc
* `<version`
* `<=version`
* `~version` "Approximately equivalent to version". See [semver](/using-npm/semver)
* `^version` "Compatible with version". See [semver](/using-npm/semver)
* `1.2.x` 1.2.0, 1.2.1, etc., but not 1.3.0
* `http://...` See 'URLs as Dependencies' below
* `*` Matches any version
* `""` (just an empty string) Same as `*`
* `version1 - version2` Same as `>=version1 <=version2`
* `range1 || range2` Passes if either range1 or range2 are satisfied.
* `git...` See 'Git URLs as Dependencies' below
* `user/repo` See 'GitHub URLs' below
* `tag` A specific version tagged and published as `tag`. See [`npm dist-tag`](/cli-commands/npm-dist-tag)
* `path/path/path` See [Local Paths](#local-paths) below

For example, these are all valid:

```json
{ "dependencies" :
  { "foo" : "1.0.0 - 2.9999.9999"
  , "bar" : ">=1.0.2 <2.1.2"
  , "baz" : ">1.0.2 <=2.3.4"
  , "boo" : "2.0.1"
  , "qux" : "<1.0.0 || >=2.3.1 <2.4.5 || >=2.5.2 <3.0.0"
  , "asd" : "http://asdf.com/asdf.tar.gz"
  , "til" : "~1.2"
  , "elf" : "~1.2.3"
  , "two" : "2.x"
  , "thr" : "3.3.x"
  , "lat" : "latest"
  , "dyl" : "file:../dyl"
  }
}
```

#### URLs as Dependencies

You may specify a tarball URL in place of a version range.

This tarball will be downloaded and installed locally to your package at
install time.

#### Git URLs as Dependencies

Git urls are of the form:

```bash
<protocol>://[<user>[:<password>]@]<hostname>[:<port>][:][/]<path>[#<commit-ish> | #semver:<semver>]
```

`<protocol>` is one of `git`, `git+ssh`, `git+http`, `git+https`, or
`git+file`.

If `#<commit-ish>` is provided, it will be used to clone exactly that
commit. If the commit-ish has the format `#semver:<semver>`, `<semver>` can
be any valid semver range or exact version, and npm will look for any tags
or refs matching that range in the remote repository, much as it would for a
registry dependency. If neither `#<commit-ish>` nor `#semver:<semver>` is
specified, then `master` is used.

Examples:

```bash
git+ssh://git@github.com:npm/cli.git#v1.0.27
git+ssh://git@github.com:npm/cli#semver:^5.0
git+https://isaacs@github.com/npm/cli.git
git://github.com/npm/cli.git#v1.0.27
```

#### GitHub URLs

As of version 1.1.65, you can refer to GitHub urls as just "foo":
"user/foo-project". Just as with git URLs, a `commit-ish` suffix can be
included. For example:

```json
{
  "name": "foo",
  "version": "0.0.0",
  "dependencies": {
    "express": "expressjs/express",
    "mocha": "mochajs/mocha#4727d357ea",
    "module": "user/repo#feature\/branch"
  }
}
```

#### Local Paths

As of version 2.0.0 you can provide a path to a local directory that contains a
package. Local paths can be saved using `npm install -S` or
`npm install --save`, using any of these forms:

```bash
../foo/bar
~/foo/bar
./foo/bar
/foo/bar
```

in which case they will be normalized to a relative path and added to your
`package.json`. For example:

```json
{
  "name": "baz",
  "dependencies": {
    "bar": "file:../foo/bar"
  }
}
```

This feature is helpful for local offline development and creating
tests that require npm installing where you don't want to hit an
external server, but should not be used when publishing packages
to the public registry.

### devDependencies

If someone is planning on downloading and using your module in their
program, then they probably don't want or need to download and build
the external test or documentation framework that you use.

In this case, it's best to map these additional items in a `devDependencies`
object.

These things will be installed when doing `npm link` or `npm install`
from the root of a package, and can be managed like any other npm
configuration param. See [`config`](/using-npm/config) for more on the topic.

For build steps that are not platform-specific, such as compiling
CoffeeScript or other languages to JavaScript, use the `prepare`
script to do this, and make the required package a devDependency.
+ +For example: + +```json +{ "name": "ethopia-waza", + "description": "a delightfully fruity coffee varietal", + "version": "1.2.3", + "devDependencies": { + "coffee-script": "~1.6.3" + }, + "scripts": { + "prepare": "coffee -o lib/ -c src/waza.coffee" + }, + "main": "lib/waza.js" +} +``` + +The `prepare` script will be run before publishing, so that users +can consume the functionality without requiring them to compile it +themselves. In dev mode (ie, locally running `npm install`), it'll +run this script as well, so that you can test it easily. + +### peerDependencies + +In some cases, you want to express the compatibility of your package with a +host tool or library, while not necessarily doing a `require` of this host. +This is usually referred to as a *plugin*. Notably, your module may be exposing +a specific interface, expected and specified by the host documentation. + +For example: + +```json +{ + "name": "tea-latte", + "version": "1.3.5", + "peerDependencies": { + "tea": "2.x" + } +} +``` + +This ensures your package `tea-latte` can be installed *along* with the second +major version of the host package `tea` only. `npm install tea-latte` could +possibly yield the following dependency graph: + +```bash +├── tea-latte@1.3.5 +└── tea@2.2.0 +``` + +**NOTE: npm versions 1 and 2 will automatically install `peerDependencies` if +they are not explicitly depended upon higher in the dependency tree. In the +next major version of npm (npm@3), this will no longer be the case. You will +receive a warning that the peerDependency is not installed instead.** The +behavior in npms 1 & 2 was frequently confusing and could easily put you into +dependency hell, a situation that npm is designed to avoid as much as possible. + +Trying to install another plugin with a conflicting requirement will cause an +error. For this reason, make sure your plugin requirement is as broad as +possible, and not to lock it down to specific patch versions. + +Assuming the host complies with [semver](https://semver.org/), only changes in +the host package's major version will break your plugin. Thus, if you've worked +with every 1.x version of the host package, use `"^1.0"` or `"1.x"` to express +this. If you depend on features introduced in 1.5.2, use `">= 1.5.2 < 2"`. + +### bundledDependencies + +This defines an array of package names that will be bundled when publishing +the package. + +In cases where you need to preserve npm packages locally or have them +available through a single file download, you can bundle the packages in a +tarball file by specifying the package names in the `bundledDependencies` +array and executing `npm pack`. + +For example: + +If we define a package.json like this: + +```json +{ + "name": "awesome-web-framework", + "version": "1.0.0", + "bundledDependencies": [ + "renderized", "super-streams" + ] +} +``` +we can obtain `awesome-web-framework-1.0.0.tgz` file by running `npm pack`. +This file contains the dependencies `renderized` and `super-streams` which +can be installed in a new project by executing `npm install +awesome-web-framework-1.0.0.tgz`. Note that the package names do not include +any versions, as that information is specified in `dependencies`. + +If this is spelled `"bundleDependencies"`, then that is also honored. + +### optionalDependencies + +If a dependency can be used, but you would like npm to proceed if it cannot be +found or fails to install, then you may put it in the `optionalDependencies` +object. 
This is a map of package name to version or url, just like the +`dependencies` object. The difference is that build failures do not cause +installation to fail. Running `npm install --no-optional` will prevent these +dependencies from being installed. + +It is still your program's responsibility to handle the lack of the +dependency. For example, something like this: + +```js +try { + var foo = require('foo') + var fooVersion = require('foo/package.json').version +} catch (er) { + foo = null +} +if ( notGoodFooVersion(fooVersion) ) { + foo = null +} + +// .. then later in your program .. + +if (foo) { + foo.doFooThings() +} +``` + +Entries in `optionalDependencies` will override entries of the same name in +`dependencies`, so it's usually best to only put in one place. + +### engines + +You can specify the version of node that your stuff works on: + +```json +{ "engines" : { "node" : ">=0.10.3 <0.12" } } +``` + +And, like with dependencies, if you don't specify the version (or if you +specify "\*" as the version), then any version of node will do. + +If you specify an "engines" field, then npm will require that "node" be +somewhere on that list. If "engines" is omitted, then npm will just assume +that it works on node. + +You can also use the "engines" field to specify which versions of npm +are capable of properly installing your program. For example: + +```json +{ "engines" : { "npm" : "~1.0.20" } } +``` + +Unless the user has set the `engine-strict` config flag, this +field is advisory only and will only produce warnings when your package is installed as a dependency. + +### engineStrict + +**This feature was removed in npm 3.0.0** + +Prior to npm 3.0.0, this feature was used to treat this package as if the +user had set `engine-strict`. It is no longer used. + +### os + +You can specify which operating systems your +module will run on: + +```json +"os" : [ "darwin", "linux" ] +``` + +You can also blacklist instead of whitelist operating systems, +just prepend the blacklisted os with a '!': + +```json +"os" : [ "!win32" ] +``` + +The host operating system is determined by `process.platform` + +It is allowed to both blacklist, and whitelist, although there isn't any +good reason to do this. + +### cpu + +If your code only runs on certain cpu architectures, +you can specify which ones. + +```json +"cpu" : [ "x64", "ia32" ] +``` + +Like the `os` option, you can also blacklist architectures: + +```json +"cpu" : [ "!arm", "!mips" ] +``` + +The host architecture is determined by `process.arch` + +### preferGlobal + +**DEPRECATED** + +This option used to trigger an npm warning, but it will no longer warn. It is +purely there for informational purposes. It is now recommended that you install +any binaries as local devDependencies wherever possible. + +### private + +If you set `"private": true` in your package.json, then npm will refuse +to publish it. + +This is a way to prevent accidental publication of private repositories. If +you would like to ensure that a given package is only ever published to a +specific registry (for example, an internal registry), then use the +`publishConfig` dictionary described below to override the `registry` config +param at publish-time. + +### publishConfig + +This is a set of config values that will be used at publish-time. It's +especially handy if you want to set the tag, registry or access, so that +you can ensure that a given package is not tagged with "latest", published +to the global public registry or that a scoped module is private by default. 
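For example, a hypothetical scoped package that should only ever reach an internal registry, tagged as a prerelease, might carry:

```json
{
  "publishConfig": {
    "registry": "https://registry.internal.example.com/",
    "tag": "next",
    "access": "restricted"
  }
}
```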
+ +Any config values can be overridden, but only "tag", "registry" and "access" +probably matter for the purposes of publishing. + +See [`config`](/using-npm/config) to see the list of config options that can be +overridden. + +### DEFAULT VALUES + +npm will default some values based on package contents. + +* `"scripts": {"start": "node server.js"}` + + If there is a `server.js` file in the root of your package, then npm + will default the `start` command to `node server.js`. + +* `"scripts":{"install": "node-gyp rebuild"}` + + If there is a `binding.gyp` file in the root of your package and you have not defined an `install` or `preinstall` script, npm will + default the `install` command to compile using node-gyp. + +* `"contributors": [...]` + + If there is an `AUTHORS` file in the root of your package, npm will + treat each line as a `Name (url)` format, where email and url + are optional. Lines which start with a `#` or are blank, will be + ignored. + +### SEE ALSO + +* [semver](/using-npm/semver) +* [npm init](/cli-commands/npm-init) +* [npm version](/cli-commands/npm-version) +* [npm config](/cli-commands/npm-config) +* [npm help](/cli-commands/npm-help) +* [npm install](/cli-commands/npm-install) +* [npm publish](/cli-commands/npm-publish) +* [npm uninstall](/cli-commands/npm-uninstall) diff --git a/docs/content/configuring-npm/package-lock-json.md b/docs/content/configuring-npm/package-lock-json.md new file mode 100644 index 0000000000000..9f3ca4683defe --- /dev/null +++ b/docs/content/configuring-npm/package-lock-json.md @@ -0,0 +1,149 @@ +--- +section: configuring-npm +title: package-lock.json +description: A manifestation of the manifest +--- + +# package-lock.json(5) + +## A manifestation of the manifest + +### Description + +`package-lock.json` is automatically generated for any operations where npm +modifies either the `node_modules` tree, or `package.json`. It describes the +exact tree that was generated, such that subsequent installs are able to +generate identical trees, regardless of intermediate dependency updates. + +This file is intended to be committed into source repositories, and serves +various purposes: + +* Describe a single representation of a dependency tree such that teammates, deployments, and continuous integration are guaranteed to install exactly the same dependencies. + +* Provide a facility for users to "time-travel" to previous states of `node_modules` without having to commit the directory itself. + +* To facilitate greater visibility of tree changes through readable source control diffs. + +* And optimize the installation process by allowing npm to skip repeated metadata resolutions for previously-installed packages. + +One key detail about `package-lock.json` is that it cannot be published, and it +will be ignored if found in any place other than the toplevel package. It shares +a format with [npm-shrinkwrap.json](/configuring-npm/shrinkwrap-json), which is essentially the same file, but +allows publication. This is not recommended unless deploying a CLI tool or +otherwise using the publication process for producing production packages. + +If both `package-lock.json` and `npm-shrinkwrap.json` are present in the root of +a package, `package-lock.json` will be completely ignored. + + +### File Format + +#### name + +The name of the package this is a package-lock for. This must match what's in +`package.json`. + +#### version + +The version of the package this is a package-lock for. This must match what's in +`package.json`. 
+ +#### lockfileVersion + +An integer version, starting at `1` with the version number of this document +whose semantics were used when generating this `package-lock.json`. + +#### packageIntegrity + +This is a [subresource +integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) value +created from the `package.json`. No preprocessing of the `package.json` should +be done. Subresource integrity strings can be produced by modules like +[`ssri`](https://www.npmjs.com/package/ssri). + +#### preserveSymlinks + +Indicates that the install was done with the environment variable +`NODE_PRESERVE_SYMLINKS` enabled. The installer should insist that the value of +this property match that environment variable. + +#### dependencies + +A mapping of package name to dependency object. Dependency objects have the +following properties: + +##### version + +This is a specifier that uniquely identifies this package and should be +usable in fetching a new copy of it. + +* bundled dependencies: Regardless of source, this is a version number that is purely for informational purposes. +* registry sources: This is a version number. (eg, `1.2.3`) +* git sources: This is a git specifier with resolved committish. (eg, `git+https://example.com/foo/bar#115311855adb0789a0466714ed48a1499ffea97e`) +* http tarball sources: This is the URL of the tarball. (eg, `https://example.com/example-1.3.0.tgz`) +* local tarball sources: This is the file URL of the tarball. (eg `file:///opt/storage/example-1.3.0.tgz`) +* local link sources: This is the file URL of the link. (eg `file:libs/our-module`) + +##### integrity + +This is a [Standard Subresource +Integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) for this +resource. + +* For bundled dependencies this is not included, regardless of source. +* For registry sources, this is the `integrity` that the registry provided, or if one wasn't provided the SHA1 in `shasum`. +* For git sources this is the specific commit hash we cloned from. +* For remote tarball sources this is an integrity based on a SHA512 of + the file. +* For local tarball sources: This is an integrity field based on the SHA512 of the file. + +##### resolved + +* For bundled dependencies this is not included, regardless of source. +* For registry sources this is path of the tarball relative to the registry + URL. If the tarball URL isn't on the same server as the registry URL then + this is a complete URL. + +##### bundled + +If true, this is the bundled dependency and will be installed by the parent +module. When installing, this module will be extracted from the parent +module during the extract phase, not installed as a separate dependency. + +##### dev + +If true then this dependency is either a development dependency ONLY of the +top level module or a transitive dependency of one. This is false for +dependencies that are both a development dependency of the top level and a +transitive dependency of a non-development dependency of the top level. + +##### optional + +If true then this dependency is either an optional dependency ONLY of the +top level module or a transitive dependency of one. This is false for +dependencies that are both an optional dependency of the top level and a +transitive dependency of a non-optional dependency of the top level. + +All optional dependencies should be included even if they're uninstallable +on the current platform. + + +##### requires + +This is a mapping of module name to version. 
This is a list of everything +this module requires, regardless of where it will be installed. The version +should match via normal matching rules a dependency either in our +`dependencies` or in a level higher than us. + + +##### dependencies + +The dependencies of this dependency, exactly as at the top level. + +### See also + +* [npm shrinkwrap](/cli-commands/npm-shrinkwrap) +* [shrinkwrap.json](/configuring-npm/shrinkwrap-json) +* [package-locks](/configuring-npm/package-locks) +* [package.json](/configuring-npm/package-json) +* [npm install](/cli-commands/npm-install) diff --git a/docs/content/configuring-npm/package-locks.md b/docs/content/configuring-npm/package-locks.md new file mode 100644 index 0000000000000..de65e3c634f07 --- /dev/null +++ b/docs/content/configuring-npm/package-locks.md @@ -0,0 +1,182 @@ +--- +section: configuring-npm +title: package-locks +description: An explanation of npm lockfiles +--- + +# package-locks(5) + +## An explanation of npm lockfiles + +### Description + +Conceptually, the "input" to [`npm install`](/cli-commands/npm-install) is a [package.json](/configuring-npm/package-json), while its +"output" is a fully-formed `node_modules` tree: a representation of the +dependencies you declared. In an ideal world, npm would work like a pure +function: the same `package.json` should produce the exact same `node_modules` +tree, any time. In some cases, this is indeed true. But in many others, npm is +unable to do this. There are multiple reasons for this: + +* different versions of npm (or other package managers) may have been used to install a package, each using slightly different installation algorithms. + +* a new version of a direct semver-range package may have been published since the last time your packages were installed, and thus a newer version will be used. + +* A dependency of one of your dependencies may have published a new version, which will update even if you used pinned dependency specifiers (`1.2.3` instead of `^1.2.3`) + +* The registry you installed from is no longer available, or allows mutation of versions (unlike the primary npm registry), and a different version of a package exists under the same version number now. + +As an example, consider package A: + +```json +{ + "name": "A", + "version": "0.1.0", + "dependencies": { + "B": "<0.1.0" + } +} +``` + +package B: + +```json +{ + "name": "B", + "version": "0.0.1", + "dependencies": { + "C": "<0.1.0" + } +} +``` + +and package C: +```json +{ + "name": "C", + "version": "0.0.1" +} +``` + +If these are the only versions of A, B, and C available in the +registry, then a normal `npm install A` will install: + +```json +A@0.1.0 +`-- B@0.0.1 + `-- C@0.0.1 +``` + +However, if B@0.0.2 is published, then a fresh `npm install A` will +install: + +```bash +A@0.1.0 +`-- B@0.0.2 + `-- C@0.0.1 +``` + +assuming the new version did not modify B's dependencies. Of course, +the new version of B could include a new version of C and any number +of new dependencies. If such changes are undesirable, the author of A +could specify a dependency on B@0.0.1. However, if A's author and B's +author are not the same person, there's no way for A's author to say +that he or she does not want to pull in newly published versions of C +when B hasn't changed at all. + +To prevent this potential issue, npm uses [package-lock.json](/configuring-npm/package-lock-json) or, if present, [npm-shrinkwrap.json](/configuring-npm/shrinkwrap-json). These files are called package locks, or lockfiles. 
+ +Whenever you run `npm install`, npm generates or updates your package lock, +which will look something like this: + +```json +{ + "name": "A", + "version": "0.1.0", + ...metadata fields... + "dependencies": { + "B": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/B/-/B-0.0.1.tgz", + "integrity": "sha512-DeAdb33F+" + "dependencies": { + "C": { + "version": "git://github.com/org/C.git#5c380ae319fc4efe9e7f2d9c78b0faa588fd99b4" + } + } + } + } +} +``` + +This file describes an *exact*, and more importantly *reproducible* +`node_modules` tree. Once it's present, any future installation will base its +work off this file, instead of recalculating dependency versions off +[package.json](/configuring-npm/package-json). + +The presence of a package lock changes the installation behavior such that: + +1. The module tree described by the package lock is reproduced. This means +reproducing the structure described in the file, using the specific files +referenced in "resolved" if available, falling back to normal package resolution +using "version" if one isn't. + +2. The tree is walked and any missing dependencies are installed in the usual +fashion. + +If `preshrinkwrap`, `shrinkwrap` or `postshrinkwrap` are in the `scripts` +property of the `package.json`, they will be executed in order. `preshrinkwrap` +and `shrinkwrap` are executed before the shrinkwrap, `postshrinkwrap` is +executed afterwards. These scripts run for both `package-lock.json` and +`npm-shrinkwrap.json`. For example to run some postprocessing on the generated +file: + +```json + "scripts": { + "postshrinkwrap": "json -I -e \"this.myMetadata = $MY_APP_METADATA\"" + } +``` + +#### Using locked packages + +Using a locked package is no different than using any package without a package +lock: any commands that update `node_modules` and/or `package.json`'s +dependencies will automatically sync the existing lockfile. This includes `npm +install`, `npm rm`, `npm update`, etc. To prevent this update from happening, +you can use the `--no-save` option to prevent saving altogether, or +`--no-shrinkwrap` to allow `package.json` to be updated while leaving +`package-lock.json` or `npm-shrinkwrap.json` intact. + +It is highly recommended you commit the generated package lock to source +control: this will allow anyone else on your team, your deployments, your +CI/continuous integration, and anyone else who runs `npm install` in your +package source to get the exact same dependency tree that you were developing +on. Additionally, the diffs from these changes are human-readable and will +inform you of any changes npm has made to your `node_modules`, so you can notice +if any transitive dependencies were updated, hoisted, etc. + +#### Resolving lockfile conflicts + +Occasionally, two separate npm install will create package locks that cause +merge conflicts in source control systems. As of `npm@5.7.0`, these conflicts +can be resolved by manually fixing any `package.json` conflicts, and then +running `npm install [--package-lock-only]` again. npm will automatically +resolve any conflicts for you and write a merged package lock that includes all +the dependencies from both branches in a reasonable tree. If +`--package-lock-only` is provided, it will do this without also modifying your +local `node_modules/`. + +To make this process seamless on git, consider installing +[`npm-merge-driver`](https://npm.im/npm-merge-driver), which will teach git how +to do this itself without any user interaction. 
In short: `$ npx +npm-merge-driver install -g` will let you do this, and even works with +pre-`npm@5.7.0` versions of npm 5, albeit a bit more noisily. Note that if +`package.json` itself conflicts, you will have to resolve that by hand and run +`npm install` manually, even with the merge driver. + +### See Also + +* https://medium.com/@sdboyer/so-you-want-to-write-a-package-manager-4ae9c17d9527 +* [package.json](/configuring-npm/package-json) +* [package-lock.json](/configuring-npm/package-lock-json) +* [shrinkwrap.json](/configuring-npm/shrinkwrap-json) +* [npm shrinkwrap](/cli-commands/npm-shrinkwrap) diff --git a/docs/content/configuring-npm/shrinkwrap-json.md b/docs/content/configuring-npm/shrinkwrap-json.md new file mode 100644 index 0000000000000..bc5e061d55b09 --- /dev/null +++ b/docs/content/configuring-npm/shrinkwrap-json.md @@ -0,0 +1,34 @@ +--- +section: configuring-npm +title: shrinkwrap.json +description: A publishable lockfile +--- + +# npm-shrinkwrap.json(5) + +## A publishable lockfile + +### Description + +`npm-shrinkwrap.json` is a file created by [`npm shrinkwrap`](/cli-commands/npm-shrinkwrap). It is identical to +`package-lock.json`, with one major caveat: Unlike `package-lock.json`, +`npm-shrinkwrap.json` may be included when publishing a package. + +The recommended use-case for `npm-shrinkwrap.json` is applications deployed +through the publishing process on the registry: for example, daemons and +command-line tools intended as global installs or `devDependencies`. It's +strongly discouraged for library authors to publish this file, since that would +prevent end users from having control over transitive dependency updates. + +Additionally, if both `package-lock.json` and `npm-shrinkwrap.json` are present +in a package root, `package-lock.json` will be ignored in favor of this file. + +For full details and description of the `npm-shrinkwrap.json` file format, refer +to the manual page for [package-lock.json](/configuring-npm/package-lock-json). + +### See also + +* [npm shrinkwrap](/cli-commands/npm-shrinkwrap) +* [package-lock.json](/configuring-npm/package-lock-json) +* [package.json](/configuring-npm/package-json) +* [npm install](/cli-commands/npm-install) diff --git a/docs/content/using-npm/config.md b/docs/content/using-npm/config.md new file mode 100644 index 0000000000000..a6947b17d58d1 --- /dev/null +++ b/docs/content/using-npm/config.md @@ -0,0 +1,1300 @@ +--- +section: using-npm +title: config +description: More than you probably want to know about npm configuration +--- + +# config(7) + +## More than you probably want to know about npm configuration + +### Description + +npm gets its configuration values from the following sources, sorted by priority: + +#### Command Line Flags + +Putting `--foo bar` on the command line sets the `foo` configuration +parameter to `"bar"`. A `--` argument tells the cli parser to stop +reading flags. Using `--flag` without specifying any value will set +the value to `true`. + +Example: `--flag1 --flag2` will set both configuration parameters +to `true`, while `--flag1 --flag2 bar` will set `flag1` to `true`, +and `flag2` to `bar`. Finally, `--flag1 --flag2 -- bar` will set +both configuration parameters to `true`, and the `bar` is taken +as a command argument. + +#### Environment Variables + +Any environment variables that start with `npm_config_` will be +interpreted as a configuration parameter. For example, putting +`npm_config_foo=bar` in your environment will set the `foo` +configuration parameter to `bar`. 
Any environment configurations that +are not given a value will be given the value of `true`. Config +values are case-insensitive, so `NPM_CONFIG_FOO=bar` will work the +same. However, please note that inside [`scripts`](/using-npm/scripts) +npm will set its own environment variables and Node will prefer +those lowercase versions over any uppercase ones that you might set. +For details see [this issue](https://github.com/npm/npm/issues/14528). + +Notice that you need to use underscores instead of dashes, so `--allow-same-version` +would become `npm_config_allow_same_version=true`. + +#### npmrc Files + +The four relevant files are: + +* per-project configuration file (`/path/to/my/project/.npmrc`) +* per-user configuration file (defaults to `$HOME/.npmrc`; configurable via CLI + option `--userconfig` or environment variable `$NPM_CONFIG_USERCONFIG`) +* global configuration file (defaults to `$PREFIX/etc/npmrc`; configurable via + CLI option `--globalconfig` or environment variable `$NPM_CONFIG_GLOBALCONFIG`) +* npm's built-in configuration file (`/path/to/npm/npmrc`) + +See [npmrc](/configuring-npm/npmrc) for more details. + +#### Default Configs + +Run `npm config ls -l` to see a set of configuration parameters that are +internal to npm, and are defaults if nothing else is specified. + +### Shorthands and Other CLI Niceties + +The following shorthands are parsed on the command-line: + +* `-v`: `--version` +* `-h`, `-?`, `--help`, `-H`: `--usage` +* `-s`, `--silent`: `--loglevel silent` +* `-q`, `--quiet`: `--loglevel warn` +* `-d`: `--loglevel info` +* `-dd`, `--verbose`: `--loglevel verbose` +* `-ddd`: `--loglevel silly` +* `-g`: `--global` +* `-C`: `--prefix` +* `-l`: `--long` +* `-m`: `--message` +* `-p`, `--porcelain`: `--parseable` +* `-reg`: `--registry` +* `-f`: `--force` +* `-desc`: `--description` +* `-S`: `--save` +* `-P`: `--save-prod` +* `-D`: `--save-dev` +* `-O`: `--save-optional` +* `-B`: `--save-bundle` +* `-E`: `--save-exact` +* `-y`: `--yes` +* `-n`: `--yes false` +* `ll` and `la` commands: `ls --long` + +If the specified configuration param resolves unambiguously to a known +configuration parameter, then it is expanded to that configuration +parameter. For example: + +```bash +npm ls --par +# same as: +npm ls --parseable +``` + +If multiple single-character shorthands are strung together, and the +resulting combination is unambiguously not some other configuration +param, then it is expanded to its various component pieces. For +example: + +```bash +npm ls -gpld +# same as: +npm ls --global --parseable --long --loglevel info +``` + +### Per-Package Config Settings + +When running scripts (see [`scripts`](/using-npm/scripts)) the package.json "config" +keys are overwritten in the environment if there is a config param of +`[@]:`. For example, if the package.json has +this: + +```json +{ "name" : "foo" +, "config" : { "port" : "8080" } +, "scripts" : { "start" : "node server.js" } } +``` + +and the server.js is this: + +```javascript +http.createServer(...).listen(process.env.npm_package_config_port) +``` + +then the user could change the behavior by doing: + +```bash +npm config set foo:port 80 +``` + +See [package.json](/configuring-npm/package-json) for more information. + +### Config Settings + +#### access + +* Default: `restricted` +* Type: Access + +When publishing scoped packages, the access level defaults to `restricted`. If +you want your scoped package to be publicly viewable (and installable) set +`--access=public`. 
The only valid values for `access` are `public` and +`restricted`. Unscoped packages _always_ have an access level of `public`. + +#### allow-same-version + +* Default: false +* Type: Boolean + +Prevents throwing an error when `npm version` is used to set the new version +to the same value as the current version. + +#### always-auth + +* Default: false +* Type: Boolean + +Force npm to always require authentication when accessing the registry, +even for `GET` requests. + +#### also + +* Default: null +* Type: String + +When "dev" or "development" and running local `npm shrinkwrap`, +`npm outdated`, or `npm update`, is an alias for `--dev`. + +#### audit + +* Default: true +* Type: Boolean + +When "true" submit audit reports alongside `npm install` runs to the default +registry and all registries configured for scopes. See the documentation +for [`npm audit`](/cli-commands/npm-audit) for details on what is submitted. + +#### audit-level + +* Default: `"low"` +* Type: `'low'`, `'moderate'`, `'high'`, `'critical'` + +The minimum level of vulnerability for `npm audit` to exit with +a non-zero exit code. + +#### auth-type + +* Default: `'legacy'` +* Type: `'legacy'`, `'sso'`, `'saml'`, `'oauth'` + +What authentication strategy to use with `adduser`/`login`. + +#### before + +* Alias: enjoy-by +* Default: null +* Type: Date + +If passed to `npm install`, will rebuild the npm tree such that only versions +that were available **on or before** the `--before` time get installed. +If there's no versions available for the current set of direct dependencies, the +command will error. + +If the requested version is a `dist-tag` and the given tag does not pass the +`--before` filter, the most recent version less than or equal to that tag will +be used. For example, `foo@latest` might install `foo@1.2` even though `latest` +is `2.0`. + +#### bin-links + +* Default: `true` +* Type: Boolean + +Tells npm to create symlinks (or `.cmd` shims on Windows) for package +executables. + +Set to false to have it not do this. This can be used to work around +the fact that some file systems don't support symlinks, even on +ostensibly Unix systems. + +#### browser + +* Default: OS X: `"open"`, Windows: `"start"`, Others: `"xdg-open"` +* Type: String + +The browser that is called by the `npm docs` command to open websites. + +#### ca + +* Default: The npm CA certificate +* Type: String, Array or null + +The Certificate Authority signing certificate that is trusted for SSL +connections to the registry. Values should be in PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with newlines +replaced by the string "\n". For example: + +```bash +ca="-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----" +``` + +Set to `null` to only allow "known" registrars, or to a specific CA cert +to trust only that specific signing authority. + +Multiple CAs can be trusted by specifying an array of certificates: + +```bash +ca[]="..." +ca[]="..." +``` + +See also the `strict-ssl` config. + +#### cafile + +* Default: `null` +* Type: path + +A path to a file containing one or multiple Certificate Authority signing +certificates. Similar to the `ca` setting, but allows for multiple CA's, as +well as for the CA information to be stored in a file on disk. + +#### cache + +* Default: Windows: `%AppData%\npm-cache`, Posix: `~/.npm` +* Type: path + +The location of npm's cache directory. 
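For instance, to inspect or relocate the cache (the path below is just an example):

```bash
npm config get cache                 # print the cache directory currently in effect
npm config set cache /tmp/npm-cache  # persist a different location in the user npmrc
npm cache verify                     # check the contents and integrity of the cache
```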
See [`npm cache`](/cli-commands/npm-cache) + +#### cache-lock-stale + +* Default: 60000 (1 minute) +* Type: Number + +The number of ms before cache folder lockfiles are considered stale. + +#### cache-lock-retries + +* Default: 10 +* Type: Number + +Number of times to retry to acquire a lock on cache folder lockfiles. + +#### cache-lock-wait + +* Default: 10000 (10 seconds) +* Type: Number + +Number of ms to wait for cache lock files to expire. + +#### cache-max + +* Default: Infinity +* Type: Number + +**DEPRECATED**: This option has been deprecated in favor of `--prefer-online`. + +`--cache-max=0` is an alias for `--prefer-online`. + +#### cache-min + +* Default: 10 +* Type: Number + +**DEPRECATED**: This option has been deprecated in favor of `--prefer-offline`. + +`--cache-min=9999 (or bigger)` is an alias for `--prefer-offline`. + +#### cert + +* Default: `null` +* Type: String + +A client certificate to pass when accessing the registry. Values should be in +PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with newlines replaced by the string "\n". For example: + +```bash +cert="-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----" +``` + +It is _not_ the path to a certificate file (and there is no "certfile" option). + +#### cidr + +* Default: `null` +* Type: String, Array, null + +This is a list of CIDR address to be used when configuring limited access tokens with the `npm token create` command. + +#### color + +* Default: true +* Type: Boolean or `"always"` + +If false, never shows colors. If `"always"` then always shows colors. +If true, then only prints color codes for tty file descriptors. + +This option can also be changed using the environment: colors are +disabled when the environment variable `NO_COLOR` is set to any value. + +#### depth + +* Default: Infinity +* Type: Number + +The depth to go when recursing directories for `npm ls`, +`npm cache ls`, and `npm outdated`. + +For `npm outdated`, a setting of `Infinity` will be treated as `0` +since that gives more useful information. To show the outdated status +of all packages and dependents, use a large integer value, +e.g., `npm outdated --depth 9999` + +#### description + +* Default: true +* Type: Boolean + +Show the description in `npm search` + +#### dev + +* Default: false +* Type: Boolean + +Install `dev-dependencies` along with packages. + +#### dry-run + +* Default: false +* Type: Boolean + +Indicates that you don't want npm to make any changes and that it should +only report what it would have done. This can be passed into any of the +commands that modify your local installation, eg, `install`, `update`, +`dedupe`, `uninstall`. This is NOT currently honored by some network related +commands, eg `dist-tags`, `owner`, etc. + +#### editor + +* Default: `EDITOR` environment variable if set, or `"vi"` on Posix, + or `"notepad"` on Windows. +* Type: path + +The command to run for `npm edit` or `npm config edit`. + +#### engine-strict + +* Default: false +* Type: Boolean + +If set to true, then npm will stubbornly refuse to install (or even +consider installing) any package that claims to not be compatible with +the current Node.js version. + +#### force + +* Default: false +* Type: Boolean + +Makes various commands more forceful. + +* lifecycle script failure does not block progress. +* publishing clobbers previously published versions. +* skips cache when requesting from the registry. +* prevents checks against clobbering non-npm files. 
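Many of the settings above can also be supplied per invocation as CLI flags rather than persisted in an npmrc file, for example:

```bash
npm install --dry-run       # report what would change without touching node_modules
npm outdated --depth 9999   # show outdated status for the entire dependency tree
npm install --force         # take the more forceful code paths described above
```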
+ +#### format-package-lock + +* Default: true +* Type: Boolean + +Format `package-lock.json` or `npm-shrinkwrap.json` as a human-readable file. + +#### fetch-retries + +* Default: 2 +* Type: Number + +The "retries" config for the `retry` module to use when fetching +packages from the registry. + +#### fetch-retry-factor + +* Default: 10 +* Type: Number + +The "factor" config for the `retry` module to use when fetching +packages. + +#### fetch-retry-mintimeout + +* Default: 10000 (10 seconds) +* Type: Number + +The "minTimeout" config for the `retry` module to use when fetching +packages. + +#### fetch-retry-maxtimeout + +* Default: 60000 (1 minute) +* Type: Number + +The "maxTimeout" config for the `retry` module to use when fetching +packages. + +#### fund + +* Default: true +* Type: Boolean + +When "true" displays the message at the end of each `npm install` +acknowledging the number of dependencies looking for funding. +See [`npm fund`](/cli-commands/npm-fund) for details. + +#### git + +* Default: `"git"` +* Type: String + +The command to use for git commands. If git is installed on the +computer, but is not in the `PATH`, then set this to the full path to +the git binary. + +#### git-tag-version + +* Default: `true` +* Type: Boolean + +Tag the commit when using the `npm version` command. + +#### commit-hooks + +* Default: `true` +* Type: Boolean + +Run git commit hooks when using the `npm version` command. + +#### global + +* Default: false +* Type: Boolean + +Operates in "global" mode, so that packages are installed into the +`prefix` folder instead of the current working directory. See +[folders](/configuring-npm/folders) for more on the differences in behavior. + +* packages are installed into the `{prefix}/lib/node_modules` folder, instead of the + current working directory. +* bin files are linked to `{prefix}/bin` +* man pages are linked to `{prefix}/share/man` + +#### globalconfig + +* Default: {prefix}/etc/npmrc +* Type: path + +The config file to read for global config options. + +#### global-style + +* Default: false +* Type: Boolean + +Causes npm to install the package into your local `node_modules` folder with +the same layout it uses with the global `node_modules` folder. Only your +direct dependencies will show in `node_modules` and everything they depend +on will be flattened in their `node_modules` folders. This obviously will +eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling` will be +preferred. + +#### group + +* Default: GID of the current process +* Type: String or Number + +The group to use when running package scripts in global mode as the root +user. + +#### heading + +* Default: `"npm"` +* Type: String + +The string that starts all the debugging log output. + +#### https-proxy + +* Default: null +* Type: url + +A proxy to use for outgoing https requests. If the `HTTPS_PROXY` or +`https_proxy` or `HTTP_PROXY` or `http_proxy` environment variables are set, +proxy settings will be honored by the underlying `request` library. + +#### if-present + +* Default: false +* Type: Boolean + +If true, npm will not exit with an error code when `run-script` is invoked for +a script that isn't defined in the `scripts` section of `package.json`. This +option can be used when it's desirable to optionally run a script when it's +present and fail if the script fails. This is useful, for example, when running +scripts that may only apply for some builds in an otherwise generic CI setup.
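+
+A minimal sketch of `if-present` in a generic CI step, assuming a project that may or may not define a `lint` script:
+
+```bash
+# Exits 0 even when no "lint" script is defined in package.json
+npm run lint --if-present
+```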
+ +#### ignore-prepublish + +* Default: false +* Type: Boolean + +If true, npm will not run `prepublish` scripts. + +#### ignore-scripts + +* Default: false +* Type: Boolean + +If true, npm does not run scripts specified in package.json files. + +#### init-module + +* Default: ~/.npm-init.js +* Type: path + +A module that will be loaded by the `npm init` command. See the +documentation for the +[init-package-json](https://github.com/isaacs/init-package-json) module +for more information, or [npm init](/cli-commands/npm-init). + +#### init-author-name + +* Default: "" +* Type: String + +The value `npm init` should use by default for the package author's name. + +#### init-author-email + +* Default: "" +* Type: String + +The value `npm init` should use by default for the package author's email. + +#### init-author-url + +* Default: "" +* Type: String + +The value `npm init` should use by default for the package author's homepage. + +#### init-license + +* Default: "ISC" +* Type: String + +The value `npm init` should use by default for the package license. + +#### init-version + +* Default: "1.0.0" +* Type: semver + +The value that `npm init` should use by default for the package +version number, if not already set in package.json. + +#### json + +* Default: false +* Type: Boolean + +Whether or not to output JSON data, rather than the normal output. + +This feature is currently experimental, and the output data structures for many +commands are either not implemented in JSON yet, or subject to change. Only the +output from `npm ls --json` and `npm search --json` are currently valid. + +#### key + +* Default: `null` +* Type: String + +A client key to pass when accessing the registry. Values should be in PEM +format with newlines replaced by the string "\n". For example: + +```bash +key="-----BEGIN PRIVATE KEY-----\nXXXX\nXXXX\n-----END PRIVATE KEY-----" +``` + +It is _not_ the path to a key file (and there is no "keyfile" option). + +#### legacy-bundling + +* Default: false +* Type: Boolean + +Causes npm to install the package such that versions of npm prior to 1.4, +such as the one included with node 0.8, can install the package. This +eliminates all automatic deduping. If used with `global-style` this option +will be preferred. + +#### link + +* Default: false +* Type: Boolean + +If true, then local installs will link if there is a suitable globally +installed package. + +Note that this means that local installs can cause things to be +installed into the global space at the same time. The link is only done +if one of the two conditions is met: + +* The package is not already installed globally, or +* the globally installed version is identical to the version that is + being installed locally. + +#### local-address + +* Default: undefined +* Type: IP Address + +The IP address of the local interface to use when making connections +to the npm registry. Must be IPv4 in versions of Node prior to 0.12. + +#### loglevel + +* Default: "notice" +* Type: String +* Values: "silent", "error", "warn", "notice", "http", "timing", "info", + "verbose", "silly" + +What level of logs to report. On failure, *all* logs are written to +`npm-debug.log` in the current working directory. + +Any logs of a higher level than the setting are shown. The default is "notice". + +#### logstream + +* Default: process.stderr +* Type: Stream + +This is the stream that is passed to the +[npmlog](https://github.com/npm/npmlog) module at run time.
+ +It cannot be set from the command line, but if you are using npm +programmatically, you may wish to send logs to somewhere other than +stderr. + +If the `color` config is set to true, then this stream will receive +colored output if it is a TTY. + +#### logs-max + +* Default: 10 +* Type: Number + +The maximum number of log files to store. + +#### long + +* Default: false +* Type: Boolean + +Show extended information in `npm ls` and `npm search`. + +#### maxsockets + +* Default: 50 +* Type: Number + +The maximum number of connections to use per origin (protocol/host/port +combination). Passed to the `http` `Agent` used to make the request. + +#### message + +* Default: "%s" +* Type: String + +The commit message used by `npm version` when creating a version commit. + +Any "%s" in the message will be replaced with the version number. + +#### metrics-registry + +* Default: The value of `registry` (which defaults to "https://registry.npmjs.org/") +* Type: String + +The registry you want to send CLI metrics to if `send-metrics` is true. + +#### node-options + +* Default: null +* Type: String + +Options to pass through to Node.js via the `NODE_OPTIONS` environment +variable. This does not impact how npm itself is executed but it does +impact how lifecycle scripts are called. + +#### node-version + +* Default: process.version +* Type: semver or false + +The node version to use when checking a package's `engines` map. + +#### noproxy + +* Default: null +* Type: String or Array + +A comma-separated string or an array of domain extensions that a proxy should not be used for. + +#### offline + +* Default: false +* Type: Boolean + +Force offline mode: no network requests will be done during install. To allow +the CLI to fill in missing cache data, see `--prefer-offline`. + +#### onload-script + +* Default: false +* Type: path + +A node module to `require()` when npm loads. Useful for programmatic +usage. + +#### only + +* Default: null +* Type: String + +When "dev" or "development" and running local `npm install` without any +arguments, only devDependencies (and their dependencies) are installed. + +When "dev" or "development" and running local `npm ls`, `npm outdated`, or +`npm update`, is an alias for `--dev`. + +When "prod" or "production" and running local `npm install` without any +arguments, only non-devDependencies (and their dependencies) are +installed. + +When "prod" or "production" and running local `npm ls`, `npm outdated`, or +`npm update`, is an alias for `--production`. + +#### optional + +* Default: true +* Type: Boolean + +Attempt to install packages in the `optionalDependencies` object. Note +that if these packages fail to install, the overall installation +process is not aborted. + +#### otp + +* Default: null +* Type: Number + +This is a one-time password from a two-factor authenticator. It's needed +when publishing or changing package permissions with `npm access`. + +#### package-lock + +* Default: true +* Type: Boolean + +If set to false, then ignore `package-lock.json` files when installing. This +will also prevent _writing_ `package-lock.json` if `save` is true. + +When package-locks are disabled, automatic pruning of extraneous +modules will also be disabled. To remove extraneous modules with +package-locks disabled, use `npm prune`. + +This option is an alias for `--shrinkwrap`.
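+
+For example, `package-lock` can be turned off for a single install from the command line, or persistently via the config (a sketch, not the only way to set it):
+
+```bash
+# Install without reading or writing package-lock.json
+npm install --no-package-lock
+
+# Or disable package-locks for this user going forward
+npm config set package-lock false
+```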
+ +#### package-lock-only + +* Default: false +* Type: Boolean + +If set to true, it will update only the `package-lock.json`, +instead of checking `node_modules` and downloading dependencies. + +#### parseable + +* Default: false +* Type: Boolean + +Output parseable results from commands that write to +standard output. For `npm search`, this will be tab-separated table format. + +#### prefer-offline + +* Default: false +* Type: Boolean + +If true, staleness checks for cached data will be bypassed, but missing data +will be requested from the server. To force full offline mode, use `--offline`. + +This option is effectively equivalent to `--cache-min=9999999`. + +#### prefer-online + +* Default: false +* Type: Boolean + +If true, staleness checks for cached data will be forced, making the CLI look +for updates immediately even for fresh package data. + +#### prefix + +* Default: see [folders](/configuring-npm/folders) +* Type: path + +The location to install global items. If set on the command line, then +it forces non-global commands to run in the specified folder. + +#### preid + +* Default: "" +* Type: String + +The "prerelease identifier" to use as a prefix for the "prerelease" part of a +semver. Like the `rc` in `1.2.0-rc.8`. + +#### production + +* Default: false +* Type: Boolean + +Set to true to run in "production" mode. + +1. devDependencies are not installed at the topmost level when running + local `npm install` without any arguments. +2. Set the NODE_ENV="production" for lifecycle scripts. + +#### progress + +* Default: true, unless TRAVIS or CI env vars set. +* Type: Boolean + +When set to `true`, npm will display a progress bar during time intensive +operations, if `process.stderr` is a TTY. + +Set to `false` to suppress the progress bar. + +#### proxy + +* Default: null +* Type: url + +A proxy to use for outgoing http requests. If the `HTTP_PROXY` or +`http_proxy` environment variables are set, proxy settings will be +honored by the underlying `request` library. + +#### read-only + +* Default: false +* Type: Boolean + +This is used to mark a token as unable to publish when configuring limited access tokens with the `npm token create` command. + +#### rebuild-bundle + +* Default: true +* Type: Boolean + +Rebuild bundled dependencies after installation. + +#### registry + +* Default: https://registry.npmjs.org/ +* Type: url + +The base URL of the npm package registry. + +#### rollback + +* Default: true +* Type: Boolean + +Remove failed installs. + +#### save + +* Default: true +* Type: Boolean + +Save installed packages to a package.json file as dependencies. + +When used with the `npm rm` command, it removes it from the `dependencies` +object. + +Only works if there is already a package.json file present. + +#### save-bundle + +* Default: false +* Type: Boolean + +If a package would be saved at install time by the use of `--save`, +`--save-dev`, or `--save-optional`, then also put it in the +`bundleDependencies` list. + +When used with the `npm rm` command, it removes it from the +bundledDependencies list. + +#### save-prod + +* Default: false +* Type: Boolean + +Makes sure that a package will be saved into `dependencies` specifically. This +is useful if a package already exists in `devDependencies` or +`optionalDependencies`, but you want to move it to be a production dep. This is +also the default behavior if `--save` is true, and neither `--save-dev` or +`--save-optional` are true. 
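+
+A hedged example of the `save-prod` behavior described above (`left-pad` is only an illustrative package name):
+
+```bash
+# Save into dependencies specifically, even if the package is currently
+# listed under devDependencies or optionalDependencies
+npm install left-pad --save-prod
+```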
+ +#### save-dev + +* Default: false +* Type: Boolean + +Save installed packages to a package.json file as `devDependencies`. + +When used with the `npm rm` command, it removes it from the +`devDependencies` object. + +Only works if there is already a package.json file present. + +#### save-exact + +* Default: false +* Type: Boolean + +Dependencies saved to package.json using `--save`, `--save-dev` or +`--save-optional` will be configured with an exact version rather than +using npm's default semver range operator. + +#### save-optional + +* Default: false +* Type: Boolean + +Save installed packages to a package.json file as +optionalDependencies. + +When used with the `npm rm` command, it removes it from the +`optionalDependencies` object. + +Only works if there is already a package.json file present. + +#### save-prefix + +* Default: '^' +* Type: String + +Configure how versions of packages installed to a package.json file via +`--save` or `--save-dev` get prefixed. + +For example if a package has version `1.2.3`, by default its version is +set to `^1.2.3` which allows minor upgrades for that package, but after +`npm config set save-prefix='~'` it would be set to `~1.2.3` which only allows +patch upgrades. + +#### scope + +* Default: the scope of the current project, if any, or "" +* Type: String + +Associate an operation with a scope for a scoped registry. Useful when logging +in to a private registry for the first time: +`npm login --scope=@organization --registry=registry.organization.com`, which +will cause `@organization` to be mapped to the registry for future installation +of packages specified according to the pattern `@organization/package`. + +#### script-shell + +* Default: `null` +* Type: path + +The shell to use for scripts run with the `npm run` command. + +#### scripts-prepend-node-path + +* Default: "warn-only" +* Type: Boolean, `"auto"` or `"warn-only"` + +If set to `true`, add the directory in which the current `node` executable +resides to the `PATH` environment variable when running scripts, +even if that means that `npm` will invoke a different `node` executable than +the one which it is running. + +If set to `false`, never modify `PATH` with that. + +If set to `"warn-only"`, never modify `PATH` but print a warning if `npm` thinks +that you may want to run it with `true`, e.g. because the `node` executable +in the `PATH` is not the one `npm` was invoked with. + +If set to `auto`, only add that directory to the `PATH` environment variable +if the `node` executable with which `npm` was invoked and the one that is found +first on the `PATH` are different. + +#### searchexclude + +* Default: "" +* Type: String + +Space-separated options that limit the results from search. + +#### searchopts + +* Default: "" +* Type: String + +Space-separated options that are always passed to search. + +#### searchlimit + +* Default: 20 +* Type: Number + +Number of items to limit search results to. Will not apply at all to legacy +searches. + +#### searchstaleness + +* Default: 900 (15 minutes) +* Type: Number + +The age of the cache, in seconds, before another registry request is made if +using the legacy search endpoint. + +#### send-metrics + +* Default: false +* Type: Boolean + +If true, success/failure metrics will be reported to the registry stored in +`metrics-registry`. These requests contain the number of successful and +failing runs of the npm CLI and the time period over which those counts were +gathered. No identifying information is included in these requests.
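+
+Tying together the `save-exact` and `save-prefix` options above, a small sketch (the package name is illustrative):
+
+```bash
+# Record an exact version instead of a ^ range for this one install
+npm install left-pad --save-exact
+
+# Or change the default save prefix from ^ to ~ for future installs
+npm config set save-prefix '~'
+```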
+ +#### shell + +* Default: SHELL environment variable, or "bash" on Posix, or "cmd" on + Windows +* Type: path + +The shell to run for the `npm explore` command. + +#### shrinkwrap + +* Default: true +* Type: Boolean + +If set to false, then ignore `npm-shrinkwrap.json` files when installing. This +will also prevent _writing_ `npm-shrinkwrap.json` if `save` is true. + +This option is an alias for `--package-lock`. + +#### sign-git-commit + +* Default: false +* Type: Boolean + +If set to true, then the `npm version` command will commit the new package +version using `-S` to add a signature. + +Note that git requires you to have set up GPG keys in your git configs +for this to work properly. + +#### sign-git-tag + +* Default: false +* Type: Boolean + +If set to true, then the `npm version` command will tag the version +using `-s` to add a signature. + +Note that git requires you to have set up GPG keys in your git configs +for this to work properly. + +#### sso-poll-frequency + +* Default: 500 +* Type: Number + +When used with SSO-enabled `auth-type`s, configures how regularly the registry +should be polled while the user is completing authentication. + +#### sso-type + +* Default: 'oauth' +* Type: 'oauth', 'saml', or null + +If `--auth-type=sso`, the type of SSO type to use. + +#### strict-ssl + +* Default: true +* Type: Boolean + +Whether or not to do SSL key validation when making requests to the +registry via https. + +See also the `ca` config. + +#### tag + +* Default: latest +* Type: String + +If you ask npm to install a package and don't tell it a specific version, then +it will install the specified tag. + +Also the tag that is added to the package@version specified by the `npm +tag` command, if no explicit tag is given. + +#### tag-version-prefix + +* Default: `"v"` +* Type: String + +If set, alters the prefix used when tagging a new version when performing a +version increment using `npm-version`. To remove the prefix altogether, set it +to the empty string: `""`. + +Because other tools may rely on the convention that npm version tags look like +`v1.0.0`, _only use this property if it is absolutely necessary_. In +particular, use care when overriding this setting for public packages. + +#### timing + +* Default: `false` +* Type: Boolean + +If true, writes an `npm-debug` log to `_logs` and timing information to +`_timing.json`, both in your cache. `_timing.json` is a newline delimited +list of JSON objects. You can quickly view it with this +[json](https://www.npmjs.com/package/json) command line: +`json -g < ~/.npm/_timing.json`. + +#### tmp + +* Default: TMPDIR environment variable, or "/tmp" +* Type: path + +Where to store temporary files and folders. All temp files are deleted +on success, but left behind on failure for forensic purposes. + +#### unicode + +* Default: false on windows, true on mac/unix systems with a unicode locale +* Type: Boolean + +When set to true, npm uses unicode characters in the tree output. When +false, it uses ascii characters to draw trees. + +#### unsafe-perm + +* Default: false if running as root, true otherwise +* Type: Boolean + +Set to true to suppress the UID/GID switching when running package +scripts. If set explicitly to false, then installing as a non-root user +will fail. + +#### update-notifier + +* Default: true +* Type: Boolean + +Set to false to suppress the update notification when using an older +version of npm than the latest. 
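+
+As a hedged illustration of the `sign-git-tag` and `git-tag-version` options above (this assumes git is already set up with a GPG key):
+
+```bash
+# Sign the tag that `npm version` creates
+npm config set sign-git-tag true
+npm version patch
+
+# Or skip the commit and tag entirely for a one-off bump
+npm version patch --no-git-tag-version
+```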
+ +#### usage + +* Default: false +* Type: Boolean + +Set to show short usage output (like the -H output) +instead of complete help when doing [`npm help`](/cli-commands/npm-help). + +#### user + +* Default: "nobody" +* Type: String or Number + +The UID to set to when running package scripts as root. + +#### userconfig + +* Default: ~/.npmrc +* Type: path + +The location of user-level configuration settings. + +#### umask + +* Default: 022 +* Type: Octal numeric string in range 0000..0777 (0..511) + +The "umask" value to use when setting the file creation mode on files +and folders. + +Folders and executables are given a mode which is `0777` masked against +this value. Other files are given a mode which is `0666` masked against +this value. Thus, the defaults are `0755` and `0644` respectively. + +#### user-agent + +* Default: node/{process.version} {process.platform} {process.arch} +* Type: String + +Sets the User-Agent request header. + +#### version + +* Default: false +* Type: boolean + +If true, output the npm version and exit successfully. + +Only relevant when specified explicitly on the command line. + +#### versions + +* Default: false +* Type: boolean + +If true, output the npm version as well as node's `process.versions` map, and +exit successfully. + +Only relevant when specified explicitly on the command line. + +#### viewer + +* Default: "man" on Posix, "browser" on Windows +* Type: path + +The program to use to view help content. + +Set to `"browser"` to view html help content in the default web browser. + +### See also + +* [npm config](/cli-commands/npm-config) +* [npmrc](/configuring-npm/npmrc) +* [npm scripts](/using-npm/scripts) +* [npm folders](/configuring-npm/folders) +* [npm](/cli-commands/npm) diff --git a/docs/content/using-npm/developers.md b/docs/content/using-npm/developers.md new file mode 100644 index 0000000000000..80b7fee6a544d --- /dev/null +++ b/docs/content/using-npm/developers.md @@ -0,0 +1,252 @@ +--- +section: using-npm +title: developers +description: Developer Guide +--- + +# developers(7) + +## Developer Guide + +### Description + +So, you've decided to use npm to develop (and maybe publish/deploy) +your project. + +Fantastic! + +There are a few things that you need to do above the simple steps +that your users will do to install your program. + +### About These Documents + +These are man pages. If you install npm, you should be able to +then do `man npm-thing` to get the documentation on a particular +topic, or `npm help thing` to see the same information. + +### What is a package + +A package is: + +* a) a folder containing a program described by a package.json file +* b) a gzipped tarball containing (a) +* c) a url that resolves to (b) +* d) a `<name>@<version>` that is published on the registry with (c) +* e) a `<name>@<tag>` that points to (d) +* f) a `<name>` that has a "latest" tag satisfying (e) +* g) a `git` url that, when cloned, results in (a). + +Even if you never publish your package, you can still get a lot of +benefits of using npm if you just want to write a node program (a), and +perhaps if you also want to be able to easily install it elsewhere +after packing it up into a tarball (b). + +Git urls can be of the form: + +```bash +git://github.com/user/project.git#commit-ish +git+ssh://user@hostname:project.git#commit-ish +git+http://user@hostname/project/blah.git#commit-ish +git+https://user@hostname/project/blah.git#commit-ish +``` + +The `commit-ish` can be any tag, sha, or branch which can be supplied as +an argument to `git checkout`.
The default is `master`. + +### The package.json File + +You need to have a `package.json` file in the root of your project to do +much of anything with npm. That is basically the whole interface. + +See [`package.json`](/configuring-npm/package-json) for details about what goes in that file. At the very +least, you need: + +* name: + This should be a string that identifies your project. Please do not + use the name to specify that it runs on node, or is in JavaScript. + You can use the "engines" field to explicitly state the versions of + node (or whatever else) that your program requires, and it's pretty + well assumed that it's JavaScript. + + It does not necessarily need to match your github repository name. + + So, `node-foo` and `bar-js` are bad names. `foo` or `bar` are better. + +* version: + A semver-compatible version. + +* engines: + Specify the versions of node (or whatever else) that your program + runs on. The node API changes a lot, and there may be bugs or new + functionality that you depend on. Be explicit. + +* author: + Take some credit. + +* scripts: + If you have a special compilation or installation script, then you + should put it in the `scripts` object. You should definitely have at + least a basic smoke-test command as the "scripts.test" field. + See [scripts](/using-npm/scripts). + +* main: + If you have a single module that serves as the entry point to your + program (like what the "foo" package gives you at require("foo")), + then you need to specify that in the "main" field. + +* directories: + This is an object mapping names to folders. The best ones to include are + "lib" and "doc", but if you use "man" to specify a folder full of man pages, + they'll get installed just like these ones. + +You can use `npm init` in the root of your package in order to get you +started with a pretty basic package.json file. See [`npm init`](/cli-commands/npm-init) for +more info. + +### Keeping files *out* of your package + +Use a `.npmignore` file to keep stuff out of your package. If there's +no `.npmignore` file, but there *is* a `.gitignore` file, then npm will +ignore the stuff matched by the `.gitignore` file. If you *want* to +include something that is excluded by your `.gitignore` file, you can +create an empty `.npmignore` file to override it. Like `git`, `npm` looks +for `.npmignore` and `.gitignore` files in all subdirectories of your +package, not only the root directory. + +`.npmignore` files follow the [same pattern rules](https://git-scm.com/book/en/v2/Git-Basics-Recording-Changes-to-the-Repository#Ignoring-Files) +as `.gitignore` files: + +* Blank lines or lines starting with `#` are ignored. +* Standard glob patterns work. +* You can end patterns with a forward slash `/` to specify a directory. +* You can negate a pattern by starting it with an exclamation point `!`. + +By default, the following paths and files are ignored, so there's no +need to add them to `.npmignore` explicitly: + +* `.*.swp` +* `._*` +* `.DS_Store` +* `.git` +* `.hg` +* `.npmrc` +* `.lock-wscript` +* `.svn` +* `.wafpickle-*` +* `config.gypi` +* `CVS` +* `npm-debug.log` + +Additionally, everything in `node_modules` is ignored, except for +bundled dependencies. npm automatically handles this for you, so don't +bother adding `node_modules` to `.npmignore`. 
+ +The following paths and files are never ignored, so adding them to +`.npmignore` is pointless: + +* `package.json` +* `README` (and its variants) +* `CHANGELOG` (and its variants) +* `LICENSE` / `LICENCE` + +If, given the structure of your project, you find `.npmignore` to be a +maintenance headache, you might instead try populating the `files` +property of `package.json`, which is an array of file or directory names +that should be included in your package. Sometimes a whitelist is easier +to manage than a blacklist. + +#### Testing whether your `.npmignore` or `files` config works + +If you want to double check that your package will include only the files +you intend it to when published, you can run the `npm pack` command locally +which will generate a tarball in the working directory, the same way it +does for publishing. + +### Link Packages + +`npm link` is designed to install a development package and see the +changes in real time without having to keep re-installing it. (You do +need to either re-link or `npm rebuild -g` to update compiled packages, +of course.) + +More info at [`npm link`](/cli-commands/npm-link). + +### Before Publishing: Make Sure Your Package Installs and Works + +**This is important.** + +If you can not install it locally, you'll have +problems trying to publish it. Or, worse yet, you'll be able to +publish it, but you'll be publishing a broken or pointless package. +So don't do that. + +In the root of your package, do this: + +```bash +npm install . -g +``` + +That'll show you that it's working. If you'd rather just create a symlink +package that points to your working directory, then do this: + +```bash +npm link +``` + +Use `npm ls -g` to see if it's there. + +To test a local install, go into some other folder, and then do: + +```bash +cd ../some-other-folder +npm install ../my-package +``` + +to install it locally into the node_modules folder in that other place. + +Then go into the node-repl, and try using require("my-thing") to +bring in your module's main module. + +### Create a User Account + +Create a user with the adduser command. It works like this: + +```bash +npm adduser +``` + +and then follow the prompts. + +This is documented better in [npm adduser](/cli-commands/npm-adduser). + +### Publish your package + +This part's easy. In the root of your folder, do this: + +```bash +npm publish +``` + +You can give publish a url to a tarball, or a filename of a tarball, +or a path to a folder. + +Note that pretty much **everything in that folder will be exposed** +by default. So, if you have secret stuff in there, use a +`.npmignore` file to list out the globs to ignore, or publish +from a fresh checkout. + +### Brag about it + +Send emails, write blogs, blab in IRC. + +Tell the world how easy it is to install your program! 
+ +### See also + +* [npm](/cli-commands/npm) +* [npm init](/cli-commands/npm-init) +* [package.json](/configuring-npm/package-json) +* [npm scripts](/using-npm/scripts) +* [npm publish](/cli-commands/npm-publish) +* [npm adduser](/cli-commands/npm-adduser) +* [npm registry](/using-npm/registry) diff --git a/docs/content/using-npm/disputes.md b/docs/content/using-npm/disputes.md new file mode 100644 index 0000000000000..bb2d25663cb26 --- /dev/null +++ b/docs/content/using-npm/disputes.md @@ -0,0 +1,137 @@ +--- +section: using-npm +title: disputes +description: Handling Module Name Disputes +--- + +# disputes(7) + +## Handling Module Name Disputes + +This document describes the steps that you should take to resolve module name +disputes with other npm publishers. It also describes special steps you should +take about names you think infringe your trademarks. + +This document is a clarification of the acceptable behavior outlined in the +[npm Code of Conduct](https://www.npmjs.com/policies/conduct), and nothing in +this document should be interpreted to contradict any aspect of the npm Code of +Conduct. + +### TL;DR + +1. Get the author email with `npm owner ls ` +2. Email the author, CC +3. After a few weeks, if there's no resolution, we'll sort it out. + +Don't squat on package names. Publish code or move out of the way. + +### Description + +There sometimes arise cases where a user publishes a module, and then later, +some other user wants to use that name. Here are some common ways that happens +(each of these is based on actual events.) + +1. Alice writes a JavaScript module `foo`, which is not node-specific. Alice + doesn't use node at all. Yusuf wants to use `foo` in node, so he wraps it in + an npm module. Some time later, Alice starts using node, and wants to take + over management of her program. +2. Yusuf writes an npm module `foo`, and publishes it. Perhaps much later, Alice + finds a bug in `foo`, and fixes it. She sends a pull request to Yusuf, but + Yusuf doesn't have the time to deal with it, because he has a new job and a + new baby and is focused on his new Erlang project, and kind of not involved + with node any more. Alice would like to publish a new `foo`, but can't, + because the name is taken. +3. Yusuf writes a 10-line flow-control library, and calls it `foo`, and + publishes it to the npm registry. Being a simple little thing, it never + really has to be updated. Alice works for Foo Inc, the makers of the + critically acclaimed and widely-marketed `foo` JavaScript toolkit framework. + They publish it to npm as `foojs`, but people are routinely confused when + `npm install foo` is some different thing. +4. Yusuf writes a parser for the widely-known `foo` file format, because he + needs it for work. Then, he gets a new job, and never updates the prototype. + Later on, Alice writes a much more complete `foo` parser, but can't publish, + because Yusuf's `foo` is in the way. + +1. `npm owner ls foo`. This will tell Alice the email address of the owner + (Yusuf). +2. Alice emails Yusuf, explaining the situation **as respectfully as possible**, + and what she would like to do with the module name. She adds the npm support + staff to the CC list of the email. Mention in the email + that Yusuf can run npm owner `add alice foo` to add Alice as an owner of the + foo package. +3. After a reasonable amount of time, if Yusuf has not responded, or if Yusuf + and Alice can't come to any sort of resolution, email support + and we'll sort it out. 
("Reasonable" is usually at least + 4 weeks.) + +### Reasoning + +In almost every case so far, the parties involved have been able to reach an +amicable resolution without any major intervention. Most people really do want +to be reasonable, and are probably not even aware that they're in your way. + +Module ecosystems are most vibrant and powerful when they are as self-directed +as possible. If an admin one day deletes something you had worked on, then that +is going to make most people quite upset, regardless of the justification. When +humans solve their problems by talking to other humans with respect, everyone +has the chance to end up feeling good about the interaction. + +### Exceptions + +Some things are not allowed, and will be removed without discussion if they are +brought to the attention of the npm registry admins, including but not limited +to: + +1. Malware (that is, a package designed to exploit or harm the machine on which + it is installed). +2. Violations of copyright or licenses (for example, cloning an MIT-licensed + program, and then removing or changing the copyright and license statement). +3. Illegal content. +4. "Squatting" on a package name that you plan to use, but aren't actually + using. Sorry, I don't care how great the name is, or how perfect a fit it is + for the thing that someday might happen. If someone wants to use it today, + and you're just taking up space with an empty tarball, you're going to be + evicted. +5. Putting empty packages in the registry. Packages must have SOME + functionality. It can be silly, but it can't be nothing. (See also: + squatting.) +6. Doing weird things with the registry, like using it as your own personal + application database or otherwise putting non-packagey things into it. +7. Other things forbidden by the npm + [Code of Conduct](https://www.npmjs.com/policies/conduct) such as hateful + language, pornographic content, or harassment. + +If you see bad behavior like this, please report it to right +away. **You are never expected to resolve abusive behavior on your own. We are +here to help.** + +### Trademarks + +If you think another npm publisher is infringing your trademark, such as by +using a confusingly similar package name, email with a link to +the package or user account on [https://www.npmjs.com/](https://www.npmjs.com/). +Attach a copy of your trademark registration certificate. + +If we see that the package's publisher is intentionally misleading others by +misusing your registered mark without permission, we will transfer the package +name to you. Otherwise, we will contact the package publisher and ask them to +clear up any confusion with changes to their package's `README` file or +metadata. + +### Changes + +This is a living document and may be updated from time to time. Please refer to +the [git history for this document](https://github.com/npm/cli/commits/latest/doc/misc/npm-disputes.md) +to view the changes. + +### License + +Copyright (C) npm, Inc., All rights reserved + +This document may be reused under a Creative Commons Attribution-ShareAlike +License. 
+ +### See also + +* [npm registry](/using-npm/registry) +* [npm owner](/cli-commands/npm-owner) diff --git a/docs/content/using-npm/orgs.md b/docs/content/using-npm/orgs.md new file mode 100644 index 0000000000000..9709a12d72675 --- /dev/null +++ b/docs/content/using-npm/orgs.md @@ -0,0 +1,97 @@ +--- +section: using-npm +title: orgs +description: Working with Teams & Orgs +--- + +# orgs(7) + +## Working with Teams & Orgs + +### Description + +There are three levels of org users: + +1. Super admin, controls billing & adding people to the org. +2. Team admin, manages team membership & package access. +3. Developer, works on packages they are given access to. + +The super admin is the only person who can add users to the org because it impacts the monthly bill. The super admin will use the website to manage membership. Every org has a `developers` team that all users are automatically added to. + +The team admin is the person who manages team creation, team membership, and package access for teams. The team admin grants package access to teams, not individuals. + +The developer will be able to access packages based on the teams they are on. Access is either read-write or read-only. + +There are two main commands: + +1. `npm team` see [npm team](/cli-commands/npm-team) for more details +2. `npm access` see [npm access](/cli-commands/npm-access) for more details + +### Team Admins create teams + +* Check who you’ve added to your org: + +```bash +npm team ls <org>:developers +``` + +* Each org is automatically given a `developers` team, so you can see the whole list of team members in your org. This team automatically gets read-write access to all packages, but you can change that with the `access` command. + +* Create a new team: + +```bash +npm team create <org:team> +``` + +* Add members to that team: + +```bash +npm team add <org:team> <user> +``` + +### Publish a package and adjust package access + +* In the package directory, run + +```bash +npm init --scope=<org> +``` +to scope it for your org & publish as usual + +* Grant access: + +```bash +npm access grant <read-only|read-write> <org:team> [<package>] +``` + +* Revoke access: + +```bash +npm access revoke <org:team> [<package>] +``` + +### Monitor your package access + +* See what org packages a team member can access: + +```bash +npm access ls-packages <user> +``` + +* See packages available to a specific team: + +```bash +npm access ls-packages <org:team> +``` + +* Check which teams are collaborating on a package: + +```bash +npm access ls-collaborators <pkg> +``` + +### See also + +* [npm team](/cli-commands/npm-team) +* [npm access](/cli-commands/npm-access) +* [npm scope](/using-npm/scope) diff --git a/docs/content/using-npm/registry.md b/docs/content/using-npm/registry.md new file mode 100644 index 0000000000000..cd6a2e4d71eeb --- /dev/null +++ b/docs/content/using-npm/registry.md @@ -0,0 +1,107 @@ +--- +section: using-npm +title: registry +description: The JavaScript Package Registry +--- + +# registry(7) + +## The JavaScript Package Registry + +### Description + +To resolve packages by name and version, npm talks to a registry website +that implements the CommonJS Package Registry specification for reading +package info. + +npm is configured to use npm, Inc.'s public registry at +https://registry.npmjs.org by default. Use of the npm public registry is +subject to npm's terms of use. + +You can configure npm to use any compatible registry you like, and even run +your own registry. Use of someone else's registry may be governed by their +terms of use.
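+
+A brief sketch of pointing npm at a different compatible registry, either permanently or for a single command (`registry.example.com` is a placeholder):
+
+```bash
+# Use another registry for everything
+npm config set registry https://registry.example.com/
+
+# Or override it for just one install
+npm install somepackage --registry=https://registry.example.com/
+```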
+ +npm's package registry implementation supports several +write APIs as well, to allow for publishing packages and managing user +account information. + +The npm public registry is powered by a CouchDB database, +of which there is a public mirror at +. The code for the couchapp is +available at . + +The registry URL used is determined by the scope of the package (see +[`scope`](/using-npm/scope). If no scope is specified, the default registry is used, which is +supplied by the `registry` config parameter. See [`npm config`](/cli-commands/npm-config), +[`npmrc`](/configuring-npm/npmrc), and [`config`](/using-npm/config) for more on managing npm's configuration. + +### Does npm send any information about me back to the registry? + +Yes. + +When making requests of the registry npm adds two headers with information +about your environment: + +* `Npm-Scope` – If your project is scoped, this header will contain its + scope. In the future npm hopes to build registry features that use this + information to allow you to customize your experience for your + organization. +* `Npm-In-CI` – Set to "true" if npm believes this install is running in a + continuous integration environment, "false" otherwise. This is detected by + looking for the following environment variables: `CI`, `TDDIUM`, + `JENKINS_URL`, `bamboo.buildKey`. If you'd like to learn more you may find + the [original PR](https://github.com/npm/npm-registry-client/pull/129) + interesting. + This is used to gather better metrics on how npm is used by humans, versus + build farms. + +The npm registry does not try to correlate the information in these headers +with any authenticated accounts that may be used in the same requests. + +### Can I run my own private registry? + +Yes! + +The easiest way is to replicate the couch database, and use the same (or +similar) design doc to implement the APIs. + +If you set up continuous replication from the official CouchDB, and then +set your internal CouchDB as the registry config, then you'll be able +to read any published packages, in addition to your private ones, and by +default will only publish internally. + +If you then want to publish a package for the whole world to see, you can +simply override the `--registry` option for that `publish` command. + +### I don't want my package published in the official registry. It's private. + +Set `"private": true` in your package.json to prevent it from being +published at all, or +`"publishConfig":{"registry":"http://my-internal-registry.local"}` +to force it to be published only to your internal registry. + +See [`package.json`](/configuring-npm/package-json) for more info on what goes in the package.json file. + +### Will you replicate from my registry into the public one? + +No. If you want things to be public, then publish them into the public +registry using npm. What little security there is would be for nought +otherwise. + +### Do I have to use couchdb to build a registry that npm can talk to? + +No, but it's way easier. Basically, yes, you do, or you have to +effectively implement the entire CouchDB API anyway. + +### Is there a website or something to see package docs and such? 
+ +Yes, head over to https://www.npmjs.com/ + +### See also + +* [npm config](/cli-commands/npm-config) +* [config](/using-npm/config) +* [npmrc](/configuring-npm/npmrc) +* [npm developers](/using-npm/developers) +* [npm disputes](/using-npm/disputes) diff --git a/docs/content/using-npm/removal.md b/docs/content/using-npm/removal.md new file mode 100644 index 0000000000000..7c83684673240 --- /dev/null +++ b/docs/content/using-npm/removal.md @@ -0,0 +1,70 @@ +--- +section: using-npm +title: removal +description: Cleaning the Slate +--- + +# removal(7) + +## Cleaning the Slate + +### Synopsis + +So sad to see you go. + +```bash +sudo npm uninstall npm -g +``` + +Or, if that fails, get the npm source code, and do: + +```bash +sudo make uninstall +``` + +### More Severe Uninstalling + +Usually, the above instructions are sufficient. That will remove +npm, but leave behind anything you've installed. + +If that doesn't work, or if you require more drastic measures, +continue reading. + +Note that this is only necessary for globally-installed packages. Local +installs are completely contained within a project's `node_modules` +folder. Delete that folder, and everything is gone (unless a package's +install script is particularly ill-behaved). + +This assumes that you installed node and npm in the default place. If +you configured node with a different `--prefix`, or installed npm with a +different prefix setting, then adjust the paths accordingly, replacing +`/usr/local` with your install prefix. + +To remove everything npm-related manually: + +```bash +rm -rf /usr/local/{lib/node{,/.npm,_modules},bin,share/man}/npm* +``` + +If you installed things *with* npm, then your best bet is to uninstall +them with npm first, and then install them again once you have a +proper install. This can help find any symlinks that are lying +around: + +```bash +ls -laF /usr/local/{lib/node{,/.npm},bin,share/man} | grep npm +``` + +Prior to version 0.3, npm used shim files for executables and node +modules. To track those down, you can do the following: + +```bash +find /usr/local/{lib/node,bin} -exec grep -l npm \{\} \; ; +``` + +(This is also in the README file.) + +### See also + +* [npm uninstall](/cli-commands/npm-uninstall) +* [npm prune](/cli-commands/npm-prune) diff --git a/docs/content/using-npm/scope.md b/docs/content/using-npm/scope.md new file mode 100644 index 0000000000000..2cbc108f0db92 --- /dev/null +++ b/docs/content/using-npm/scope.md @@ -0,0 +1,131 @@ +--- +section: using-npm +title: scope +description: Scoped packages +--- +# scope(7) + +## Scoped packages + +### Description + +All npm packages have a name. Some package names also have a scope. A scope +follows the usual rules for package names (URL-safe characters, no leading dots +or underscores). When used in package names, scopes are preceded by an `@` symbol +and followed by a slash, e.g. + +```bash +@somescope/somepackagename +``` + +Scopes are a way of grouping related packages together, and also affect a few +things about the way npm treats the package. + +Each npm user/organization has their own scope, and only you can add packages +in your scope. This means you don't have to worry about someone taking your +package name ahead of you. Thus it is also a good way to signal official packages +for organizations. + +Scoped packages can be published and installed as of `npm@2` and are supported +by the primary npm registry. Unscoped packages can depend on scoped packages and +vice versa.
The npm client is backwards-compatible with unscoped registries, +so it can be used to work with scoped and unscoped registries at the same time. + +### Installing scoped packages + +Scoped packages are installed to a sub-folder of the regular installation +folder, e.g. if your other packages are installed in `node_modules/packagename`, +scoped modules will be installed in `node_modules/@myorg/packagename`. The scope +folder (`@myorg`) is simply the name of the scope preceded by an `@` symbol, and can +contain any number of scoped packages. + +A scoped package is installed by referencing it by name, preceded by an +`@` symbol, in `npm install`: + +```bash +npm install @myorg/mypackage +``` + +Or in `package.json`: + +```json +"dependencies": { + "@myorg/mypackage": "^1.3.0" +} +``` + +Note that if the `@` symbol is omitted, in either case, npm will instead attempt to +install from GitHub; see [`npm install`](/cli-commands/npm-install). + +### Requiring scoped packages + +Because scoped packages are installed into a scope folder, you have to +include the name of the scope when requiring them in your code, e.g. + +```javascript +require('@myorg/mypackage') +``` + +There is nothing special about the way Node treats scope folders. This +simply requires the `mypackage` module in the folder named `@myorg`. + +### Publishing scoped packages + +Scoped packages can be published from the CLI as of `npm@2` and can be +published to any registry that supports them, including the primary npm +registry. + +(As of 2015-04-19, and with npm 2.0 or better, the primary npm registry +**does** support scoped packages.) + +If you wish, you may associate a scope with a registry; see below. + +#### Publishing public scoped packages to the primary npm registry + +To publish a public scoped package, you must specify `--access public` with +the initial publication. This will publish the package and set access +to `public` as if you had run `npm access public` after publishing. + +#### Publishing private scoped packages to the npm registry + +To publish a private scoped package to the npm registry, you must have +an [npm Private Modules](https://docs.npmjs.com/private-modules/intro) +account. + +You can then publish the module with `npm publish` or `npm publish +--access restricted`, and it will be present in the npm registry, with +restricted access. You can then change the access permissions, if +desired, with `npm access` or on the npmjs.com website. + +### Associating a scope with a registry + +Scopes can be associated with a separate registry. This allows you to +seamlessly use a mix of packages from the primary npm registry and one or more +private registries, such as npm Enterprise. + +You can associate a scope with a registry at login, e.g. + +```bash +npm login --registry=http://reg.example.com --scope=@myco +``` + +Scopes have a many-to-one relationship with registries: one registry can +host multiple scopes, but a scope only ever points to one registry. + +You can also associate a scope with a registry using `npm config`: + +```bash +npm config set @myco:registry http://reg.example.com +``` + +Once a scope is associated with a registry, any `npm install` for a package +with that scope will request packages from that registry instead. Any +`npm publish` for a package name that contains the scope will be published to +that registry instead. 
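+
+The initial `--access public` publication described above has no example of its own, so here is a hedged one (the directory and scope are hypothetical):
+
+```bash
+# First publish of a public scoped package must state the access level explicitly
+cd my-scoped-package
+npm publish --access public
+```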
+ +### See also + +* [npm install](/cli-commands/npm-install) +* [npm publish](/cli-commands/npm-publish) +* [npm access](/cli-commands/npm-access) +* [npm registry](/using-npm/registry) diff --git a/docs/content/using-npm/scripts.md b/docs/content/using-npm/scripts.md new file mode 100644 index 0000000000000..a518e804727a5 --- /dev/null +++ b/docs/content/using-npm/scripts.md @@ -0,0 +1,296 @@ +--- +section: using-npm +title: scripts +description: How npm handles the "scripts" field +--- + +# scripts(7) + +## How npm handles the "scripts" field + +### Description + +The `"scripts"` property of your `package.json` file supports a number of built-in scripts and their preset life cycle events as well as arbitrary scripts. These all can be executed by running `npm run-script <stage>` or `npm run <stage>` for short. *Pre* and *post* commands with matching names will be run for those as well (e.g. `premyscript`, `myscript`, `postmyscript`). Scripts from dependencies can be run with `npm explore <pkg> -- npm run <stage>`. + +### Pre & Post Scripts + +To create "pre" or "post" scripts for any scripts defined in the `"scripts"` section of the `package.json`, simply create another script *with a matching name* and add "pre" or "post" to the beginning of them. + +```json +{ + "scripts": { + "precompress": "{{ executes BEFORE the `compress` script }}", + "compress": "{{ run command to compress files }}", + "postcompress": "{{ executes AFTER `compress` script }}" + } +} +``` + +### Life Cycle Scripts + +There are some special life cycle scripts that happen only in certain situations. These scripts happen in addition to the "pre" and "post" scripts. +* `prepare`, `prepublish`, `prepublishOnly`, `prepack`, `postpack` + +**prepare** (since `npm@4.0.0`) +* Runs BEFORE the package is packed +* Runs BEFORE the package is published +* Runs on local `npm install` without any arguments +* Runs AFTER `prepublish`, but BEFORE `prepublishOnly` +* NOTE: If a package being installed through git contains a `prepare` script, its `dependencies` and `devDependencies` will be installed, and the prepare script will be run, before the package is packaged and installed. + +**prepublish** (DEPRECATED) +* Same as `prepare` + +**prepublishOnly** +* Runs BEFORE the package is prepared and packed, ONLY on `npm publish`. + +**prepack** +* Runs BEFORE a tarball is packed (on "`npm pack`", "`npm publish`", and when installing git dependencies). +* NOTE: "`npm run pack`" is NOT the same as "`npm pack`". "`npm run pack`" is an arbitrary user-defined script name, whereas "`npm pack`" is a CLI-defined command. + +**postpack** +* Runs AFTER the tarball has been generated and moved to its final destination. + +#### Prepare and Prepublish + +**Deprecation Note: prepublish** + +Since `npm@1.1.71`, the npm CLI has run the `prepublish` script for both `npm publish` and `npm install`, because it's a convenient way to prepare a package for use (some common use cases are described in the section below). It has also turned out to be, in practice, [very confusing](https://github.com/npm/npm/issues/10074). As of `npm@4.0.0`, a new event has been introduced, `prepare`, that preserves this existing behavior. A _new_ event, `prepublishOnly`, has been added as a transitional strategy to allow users to avoid the confusing behavior of existing npm versions and only run on `npm publish` (for instance, running the tests one last time to ensure they're in good shape). + +See https://github.com/npm/npm/issues/10074 for a much lengthier justification, with further reading, for this change.
+ +**Use Cases** + +If you need to perform operations on your package before it is used, in a way that is not dependent on the operating system or architecture of the target system, use a `prepublish` script. This includes tasks such as: + +* Compiling CoffeeScript source code into JavaScript. +* Creating minified versions of JavaScript source code. +* Fetching remote resources that your package will use. + +The advantage of doing these things at `prepublish` time is that they can be done once, in a single place, thus reducing complexity and variability. Additionally, this means that: + +* You can depend on `coffee-script` as a `devDependency`, and thus + your users don't need to have it installed. +* You don't need to include minifiers in your package, reducing + the size for your users. +* You don't need to rely on your users having `curl` or `wget` or + other system tools on the target machines. + +### Life Cycle Operation Order + +#### [`npm publish`](/cli-commands/npm-publish) + +* `prepublishOnly` +* `prepare` +* `prepublish` +* `publish` +* `postpublish` + +#### [`npm pack`](/cli-commands/npm-pack) + +* `prepack` +* `postpack` + +#### [`npm install`](/cli-commands/npm-install) + +* `preinstall` +* `install` +* `postinstall` + +Also triggers + +* `prepublish` (when on local) +* `prepare` (when on local) + +#### [`npm start`](/cli-commands/npm-start) + +`npm run start` has an `npm start` shorthand. + +* `prestart` +* `start` +* `poststart` + +### Default Values +npm will default some script values based on package contents. + +* `"start": "node server.js"`: + + If there is a `server.js` file in the root of your package, then npm + will default the `start` command to `node server.js`. + +* `"install": "node-gyp rebuild"`: + + If there is a `binding.gyp` file in the root of your package and you + haven't defined your own `install` or `preinstall` scripts, npm will + default the `install` command to compile using node-gyp. + +### User + +If npm was invoked with root privileges, then it will change the uid +to the user account or uid specified by the `user` config, which +defaults to `nobody`. Set the `unsafe-perm` flag to run scripts with +root privileges. + +### Environment + +Package scripts run in an environment where many pieces of information +are made available regarding the setup of npm and the current state of +the process. + + +#### path + +If you depend on modules that define executable scripts, like test +suites, then those executables will be added to the `PATH` for +executing the scripts. So, if your package.json has this: + +```json +{ "name" : "foo" +, "dependencies" : { "bar" : "0.1.x" } +, "scripts": { "start" : "bar ./test" } } +``` + +then you could run `npm start` to execute the `bar` script, which is +exported into the `node_modules/.bin` directory on `npm install`. + +#### package.json vars + +The package.json fields are tacked onto the `npm_package_` prefix. So, +for instance, if you had `{"name":"foo", "version":"1.2.5"}` in your +package.json file, then your package scripts would have the +`npm_package_name` environment variable set to "foo", and the +`npm_package_version` set to "1.2.5". You can access these variables +in your code with `process.env.npm_package_name` and +`process.env.npm_package_version`, and so on for other fields. + +#### configuration + +Configuration parameters are put in the environment with the +`npm_config_` prefix. For instance, you can view the effective `root` +config by checking the `npm_config_root` environment variable. 
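+
+One way to see exactly which `npm_package_*` and `npm_config_*` variables a script will receive is npm's built-in `env` script (a quick sketch; the grep is only there to trim the listing):
+
+```bash
+# Print the environment exactly as package scripts will see it
+npm run env | grep '^npm_' | sort
+```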
+ +#### Special: package.json "config" object + +The package.json "config" keys are overwritten in the environment if +there is a config param of `<name>[@<version>]:<key>`. For example, +if the package.json has this: + +```json +{ "name" : "foo" +, "config" : { "port" : "8080" } +, "scripts" : { "start" : "node server.js" } } +``` + +and the server.js is this: + +```javascript +http.createServer(...).listen(process.env.npm_package_config_port) +``` + +then the user could change the behavior by doing: + +```bash +npm config set foo:port 80 +``` + +#### current lifecycle event + +Lastly, the `npm_lifecycle_event` environment variable is set to +whichever stage of the cycle is being executed. So, you could have a +single script used for different parts of the process which switches +based on what's currently happening. + +Objects are flattened following this format, so if you had +`{"scripts":{"install":"foo.js"}}` in your package.json, then you'd +see this in the script: + +```bash +process.env.npm_package_scripts_install === "foo.js" +``` + +### Examples + +For example, if your package.json contains this: + +```json +{ "scripts" : + { "install" : "scripts/install.js" + , "postinstall" : "scripts/postinstall.js" + , "uninstall" : "scripts/uninstall.js" + } +} +``` + +then `scripts/install.js` will be called for the install +and post-install stages of the lifecycle, and `scripts/uninstall.js` +will be called when the package is uninstalled. Since +`scripts/install.js` is running for two different phases, it would +be wise in this case to look at the `npm_lifecycle_event` environment +variable. + +If you want to run a make command, you can do so. This works just +fine: + +```json +{ "scripts" : + { "preinstall" : "./configure" + , "install" : "make && make install" + , "test" : "make test" + } +} +``` + +### Exiting + +Scripts are run by passing the line as a script argument to `sh`. + +If the script exits with a code other than 0, then this will abort the +process. + +Note that these script files don't have to be Node.js or even +JavaScript programs. They just have to be some kind of executable +file. + +### Hook Scripts + +If you want to run a specific script at a specific lifecycle event for +ALL packages, then you can use a hook script. + +Place an executable file at `node_modules/.hooks/{eventname}`, and +it'll get run for all packages when they are going through that point +in the package lifecycle for any packages installed in that root. + +Hook scripts are run exactly the same way as package.json scripts. +That is, they are in a separate child process, with the env described +above. + +### Best Practices + +* Don't exit with a non-zero error code unless you *really* mean it. + Except for uninstall scripts, this will cause the npm action to + fail, and potentially be rolled back. If the failure is minor or + will only prevent some optional features, then it's better to just + print a warning and exit successfully. +* Try not to use scripts to do what npm can do for you. Read through + [`package.json`](/configuring-npm/package-json) to see all the things that you can specify and enable + by simply describing your package appropriately. In general, this + will lead to a more robust and consistent state. +* Inspect the env to determine where to put things. For instance, if + the `npm_config_binroot` environment variable is set to `/home/user/bin`, then + don't try to install executables into `/usr/local/bin`. The user + probably set it up that way for a reason.
+* Don't prefix your script commands with "sudo". If root permissions + are required for some reason, then it'll fail with that error, and + the user will sudo the npm command in question. +* Don't use `install`. Use a `.gyp` file for compilation, and `prepublish` + for anything else. You should almost never have to explicitly set a + preinstall or install script. If you are doing this, please consider if + there is another option. The only valid use of `install` or `preinstall` + scripts is for compilation which must be done on the target architecture. + +### See Also + +* [npm run-script](/cli-commands/npm-run-script) +* [package.json](/configuring-npm/package-json) +* [npm developers](/using-npm/developers) +* [npm install](/cli-commands/npm-install) diff --git a/doc/misc/semver.md b/docs/content/using-npm/semver.md similarity index 86% rename from doc/misc/semver.md rename to docs/content/using-npm/semver.md index 1c2dbf55b8d2f..92c6381b7fe85 100644 --- a/doc/misc/semver.md +++ b/docs/content/using-npm/semver.md @@ -20,6 +20,7 @@ semver.clean(' =v1.2.3 ') // '1.2.3' semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true semver.gt('1.2.3', '9.8.7') // false semver.lt('1.2.3', '9.8.7') // true +semver.minVersion('>=1.0.0') // '1.0.0' semver.valid(semver.coerce('v2')) // '2.0.0' semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7' ``` @@ -29,9 +30,7 @@ As a command-line utility: ``` $ semver -h -SemVer 5.3.0 - -A JavaScript implementation of the http://semver.org/ specification +A JavaScript implementation of the https://semver.org/ specification Copyright Isaac Z. Schlueter Usage: semver [options] <version> [<version> [...]] Options: -l --loose Interpret versions and ranges loosely +-p --include-prerelease + Always include prerelease versions in range matching + -c --coerce Coerce a string into SemVer if possible (does not imply --loose) @@ -70,7 +72,7 @@ multiple versions to the utility will just sort them. ## Versions A "version" is described by the `v2.0.0` specification found at -<http://semver.org/>. +<https://semver.org/>. A leading `"="` or `"v"` character is stripped off and ignored. @@ -136,6 +138,13 @@ the user is indicating that they are aware of the risk. However, it is still not appropriate to assume that they have opted into taking a similar risk on the *next* set of prerelease versions. +Note that this behavior can be suppressed (treating all prerelease +versions as if they were normal versions, for the purpose of range +matching) by setting the `includePrerelease` flag on the options +object to any +[functions](https://github.com/npm/node-semver#functions) that do +range matching. + #### Prerelease Identifiers The method `.inc` takes an additional `identifier` string argument that @@ -289,9 +298,19 @@ part ::= nr | [-0-9A-Za-z]+ ## Functions -All methods and classes take a final `loose` boolean argument that, if -true, will be more forgiving about not-quite-valid semver strings. -The resulting output will always be 100% strict, of course. +All methods and classes take a final `options` object argument. All +options in this object are `false` by default. The options supported +are: + +- `loose` Be more forgiving about not-quite-valid semver strings. + (Any resulting output will always be 100% strict compliant, of + course.) For backwards compatibility reasons, if the `options` + argument is a boolean value instead of an object, it is interpreted + to be the `loose` param.
+- `includePrerelease` Set to suppress the [default + behavior](https://github.com/npm/node-semver#prerelease-tags) of + excluding prerelease tagged versions from ranges unless they are + explicitly opted into. Strict-mode Comparators and Ranges will be strict about the SemVer strings that they parse. @@ -314,6 +333,8 @@ strings that they parse. * `patch(v)`: Return the patch version number. * `intersects(r1, r2, loose)`: Return true if the two supplied ranges or comparators intersect. +* `parse(v)`: Attempt to parse a string as a semantic version, returning either + a `SemVer` object or `null`. ### Comparison @@ -350,6 +371,8 @@ strings that they parse. that satisfies the range, or `null` if none of them do. * `minSatisfying(versions, range)`: Return the lowest version in the list that satisfies the range, or `null` if none of them do. +* `minVersion(range)`: Return the lowest version that can possibly match + the given range. * `gtr(version, range)`: Return `true` if version is greater than all the versions possible in the range. * `ltr(version, range)`: Return `true` if version is less than all the @@ -375,14 +398,15 @@ range, use the `satisfies(version, range)` function. * `coerce(version)`: Coerces a string to semver if possible -This aims to provide a very forgiving translation of a non-semver -string to semver. It looks for the first digit in a string, and -consumes all remaining characters which satisfy at least a partial semver -(e.g., `1`, `1.2`, `1.2.3`) up to the max permitted length (256 characters). -Longer versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). -All surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes `3.4.0`). -Only text which lacks digits will fail coercion (`version one` is not valid). -The maximum length for any semver component considered for coercion is 16 characters; -longer components will be ignored (`10000000000000000.4.7.4` becomes `4.7.4`). -The maximum value for any semver component is `Integer.MAX_SAFE_INTEGER || (2**53 - 1)`; -higher value components are invalid (`9999999999999999.4.7.4` is likely invalid). +This aims to provide a very forgiving translation of a non-semver string to +semver. It looks for the first digit in a string, and consumes all +remaining characters which satisfy at least a partial semver (e.g., `1`, +`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer +versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All +surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes +`3.4.0`). Only text which lacks digits will fail coercion (`version one` +is not valid). The maximum length for any semver component considered for +coercion is 16 characters; longer components will be ignored +(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any +semver component is `Number.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value +components are invalid (`9999999999999999.4.7.4` is likely invalid). 
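+
+For instance (a quick sketch based only on the functions and options described above), the `options` argument and `coerce` can be exercised like so:
+
+```javascript
+const semver = require('semver')
+
+// Prerelease versions are excluded from ordinary range matching by default...
+semver.satisfies('1.3.0-beta.2', '^1.2.0') // false
+// ...unless the includePrerelease option is set
+semver.satisfies('1.3.0-beta.2', '^1.2.0', { includePrerelease: true }) // true
+
+// coerce() pulls the first version-like string out of arbitrary text
+semver.valid(semver.coerce('v3.4 replaces v3.3.1')) // '3.4.0'
+
+// minVersion() reports the lowest version a range could possibly match
+semver.minVersion('^1.2.3') // '1.2.3'
+```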
diff --git a/docs/gatsby-browser.js b/docs/gatsby-browser.js new file mode 100644 index 0000000000000..b1425d1fb58cf --- /dev/null +++ b/docs/gatsby-browser.js @@ -0,0 +1,8 @@ +import React from 'react' +import Layout from './src/components/layout' +require('prismjs/themes/prism-tomorrow.css') +require('./src/main.css') + +export const wrapPageElement = ({ element, props }) => { + return {element} +} diff --git a/docs/gatsby-config.js b/docs/gatsby-config.js new file mode 100644 index 0000000000000..f44e1bc4edec4 --- /dev/null +++ b/docs/gatsby-config.js @@ -0,0 +1,96 @@ +const IS_STATIC = process.env.GATSBY_IS_STATIC + +const OPTS = { + siteMetadata: { + title: 'npm cli documentation', + description: 'Documentation for the npm cli.', + author: '@gatsbyjs' + }, + plugins: [ + 'gatsby-plugin-root-import', + 'gatsby-plugin-react-helmet', + 'gatsby-plugin-catch-links', + 'gatsby-plugin-styled-components', + { + resolve: 'gatsby-source-filesystem', + options: { + name: 'src', + path: `${__dirname}/content/` + } + }, + { + resolve: 'gatsby-plugin-no-sourcemaps' + }, + 'gatsby-plugin-sharp', + { + resolve: 'gatsby-plugin-manifest', + options: { + name: 'gatsby-starter-default', + short_name: 'starter', + start_url: '/', + background_color: '#663399', + theme_color: '#663399', + display: 'minimal-ui', + icon: 'src/images/npm-icon.png' // This path is relative to the root of the site. + } + }, + { + resolve: 'gatsby-plugin-prefetch-google-fonts', + options: { + fonts: [ + { + family: 'Poppins', + subsets: ['latin'], + variants: ['300', '400', '500'] + }, + { + family: 'Inconsolata', + subsets: ['latin'], + variants: ['400', '700'] + } + ] + } + }, + { + resolve: 'gatsby-transformer-remark', + options: { + // CommonMark mode (default: true) + commonmark: true, + // Footnotes mode (default: true) + footnotes: true, + // Pedantic mode (default: true) + pedantic: true, + // GitHub Flavored Markdown mode (default: true) + gfm: true, + // Plugins configs + plugins: [{ + resolve: 'gatsby-remark-autolink-headers', + options: { + offsetY: '100', + icon: '', + className: 'header-link-class', + maintainCase: false, + removeAccents: true + } + }, + { + resolve: 'gatsby-remark-prismjs', + options: { + classPrefix: 'language-', + inlineCodeMarker: null, + aliases: {}, + showLineNumbers: false, + noInlineHighlight: false + } + }] + } + } + ] +} + +const STATIC_OPTS = Object.assign({}, OPTS, { + pathPrefix: '__GATSBY_IPFS_PATH_PREFIX__', + plugins: OPTS.plugins.concat(['gatsby-plugin-ipfs']) +}) + +module.exports = IS_STATIC ? 
STATIC_OPTS : OPTS diff --git a/docs/gatsby-node.js b/docs/gatsby-node.js new file mode 100644 index 0000000000000..01affc6307f45 --- /dev/null +++ b/docs/gatsby-node.js @@ -0,0 +1,43 @@ +const {createFilePath} = require('gatsby-source-filesystem') +const path = require('path') + +exports.onCreateNode = ({node, getNode, actions}) => { + const {createNodeField} = actions + if (node.internal.type === 'MarkdownRemark') { + const slug = createFilePath({node, getNode, basePath: 'content', trailingSlash: false}) + createNodeField({ + node, + name: 'slug', + value: slug + }) + } +} + +exports.createPages = ({graphql, actions}) => { + const {createPage} = actions + return graphql(` + { + allMarkdownRemark { + edges { + node { + id + fields { + slug + } + html + } + } + } + } + `).then(result => { + result.data.allMarkdownRemark.edges.forEach(({node}) => { + createPage({ + path: node.fields.slug, + component: path.resolve('./src/templates/Page.js'), + context: { + slug: node.fields.slug + } + }) + }) + }) +} diff --git a/docs/gatsby-ssr.js b/docs/gatsby-ssr.js new file mode 100644 index 0000000000000..33361c0a38ca7 --- /dev/null +++ b/docs/gatsby-ssr.js @@ -0,0 +1,6 @@ +import React from 'react' +import Layout from './src/components/layout' + +export const wrapPageElement = ({ element, props }) => { + return {element} +} diff --git a/docs/package-lock.json b/docs/package-lock.json new file mode 100644 index 0000000000000..75bda15a6ee8d --- /dev/null +++ b/docs/package-lock.json @@ -0,0 +1,18516 @@ +{ + "name": "npm-cli-docs", + "version": "0.1.0", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "@babel/code-frame": { + "version": "7.5.5", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.5.5.tgz", + "integrity": "sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw==", + "requires": { + "@babel/highlight": "^7.0.0" + } + }, + "@babel/core": { + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.7.7.tgz", + "integrity": "sha512-jlSjuj/7z138NLZALxVgrx13AOtqip42ATZP7+kYl53GvDV6+4dCek1mVUo8z8c8Xnw/mx2q3d9HWh3griuesQ==", + "requires": { + "@babel/code-frame": "^7.5.5", + "@babel/generator": "^7.7.7", + "@babel/helpers": "^7.7.4", + "@babel/parser": "^7.7.7", + "@babel/template": "^7.7.4", + "@babel/traverse": "^7.7.4", + "@babel/types": "^7.7.4", + "convert-source-map": "^1.7.0", + "debug": "^4.1.0", + "json5": "^2.1.0", + "lodash": "^4.17.13", + "resolve": "^1.3.2", + "semver": "^5.4.1", + "source-map": "^0.5.0" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "@babel/generator": { + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.7.7.tgz", + "integrity": "sha512-/AOIBpHh/JU1l0ZFS4kiRCBnLi6OTHzh0RPk3h9isBxkkqELtQNFi1Vr/tiG9p1yfoUdKVwISuXWQR+hwwM4VQ==", + "requires": { + "@babel/types": "^7.7.4", + "jsesc": "^2.5.1", + "lodash": "^4.17.13", + "source-map": "^0.5.0" + } + }, + "@babel/helper-annotate-as-pure": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.7.4.tgz", + "integrity": "sha512-2BQmQgECKzYKFPpiycoF9tlb5HA4lrVyAmLLVK177EcQAqjVLciUb2/R+n1boQ9y5ENV3uz2ZqiNw7QMBBw1Og==", + "requires": { + "@babel/types": "^7.7.4" + } + }, + 
"@babel/helper-builder-binary-assignment-operator-visitor": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.7.4.tgz", + "integrity": "sha512-Biq/d/WtvfftWZ9Uf39hbPBYDUo986m5Bb4zhkeYDGUllF43D+nUe5M6Vuo6/8JDK/0YX/uBdeoQpyaNhNugZQ==", + "requires": { + "@babel/helper-explode-assignable-expression": "^7.7.4", + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-builder-react-jsx": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-builder-react-jsx/-/helper-builder-react-jsx-7.7.4.tgz", + "integrity": "sha512-kvbfHJNN9dg4rkEM4xn1s8d1/h6TYNvajy9L1wx4qLn9HFg0IkTsQi4rfBe92nxrPUFcMsHoMV+8rU7MJb3fCA==", + "requires": { + "@babel/types": "^7.7.4", + "esutils": "^2.0.0" + } + }, + "@babel/helper-call-delegate": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-call-delegate/-/helper-call-delegate-7.7.4.tgz", + "integrity": "sha512-8JH9/B7J7tCYJ2PpWVpw9JhPuEVHztagNVuQAFBVFYluRMlpG7F1CgKEgGeL6KFqcsIa92ZYVj6DSc0XwmN1ZA==", + "requires": { + "@babel/helper-hoist-variables": "^7.7.4", + "@babel/traverse": "^7.7.4", + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-create-class-features-plugin": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.7.4.tgz", + "integrity": "sha512-l+OnKACG4uiDHQ/aJT8dwpR+LhCJALxL0mJ6nzjB25e5IPwqV1VOsY7ah6UB1DG+VOXAIMtuC54rFJGiHkxjgA==", + "requires": { + "@babel/helper-function-name": "^7.7.4", + "@babel/helper-member-expression-to-functions": "^7.7.4", + "@babel/helper-optimise-call-expression": "^7.7.4", + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/helper-replace-supers": "^7.7.4", + "@babel/helper-split-export-declaration": "^7.7.4" + } + }, + "@babel/helper-create-regexp-features-plugin": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.7.4.tgz", + "integrity": "sha512-Mt+jBKaxL0zfOIWrfQpnfYCN7/rS6GKx6CCCfuoqVVd+17R8zNDlzVYmIi9qyb2wOk002NsmSTDymkIygDUH7A==", + "requires": { + "@babel/helper-regex": "^7.4.4", + "regexpu-core": "^4.6.0" + } + }, + "@babel/helper-define-map": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-define-map/-/helper-define-map-7.7.4.tgz", + "integrity": "sha512-v5LorqOa0nVQUvAUTUF3KPastvUt/HzByXNamKQ6RdJRTV7j8rLL+WB5C/MzzWAwOomxDhYFb1wLLxHqox86lg==", + "requires": { + "@babel/helper-function-name": "^7.7.4", + "@babel/types": "^7.7.4", + "lodash": "^4.17.13" + } + }, + "@babel/helper-explode-assignable-expression": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.7.4.tgz", + "integrity": "sha512-2/SicuFrNSXsZNBxe5UGdLr+HZg+raWBLE9vC98bdYOKX/U6PY0mdGlYUJdtTDPSU0Lw0PNbKKDpwYHJLn2jLg==", + "requires": { + "@babel/traverse": "^7.7.4", + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-function-name": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.7.4.tgz", + "integrity": "sha512-AnkGIdiBhEuiwdoMnKm7jfPfqItZhgRaZfMg1XX3bS25INOnLPjPG1Ppnajh8eqgt5kPJnfqrRHqFqmjKDZLzQ==", + "requires": { + "@babel/helper-get-function-arity": "^7.7.4", + "@babel/template": "^7.7.4", + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-get-function-arity": { + "version": "7.7.4", 
+ "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.7.4.tgz", + "integrity": "sha512-QTGKEdCkjgzgfJ3bAyRwF4yyT3pg+vDgan8DSivq1eS0gwi+KGKE5x8kRcbeFTb/673mkO5SN1IZfmCfA5o+EA==", + "requires": { + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-hoist-variables": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.7.4.tgz", + "integrity": "sha512-wQC4xyvc1Jo/FnLirL6CEgPgPCa8M74tOdjWpRhQYapz5JC7u3NYU1zCVoVAGCE3EaIP9T1A3iW0WLJ+reZlpQ==", + "requires": { + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-member-expression-to-functions": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.7.4.tgz", + "integrity": "sha512-9KcA1X2E3OjXl/ykfMMInBK+uVdfIVakVe7W7Lg3wfXUNyS3Q1HWLFRwZIjhqiCGbslummPDnmb7vIekS0C1vw==", + "requires": { + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-module-imports": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.7.4.tgz", + "integrity": "sha512-dGcrX6K9l8258WFjyDLJwuVKxR4XZfU0/vTUgOQYWEnRD8mgr+p4d6fCUMq/ys0h4CCt/S5JhbvtyErjWouAUQ==", + "requires": { + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-module-transforms": { + "version": "7.7.5", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.7.5.tgz", + "integrity": "sha512-A7pSxyJf1gN5qXVcidwLWydjftUN878VkalhXX5iQDuGyiGK3sOrrKKHF4/A4fwHtnsotv/NipwAeLzY4KQPvw==", + "requires": { + "@babel/helper-module-imports": "^7.7.4", + "@babel/helper-simple-access": "^7.7.4", + "@babel/helper-split-export-declaration": "^7.7.4", + "@babel/template": "^7.7.4", + "@babel/types": "^7.7.4", + "lodash": "^4.17.13" + } + }, + "@babel/helper-optimise-call-expression": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.7.4.tgz", + "integrity": "sha512-VB7gWZ2fDkSuqW6b1AKXkJWO5NyNI3bFL/kK79/30moK57blr6NbH8xcl2XcKCwOmJosftWunZqfO84IGq3ZZg==", + "requires": { + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-plugin-utils": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.0.0.tgz", + "integrity": "sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA==" + }, + "@babel/helper-regex": { + "version": "7.5.5", + "resolved": "https://registry.npmjs.org/@babel/helper-regex/-/helper-regex-7.5.5.tgz", + "integrity": "sha512-CkCYQLkfkiugbRDO8eZn6lRuR8kzZoGXCg3149iTk5se7g6qykSpy3+hELSwquhu+TgHn8nkLiBwHvNX8Hofcw==", + "requires": { + "lodash": "^4.17.13" + } + }, + "@babel/helper-remap-async-to-generator": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.7.4.tgz", + "integrity": "sha512-Sk4xmtVdM9sA/jCI80f+KS+Md+ZHIpjuqmYPk1M7F/upHou5e4ReYmExAiu6PVe65BhJPZA2CY9x9k4BqE5klw==", + "requires": { + "@babel/helper-annotate-as-pure": "^7.7.4", + "@babel/helper-wrap-function": "^7.7.4", + "@babel/template": "^7.7.4", + "@babel/traverse": "^7.7.4", + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-replace-supers": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.7.4.tgz", + "integrity": 
"sha512-pP0tfgg9hsZWo5ZboYGuBn/bbYT/hdLPVSS4NMmiRJdwWhP0IznPwN9AE1JwyGsjSPLC364I0Qh5p+EPkGPNpg==", + "requires": { + "@babel/helper-member-expression-to-functions": "^7.7.4", + "@babel/helper-optimise-call-expression": "^7.7.4", + "@babel/traverse": "^7.7.4", + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-simple-access": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.7.4.tgz", + "integrity": "sha512-zK7THeEXfan7UlWsG2A6CI/L9jVnI5+xxKZOdej39Y0YtDYKx9raHk5F2EtK9K8DHRTihYwg20ADt9S36GR78A==", + "requires": { + "@babel/template": "^7.7.4", + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-split-export-declaration": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.7.4.tgz", + "integrity": "sha512-guAg1SXFcVr04Guk9eq0S4/rWS++sbmyqosJzVs8+1fH5NI+ZcmkaSkc7dmtAFbHFva6yRJnjW3yAcGxjueDug==", + "requires": { + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-wrap-function": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.7.4.tgz", + "integrity": "sha512-VsfzZt6wmsocOaVU0OokwrIytHND55yvyT4BPB9AIIgwr8+x7617hetdJTsuGwygN5RC6mxA9EJztTjuwm2ofg==", + "requires": { + "@babel/helper-function-name": "^7.7.4", + "@babel/template": "^7.7.4", + "@babel/traverse": "^7.7.4", + "@babel/types": "^7.7.4" + } + }, + "@babel/helpers": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.7.4.tgz", + "integrity": "sha512-ak5NGZGJ6LV85Q1Zc9gn2n+ayXOizryhjSUBTdu5ih1tlVCJeuQENzc4ItyCVhINVXvIT/ZQ4mheGIsfBkpskg==", + "requires": { + "@babel/template": "^7.7.4", + "@babel/traverse": "^7.7.4", + "@babel/types": "^7.7.4" + } + }, + "@babel/highlight": { + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.5.0.tgz", + "integrity": "sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ==", + "requires": { + "chalk": "^2.0.0", + "esutils": "^2.0.2", + "js-tokens": "^4.0.0" + } + }, + "@babel/parser": { + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.7.7.tgz", + "integrity": "sha512-WtTZMZAZLbeymhkd/sEaPD8IQyGAhmuTuvTzLiCFM7iXiVdY0gc0IaI+cW0fh1BnSMbJSzXX6/fHllgHKwHhXw==" + }, + "@babel/plugin-proposal-async-generator-functions": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.7.4.tgz", + "integrity": "sha512-1ypyZvGRXriY/QP668+s8sFr2mqinhkRDMPSQLNghCQE+GAkFtp+wkHVvg2+Hdki8gwP+NFzJBJ/N1BfzCCDEw==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/helper-remap-async-to-generator": "^7.7.4", + "@babel/plugin-syntax-async-generators": "^7.7.4" + } + }, + "@babel/plugin-proposal-class-properties": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.7.4.tgz", + "integrity": "sha512-EcuXeV4Hv1X3+Q1TsuOmyyxeTRiSqurGJ26+I/FW1WbymmRRapVORm6x1Zl3iDIHyRxEs+VXWp6qnlcfcJSbbw==", + "requires": { + "@babel/helper-create-class-features-plugin": "^7.7.4", + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-proposal-dynamic-import": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.7.4.tgz", + "integrity": 
"sha512-StH+nGAdO6qDB1l8sZ5UBV8AC3F2VW2I8Vfld73TMKyptMU9DY5YsJAS8U81+vEtxcH3Y/La0wG0btDrhpnhjQ==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/plugin-syntax-dynamic-import": "^7.7.4" + } + }, + "@babel/plugin-proposal-json-strings": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.7.4.tgz", + "integrity": "sha512-wQvt3akcBTfLU/wYoqm/ws7YOAQKu8EVJEvHip/mzkNtjaclQoCCIqKXFP5/eyfnfbQCDV3OLRIK3mIVyXuZlw==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/plugin-syntax-json-strings": "^7.7.4" + } + }, + "@babel/plugin-proposal-nullish-coalescing-operator": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.7.4.tgz", + "integrity": "sha512-TbYHmr1Gl1UC7Vo2HVuj/Naci5BEGNZ0AJhzqD2Vpr6QPFWpUmBRLrIDjedzx7/CShq0bRDS2gI4FIs77VHLVQ==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.7.4" + } + }, + "@babel/plugin-proposal-object-rest-spread": { + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.7.7.tgz", + "integrity": "sha512-3qp9I8lelgzNedI3hrhkvhaEYree6+WHnyA/q4Dza9z7iEIs1eyhWyJnetk3jJ69RT0AT4G0UhEGwyGFJ7GUuQ==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/plugin-syntax-object-rest-spread": "^7.7.4" + } + }, + "@babel/plugin-proposal-optional-catch-binding": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.7.4.tgz", + "integrity": "sha512-DyM7U2bnsQerCQ+sejcTNZh8KQEUuC3ufzdnVnSiUv/qoGJp2Z3hanKL18KDhsBT5Wj6a7CMT5mdyCNJsEaA9w==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/plugin-syntax-optional-catch-binding": "^7.7.4" + } + }, + "@babel/plugin-proposal-optional-chaining": { + "version": "7.7.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.7.5.tgz", + "integrity": "sha512-sOwFqT8JSchtJeDD+CjmWCaiFoLxY4Ps7NjvwHC/U7l4e9i5pTRNt8nDMIFSOUL+ncFbYSwruHM8WknYItWdXw==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/plugin-syntax-optional-chaining": "^7.7.4" + } + }, + "@babel/plugin-proposal-unicode-property-regex": { + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.7.7.tgz", + "integrity": "sha512-80PbkKyORBUVm1fbTLrHpYdJxMThzM1UqFGh0ALEhO9TYbG86Ah9zQYAB/84axz2vcxefDLdZwWwZNlYARlu9w==", + "requires": { + "@babel/helper-create-regexp-features-plugin": "^7.7.4", + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-syntax-async-generators": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.7.4.tgz", + "integrity": "sha512-Li4+EjSpBgxcsmeEF8IFcfV/+yJGxHXDirDkEoyFjumuwbmfCVHUt0HuowD/iGM7OhIRyXJH9YXxqiH6N815+g==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-syntax-dynamic-import": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.7.4.tgz", + "integrity": 
"sha512-jHQW0vbRGvwQNgyVxwDh4yuXu4bH1f5/EICJLAhl1SblLs2CDhrsmCk+v5XLdE9wxtAFRyxx+P//Iw+a5L/tTg==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-syntax-json-strings": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.7.4.tgz", + "integrity": "sha512-QpGupahTQW1mHRXddMG5srgpHWqRLwJnJZKXTigB9RPFCCGbDGCgBeM/iC82ICXp414WeYx/tD54w7M2qRqTMg==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-syntax-jsx": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.7.4.tgz", + "integrity": "sha512-wuy6fiMe9y7HeZBWXYCGt2RGxZOj0BImZ9EyXJVnVGBKO/Br592rbR3rtIQn0eQhAk9vqaKP5n8tVqEFBQMfLg==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.7.4.tgz", + "integrity": "sha512-XKh/yIRPiQTOeBg0QJjEus5qiSKucKAiApNtO1psqG7D17xmE+X2i5ZqBEuSvo0HRuyPaKaSN/Gy+Ha9KFQolw==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-syntax-object-rest-spread": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.7.4.tgz", + "integrity": "sha512-mObR+r+KZq0XhRVS2BrBKBpr5jqrqzlPvS9C9vuOf5ilSwzloAl7RPWLrgKdWS6IreaVrjHxTjtyqFiOisaCwg==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-syntax-optional-catch-binding": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.7.4.tgz", + "integrity": "sha512-4ZSuzWgFxqHRE31Glu+fEr/MirNZOMYmD/0BhBWyLyOOQz/gTAl7QmWm2hX1QxEIXsr2vkdlwxIzTyiYRC4xcQ==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-syntax-optional-chaining": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.7.4.tgz", + "integrity": "sha512-2MqYD5WjZSbJdUagnJvIdSfkb/ucOC9/1fRJxm7GAxY6YQLWlUvkfxoNbUPcPLHJyetKUDQ4+yyuUyAoc0HriA==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-syntax-top-level-await": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.7.4.tgz", + "integrity": "sha512-wdsOw0MvkL1UIgiQ/IFr3ETcfv1xb8RMM0H9wbiDyLaJFyiDg5oZvDLCXosIXmFeIlweML5iOBXAkqddkYNizg==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-arrow-functions": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.7.4.tgz", + "integrity": "sha512-zUXy3e8jBNPiffmqkHRNDdZM2r8DWhCB7HhcoyZjiK1TxYEluLHAvQuYnTT+ARqRpabWqy/NHkO6e3MsYB5YfA==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-async-to-generator": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.7.4.tgz", + "integrity": "sha512-zpUTZphp5nHokuy8yLlyafxCJ0rSlFoSHypTUWgpdwoDXWQcseaect7cJ8Ppk6nunOM6+5rPMkod4OYKPR5MUg==", + "requires": { + "@babel/helper-module-imports": "^7.7.4", + 
"@babel/helper-plugin-utils": "^7.0.0", + "@babel/helper-remap-async-to-generator": "^7.7.4" + } + }, + "@babel/plugin-transform-block-scoped-functions": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.7.4.tgz", + "integrity": "sha512-kqtQzwtKcpPclHYjLK//3lH8OFsCDuDJBaFhVwf8kqdnF6MN4l618UDlcA7TfRs3FayrHj+svYnSX8MC9zmUyQ==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-block-scoping": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.7.4.tgz", + "integrity": "sha512-2VBe9u0G+fDt9B5OV5DQH4KBf5DoiNkwFKOz0TCvBWvdAN2rOykCTkrL+jTLxfCAm76l9Qo5OqL7HBOx2dWggg==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0", + "lodash": "^4.17.13" + } + }, + "@babel/plugin-transform-classes": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.7.4.tgz", + "integrity": "sha512-sK1mjWat7K+buWRuImEzjNf68qrKcrddtpQo3swi9j7dUcG6y6R6+Di039QN2bD1dykeswlagupEmpOatFHHUg==", + "requires": { + "@babel/helper-annotate-as-pure": "^7.7.4", + "@babel/helper-define-map": "^7.7.4", + "@babel/helper-function-name": "^7.7.4", + "@babel/helper-optimise-call-expression": "^7.7.4", + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/helper-replace-supers": "^7.7.4", + "@babel/helper-split-export-declaration": "^7.7.4", + "globals": "^11.1.0" + }, + "dependencies": { + "globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==" + } + } + }, + "@babel/plugin-transform-computed-properties": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.7.4.tgz", + "integrity": "sha512-bSNsOsZnlpLLyQew35rl4Fma3yKWqK3ImWMSC/Nc+6nGjC9s5NFWAer1YQ899/6s9HxO2zQC1WoFNfkOqRkqRQ==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-destructuring": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.7.4.tgz", + "integrity": "sha512-4jFMXI1Cu2aXbcXXl8Lr6YubCn6Oc7k9lLsu8v61TZh+1jny2BWmdtvY9zSUlLdGUvcy9DMAWyZEOqjsbeg/wA==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-dotall-regex": { + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.7.7.tgz", + "integrity": "sha512-b4in+YlTeE/QmTgrllnb3bHA0HntYvjz8O3Mcbx75UBPJA2xhb5A8nle498VhxSXJHQefjtQxpnLPehDJ4TRlg==", + "requires": { + "@babel/helper-create-regexp-features-plugin": "^7.7.4", + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-duplicate-keys": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.7.4.tgz", + "integrity": "sha512-g1y4/G6xGWMD85Tlft5XedGaZBCIVN+/P0bs6eabmcPP9egFleMAo65OOjlhcz1njpwagyY3t0nsQC9oTFegJA==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-exponentiation-operator": { + "version": "7.7.4", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.7.4.tgz", + "integrity": "sha512-MCqiLfCKm6KEA1dglf6Uqq1ElDIZwFuzz1WH5mTf8k2uQSxEJMbOIEh7IZv7uichr7PMfi5YVSrr1vz+ipp7AQ==", + "requires": { + "@babel/helper-builder-binary-assignment-operator-visitor": "^7.7.4", + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-for-of": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.7.4.tgz", + "integrity": "sha512-zZ1fD1B8keYtEcKF+M1TROfeHTKnijcVQm0yO/Yu1f7qoDoxEIc/+GX6Go430Bg84eM/xwPFp0+h4EbZg7epAA==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-function-name": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.7.4.tgz", + "integrity": "sha512-E/x09TvjHNhsULs2IusN+aJNRV5zKwxu1cpirZyRPw+FyyIKEHPXTsadj48bVpc1R5Qq1B5ZkzumuFLytnbT6g==", + "requires": { + "@babel/helper-function-name": "^7.7.4", + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-literals": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.7.4.tgz", + "integrity": "sha512-X2MSV7LfJFm4aZfxd0yLVFrEXAgPqYoDG53Br/tCKiKYfX0MjVjQeWPIhPHHsCqzwQANq+FLN786fF5rgLS+gw==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-member-expression-literals": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.7.4.tgz", + "integrity": "sha512-9VMwMO7i69LHTesL0RdGy93JU6a+qOPuvB4F4d0kR0zyVjJRVJRaoaGjhtki6SzQUu8yen/vxPKN6CWnCUw6bA==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-modules-amd": { + "version": "7.7.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.7.5.tgz", + "integrity": "sha512-CT57FG4A2ZUNU1v+HdvDSDrjNWBrtCmSH6YbbgN3Lrf0Di/q/lWRxZrE72p3+HCCz9UjfZOEBdphgC0nzOS6DQ==", + "requires": { + "@babel/helper-module-transforms": "^7.7.5", + "@babel/helper-plugin-utils": "^7.0.0", + "babel-plugin-dynamic-import-node": "^2.3.0" + } + }, + "@babel/plugin-transform-modules-commonjs": { + "version": "7.7.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.7.5.tgz", + "integrity": "sha512-9Cq4zTFExwFhQI6MT1aFxgqhIsMWQWDVwOgLzl7PTWJHsNaqFvklAU+Oz6AQLAS0dJKTwZSOCo20INwktxpi3Q==", + "requires": { + "@babel/helper-module-transforms": "^7.7.5", + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/helper-simple-access": "^7.7.4", + "babel-plugin-dynamic-import-node": "^2.3.0" + } + }, + "@babel/plugin-transform-modules-systemjs": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.7.4.tgz", + "integrity": "sha512-y2c96hmcsUi6LrMqvmNDPBBiGCiQu0aYqpHatVVu6kD4mFEXKjyNxd/drc18XXAf9dv7UXjrZwBVmTTGaGP8iw==", + "requires": { + "@babel/helper-hoist-variables": "^7.7.4", + "@babel/helper-plugin-utils": "^7.0.0", + "babel-plugin-dynamic-import-node": "^2.3.0" + } + }, + "@babel/plugin-transform-modules-umd": { + "version": "7.7.4", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.7.4.tgz", + "integrity": "sha512-u2B8TIi0qZI4j8q4C51ktfO7E3cQ0qnaXFI1/OXITordD40tt17g/sXqgNNCcMTcBFKrUPcGDx+TBJuZxLx7tw==", + "requires": { + "@babel/helper-module-transforms": "^7.7.4", + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-named-capturing-groups-regex": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.7.4.tgz", + "integrity": "sha512-jBUkiqLKvUWpv9GLSuHUFYdmHg0ujC1JEYoZUfeOOfNydZXp1sXObgyPatpcwjWgsdBGsagWW0cdJpX/DO2jMw==", + "requires": { + "@babel/helper-create-regexp-features-plugin": "^7.7.4" + } + }, + "@babel/plugin-transform-new-target": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.7.4.tgz", + "integrity": "sha512-CnPRiNtOG1vRodnsyGX37bHQleHE14B9dnnlgSeEs3ek3fHN1A1SScglTCg1sfbe7sRQ2BUcpgpTpWSfMKz3gg==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-object-super": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.7.4.tgz", + "integrity": "sha512-ho+dAEhC2aRnff2JCA0SAK7V2R62zJd/7dmtoe7MHcso4C2mS+vZjn1Pb1pCVZvJs1mgsvv5+7sT+m3Bysb6eg==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/helper-replace-supers": "^7.7.4" + } + }, + "@babel/plugin-transform-parameters": { + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.7.7.tgz", + "integrity": "sha512-OhGSrf9ZBrr1fw84oFXj5hgi8Nmg+E2w5L7NhnG0lPvpDtqd7dbyilM2/vR8CKbJ907RyxPh2kj6sBCSSfI9Ew==", + "requires": { + "@babel/helper-call-delegate": "^7.7.4", + "@babel/helper-get-function-arity": "^7.7.4", + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-property-literals": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.7.4.tgz", + "integrity": "sha512-MatJhlC4iHsIskWYyawl53KuHrt+kALSADLQQ/HkhTjX954fkxIEh4q5slL4oRAnsm/eDoZ4q0CIZpcqBuxhJQ==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-react-display-name": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.7.4.tgz", + "integrity": "sha512-sBbIvqYkthai0X0vkD2xsAwluBp+LtNHH+/V4a5ydifmTtb8KOVOlrMIk/MYmIc4uTYDnjZUHQildYNo36SRJw==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-react-jsx": { + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.7.7.tgz", + "integrity": "sha512-SlPjWPbva2+7/ZJbGcoqjl4LsQaLpKEzxW9hcxU7675s24JmdotJOSJ4cgAbV82W3FcZpHIGmRZIlUL8ayMvjw==", + "requires": { + "@babel/helper-builder-react-jsx": "^7.7.4", + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/plugin-syntax-jsx": "^7.7.4" + } + }, + "@babel/plugin-transform-react-jsx-self": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.7.4.tgz", + "integrity": "sha512-PWYjSfqrO273mc1pKCRTIJXyqfc9vWYBax88yIhQb+bpw3XChVC7VWS4VwRVs63wFHKxizvGSd00XEr+YB9Q2A==", + "requires": { + 
"@babel/helper-plugin-utils": "^7.0.0", + "@babel/plugin-syntax-jsx": "^7.7.4" + } + }, + "@babel/plugin-transform-react-jsx-source": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.7.4.tgz", + "integrity": "sha512-5ZU9FnPhqtHsOXxutRtXZAzoEJwDaP32QcobbMP1/qt7NYcsCNK8XgzJcJfoEr/ZnzVvUNInNjIW22Z6I8p9mg==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/plugin-syntax-jsx": "^7.7.4" + } + }, + "@babel/plugin-transform-regenerator": { + "version": "7.7.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.7.5.tgz", + "integrity": "sha512-/8I8tPvX2FkuEyWbjRCt4qTAgZK0DVy8QRguhA524UH48RfGJy94On2ri+dCuwOpcerPRl9O4ebQkRcVzIaGBw==", + "requires": { + "regenerator-transform": "^0.14.0" + } + }, + "@babel/plugin-transform-reserved-words": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.7.4.tgz", + "integrity": "sha512-OrPiUB5s5XvkCO1lS7D8ZtHcswIC57j62acAnJZKqGGnHP+TIc/ljQSrgdX/QyOTdEK5COAhuc820Hi1q2UgLQ==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-runtime": { + "version": "7.7.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.7.6.tgz", + "integrity": "sha512-tajQY+YmXR7JjTwRvwL4HePqoL3DYxpYXIHKVvrOIvJmeHe2y1w4tz5qz9ObUDC9m76rCzIMPyn4eERuwA4a4A==", + "requires": { + "@babel/helper-module-imports": "^7.7.4", + "@babel/helper-plugin-utils": "^7.0.0", + "resolve": "^1.8.1", + "semver": "^5.5.1" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "@babel/plugin-transform-shorthand-properties": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.7.4.tgz", + "integrity": "sha512-q+suddWRfIcnyG5YiDP58sT65AJDZSUhXQDZE3r04AuqD6d/XLaQPPXSBzP2zGerkgBivqtQm9XKGLuHqBID6Q==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-spread": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.7.4.tgz", + "integrity": "sha512-8OSs0FLe5/80cndziPlg4R0K6HcWSM0zyNhHhLsmw/Nc5MaA49cAsnoJ/t/YZf8qkG7fD+UjTRaApVDB526d7Q==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-sticky-regex": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.7.4.tgz", + "integrity": "sha512-Ls2NASyL6qtVe1H1hXts9yuEeONV2TJZmplLONkMPUG158CtmnrzW5Q5teibM5UVOFjG0D3IC5mzXR6pPpUY7A==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/helper-regex": "^7.0.0" + } + }, + "@babel/plugin-transform-template-literals": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.7.4.tgz", + "integrity": "sha512-sA+KxLwF3QwGj5abMHkHgshp9+rRz+oY9uoRil4CyLtgEuE/88dpkeWgNk5qKVsJE9iSfly3nvHapdRiIS2wnQ==", + "requires": { + "@babel/helper-annotate-as-pure": "^7.7.4", + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + 
"@babel/plugin-transform-typeof-symbol": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.7.4.tgz", + "integrity": "sha512-KQPUQ/7mqe2m0B8VecdyaW5XcQYaePyl9R7IsKd+irzj6jvbhoGnRE+M0aNkyAzI07VfUQ9266L5xMARitV3wg==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-unicode-regex": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.7.4.tgz", + "integrity": "sha512-N77UUIV+WCvE+5yHw+oks3m18/umd7y392Zv7mYTpFqHtkpcc+QUz+gLJNTWVlWROIWeLqY0f3OjZxV5TcXnRw==", + "requires": { + "@babel/helper-create-regexp-features-plugin": "^7.7.4", + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/polyfill": { + "version": "7.7.0", + "resolved": "https://registry.npmjs.org/@babel/polyfill/-/polyfill-7.7.0.tgz", + "integrity": "sha512-/TS23MVvo34dFmf8mwCisCbWGrfhbiWZSwBo6HkADTBhUa2Q/jWltyY/tpofz/b6/RIhqaqQcquptCirqIhOaQ==", + "requires": { + "core-js": "^2.6.5", + "regenerator-runtime": "^0.13.2" + } + }, + "@babel/preset-env": { + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.7.7.tgz", + "integrity": "sha512-pCu0hrSSDVI7kCVUOdcMNQEbOPJ52E+LrQ14sN8uL2ALfSqePZQlKrOy+tM4uhEdYlCHi4imr8Zz2cZe9oSdIg==", + "requires": { + "@babel/helper-module-imports": "^7.7.4", + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/plugin-proposal-async-generator-functions": "^7.7.4", + "@babel/plugin-proposal-dynamic-import": "^7.7.4", + "@babel/plugin-proposal-json-strings": "^7.7.4", + "@babel/plugin-proposal-object-rest-spread": "^7.7.7", + "@babel/plugin-proposal-optional-catch-binding": "^7.7.4", + "@babel/plugin-proposal-unicode-property-regex": "^7.7.7", + "@babel/plugin-syntax-async-generators": "^7.7.4", + "@babel/plugin-syntax-dynamic-import": "^7.7.4", + "@babel/plugin-syntax-json-strings": "^7.7.4", + "@babel/plugin-syntax-object-rest-spread": "^7.7.4", + "@babel/plugin-syntax-optional-catch-binding": "^7.7.4", + "@babel/plugin-syntax-top-level-await": "^7.7.4", + "@babel/plugin-transform-arrow-functions": "^7.7.4", + "@babel/plugin-transform-async-to-generator": "^7.7.4", + "@babel/plugin-transform-block-scoped-functions": "^7.7.4", + "@babel/plugin-transform-block-scoping": "^7.7.4", + "@babel/plugin-transform-classes": "^7.7.4", + "@babel/plugin-transform-computed-properties": "^7.7.4", + "@babel/plugin-transform-destructuring": "^7.7.4", + "@babel/plugin-transform-dotall-regex": "^7.7.7", + "@babel/plugin-transform-duplicate-keys": "^7.7.4", + "@babel/plugin-transform-exponentiation-operator": "^7.7.4", + "@babel/plugin-transform-for-of": "^7.7.4", + "@babel/plugin-transform-function-name": "^7.7.4", + "@babel/plugin-transform-literals": "^7.7.4", + "@babel/plugin-transform-member-expression-literals": "^7.7.4", + "@babel/plugin-transform-modules-amd": "^7.7.5", + "@babel/plugin-transform-modules-commonjs": "^7.7.5", + "@babel/plugin-transform-modules-systemjs": "^7.7.4", + "@babel/plugin-transform-modules-umd": "^7.7.4", + "@babel/plugin-transform-named-capturing-groups-regex": "^7.7.4", + "@babel/plugin-transform-new-target": "^7.7.4", + "@babel/plugin-transform-object-super": "^7.7.4", + "@babel/plugin-transform-parameters": "^7.7.7", + "@babel/plugin-transform-property-literals": "^7.7.4", + "@babel/plugin-transform-regenerator": "^7.7.5", + "@babel/plugin-transform-reserved-words": "^7.7.4", + "@babel/plugin-transform-shorthand-properties": 
"^7.7.4", + "@babel/plugin-transform-spread": "^7.7.4", + "@babel/plugin-transform-sticky-regex": "^7.7.4", + "@babel/plugin-transform-template-literals": "^7.7.4", + "@babel/plugin-transform-typeof-symbol": "^7.7.4", + "@babel/plugin-transform-unicode-regex": "^7.7.4", + "@babel/types": "^7.7.4", + "browserslist": "^4.6.0", + "core-js-compat": "^3.6.0", + "invariant": "^2.2.2", + "js-levenshtein": "^1.1.3", + "semver": "^5.5.0" + }, + "dependencies": { + "browserslist": { + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.8.3.tgz", + "integrity": "sha512-iU43cMMknxG1ClEZ2MDKeonKE1CCrFVkQK2AqO2YWFmvIrx4JWrvQ4w4hQez6EpVI8rHTtqh/ruHHDHSOKxvUg==", + "requires": { + "caniuse-lite": "^1.0.30001017", + "electron-to-chromium": "^1.3.322", + "node-releases": "^1.1.44" + } + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "@babel/preset-react": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.7.4.tgz", + "integrity": "sha512-j+vZtg0/8pQr1H8wKoaJyGL2IEk3rG/GIvua7Sec7meXVIvGycihlGMx5xcU00kqCJbwzHs18xTu3YfREOqQ+g==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/plugin-transform-react-display-name": "^7.7.4", + "@babel/plugin-transform-react-jsx": "^7.7.4", + "@babel/plugin-transform-react-jsx-self": "^7.7.4", + "@babel/plugin-transform-react-jsx-source": "^7.7.4" + } + }, + "@babel/runtime": { + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.7.7.tgz", + "integrity": "sha512-uCnC2JEVAu8AKB5do1WRIsvrdJ0flYx/A/9f/6chdacnEZ7LmavjdsDXr5ksYBegxtuTPR5Va9/+13QF/kFkCA==", + "requires": { + "regenerator-runtime": "^0.13.2" + } + }, + "@babel/runtime-corejs3": { + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.7.7.tgz", + "integrity": "sha512-kr3W3Fw8mB/CTru2M5zIRQZZgC/9zOxNSoJ/tVCzjPt3H1/p5uuGbz6WwmaQy/TLQcW31rUhUUWKY28sXFRelA==", + "requires": { + "core-js-pure": "^3.0.0", + "regenerator-runtime": "^0.13.2" + } + }, + "@babel/template": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.7.4.tgz", + "integrity": "sha512-qUzihgVPguAzXCK7WXw8pqs6cEwi54s3E+HrejlkuWO6ivMKx9hZl3Y2fSXp9i5HgyWmj7RKP+ulaYnKM4yYxw==", + "requires": { + "@babel/code-frame": "^7.0.0", + "@babel/parser": "^7.7.4", + "@babel/types": "^7.7.4" + } + }, + "@babel/traverse": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.7.4.tgz", + "integrity": "sha512-P1L58hQyupn8+ezVA2z5KBm4/Zr4lCC8dwKCMYzsa5jFMDMQAzaBNy9W5VjB+KAmBjb40U7a/H6ao+Xo+9saIw==", + "requires": { + "@babel/code-frame": "^7.5.5", + "@babel/generator": "^7.7.4", + "@babel/helper-function-name": "^7.7.4", + "@babel/helper-split-export-declaration": "^7.7.4", + "@babel/parser": "^7.7.4", + "@babel/types": "^7.7.4", + "debug": "^4.1.0", + "globals": "^11.1.0", + "lodash": "^4.17.13" + }, + "dependencies": { + "globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==" + } + } + }, + "@babel/types": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.7.4.tgz", + "integrity": 
"sha512-cz5Ji23KCi4T+YIE/BolWosrJuSmoZeN1EFnRtBwF+KKLi8GG/Z2c2hOJJeCXPk4mwk4QFvTmwIodJowXgttRA==", + "requires": { + "esutils": "^2.0.2", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + }, + "@emotion/cache": { + "version": "10.0.27", + "resolved": "https://registry.npmjs.org/@emotion/cache/-/cache-10.0.27.tgz", + "integrity": "sha512-Zp8BEpbMunFsTcqAK4D7YTm3MvCp1SekflSLJH8lze2fCcSZ/yMkXHo8kb3t1/1Tdd3hAqf3Fb7z9VZ+FMiC9w==", + "requires": { + "@emotion/sheet": "0.9.4", + "@emotion/stylis": "0.8.5", + "@emotion/utils": "0.11.3", + "@emotion/weak-memoize": "0.2.5" + } + }, + "@emotion/core": { + "version": "10.0.27", + "resolved": "https://registry.npmjs.org/@emotion/core/-/core-10.0.27.tgz", + "integrity": "sha512-XbD5R36pVbohQMnKfajHv43g8EbN4NHdF6Zh9zg/C0nr0jqwOw3gYnC07Xj3yG43OYSRyrGsoQ5qPwc8ycvLZw==", + "requires": { + "@babel/runtime": "^7.5.5", + "@emotion/cache": "^10.0.27", + "@emotion/css": "^10.0.27", + "@emotion/serialize": "^0.11.15", + "@emotion/sheet": "0.9.4", + "@emotion/utils": "0.11.3" + } + }, + "@emotion/css": { + "version": "10.0.27", + "resolved": "https://registry.npmjs.org/@emotion/css/-/css-10.0.27.tgz", + "integrity": "sha512-6wZjsvYeBhyZQYNrGoR5yPMYbMBNEnanDrqmsqS1mzDm1cOTu12shvl2j4QHNS36UaTE0USIJawCH9C8oW34Zw==", + "requires": { + "@emotion/serialize": "^0.11.15", + "@emotion/utils": "0.11.3", + "babel-plugin-emotion": "^10.0.27" + } + }, + "@emotion/hash": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/@emotion/hash/-/hash-0.7.4.tgz", + "integrity": "sha512-fxfMSBMX3tlIbKUdtGKxqB1fyrH6gVrX39Gsv3y8lRYKUqlgDt3UMqQyGnR1bQMa2B8aGnhLZokZgg8vT0Le+A==" + }, + "@emotion/is-prop-valid": { + "version": "0.8.6", + "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.8.6.tgz", + "integrity": "sha512-mnZMho3Sq8BfzkYYRVc8ilQTnc8U02Ytp6J1AwM6taQStZ3AhsEJBX2LzhA/LJirNCwM2VtHL3VFIZ+sNJUgUQ==", + "requires": { + "@emotion/memoize": "0.7.4" + } + }, + "@emotion/memoize": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.4.tgz", + "integrity": "sha512-Ja/Vfqe3HpuzRsG1oBtWTHk2PGZ7GR+2Vz5iYGelAw8dx32K0y7PjVuxK6z1nMpZOqAFsRUPCkK1YjJ56qJlgw==" + }, + "@emotion/serialize": { + "version": "0.11.15", + "resolved": "https://registry.npmjs.org/@emotion/serialize/-/serialize-0.11.15.tgz", + "integrity": "sha512-YE+qnrmGwyR+XB5j7Bi+0GT1JWsdcjM/d4POu+TXkcnrRs4RFCCsi3d/Ebf+wSStHqAlTT2+dfd+b9N9EO2KBg==", + "requires": { + "@emotion/hash": "0.7.4", + "@emotion/memoize": "0.7.4", + "@emotion/unitless": "0.7.5", + "@emotion/utils": "0.11.3", + "csstype": "^2.5.7" + } + }, + "@emotion/sheet": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/@emotion/sheet/-/sheet-0.9.4.tgz", + "integrity": "sha512-zM9PFmgVSqBw4zL101Q0HrBVTGmpAxFZH/pYx/cjJT5advXguvcgjHFTCaIO3enL/xr89vK2bh0Mfyj9aa0ANA==" + }, + "@emotion/styled": { + "version": "10.0.27", + "resolved": "https://registry.npmjs.org/@emotion/styled/-/styled-10.0.27.tgz", + "integrity": "sha512-iK/8Sh7+NLJzyp9a5+vIQIXTYxfT4yB/OJbjzQanB2RZpvmzBQOHZWhpAMZWYEKRNNbsD6WfBw5sVWkb6WzS/Q==", + "requires": { + "@emotion/styled-base": "^10.0.27", + "babel-plugin-emotion": "^10.0.27" + } + }, + "@emotion/styled-base": { + "version": "10.0.27", + "resolved": "https://registry.npmjs.org/@emotion/styled-base/-/styled-base-10.0.27.tgz", + "integrity": "sha512-ufHM/HhE3nr309hJG9jxuFt71r6aHn7p+bwXduFxcwPFEfBIqvmZUMtZ9YxIsY61PVwK3bp4G1XhaCzy9smVvw==", + "requires": { + "@babel/runtime": "^7.5.5", + "@emotion/is-prop-valid": "0.8.6", + 
"@emotion/serialize": "^0.11.15", + "@emotion/utils": "0.11.3" + } + }, + "@emotion/stylis": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/@emotion/stylis/-/stylis-0.8.5.tgz", + "integrity": "sha512-h6KtPihKFn3T9fuIrwvXXUOwlx3rfUvfZIcP5a6rh8Y7zjE3O06hT5Ss4S/YI1AYhuZ1kjaE/5EaOOI2NqSylQ==" + }, + "@emotion/unitless": { + "version": "0.7.5", + "resolved": "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.7.5.tgz", + "integrity": "sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg==" + }, + "@emotion/utils": { + "version": "0.11.3", + "resolved": "https://registry.npmjs.org/@emotion/utils/-/utils-0.11.3.tgz", + "integrity": "sha512-0o4l6pZC+hI88+bzuaX/6BgOvQVhbt2PfmxauVaYOGgbsAw14wdKyvMCZXnsnsHys94iadcF+RG/wZyx6+ZZBw==" + }, + "@emotion/weak-memoize": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/@emotion/weak-memoize/-/weak-memoize-0.2.5.tgz", + "integrity": "sha512-6U71C2Wp7r5XtFtQzYrW5iKFT67OixrSxjI4MptCHzdSVlgabczzqLe0ZSgnub/5Kp4hSbpDB1tMytZY9pwxxA==" + }, + "@hapi/address": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@hapi/address/-/address-2.1.4.tgz", + "integrity": "sha512-QD1PhQk+s31P1ixsX0H0Suoupp3VMXzIVMSwobR3F3MSUO2YCV0B7xqLcUw/Bh8yuvd3LhpyqLQWTNcRmp6IdQ==" + }, + "@hapi/bourne": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@hapi/bourne/-/bourne-1.3.2.tgz", + "integrity": "sha512-1dVNHT76Uu5N3eJNTYcvxee+jzX4Z9lfciqRRHCU27ihbUcYi+iSc2iml5Ke1LXe1SyJCLA0+14Jh4tXJgOppA==" + }, + "@hapi/hoek": { + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-8.5.0.tgz", + "integrity": "sha512-7XYT10CZfPsH7j9F1Jmg1+d0ezOux2oM2GfArAzLwWe4mE2Dr3hVjsAL6+TFY49RRJlCdJDMw3nJsLFroTc8Kw==" + }, + "@hapi/joi": { + "version": "15.1.1", + "resolved": "https://registry.npmjs.org/@hapi/joi/-/joi-15.1.1.tgz", + "integrity": "sha512-entf8ZMOK8sc+8YfeOlM8pCfg3b5+WZIKBfUaaJT8UsjAAPjartzxIYm3TIbjvA4u+u++KbcXD38k682nVHDAQ==", + "requires": { + "@hapi/address": "2.x.x", + "@hapi/bourne": "1.x.x", + "@hapi/hoek": "8.x.x", + "@hapi/topo": "3.x.x" + } + }, + "@hapi/topo": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/@hapi/topo/-/topo-3.1.6.tgz", + "integrity": "sha512-tAag0jEcjwH+P2quUfipd7liWCNX2F8NvYjQp2wtInsZxnMlypdw0FtAOLxtvvkO+GSRRbmNi8m/5y42PQJYCQ==", + "requires": { + "@hapi/hoek": "^8.3.0" + } + }, + "@jimp/bmp": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/bmp/-/bmp-0.6.8.tgz", + "integrity": "sha512-uxVgSkI62uAzk5ZazYHEHBehow590WAkLKmDXLzkr/XP/Hv2Fx1T4DKwJ/15IY5ktq5VAhAUWGXTyd8KWFsx7w==", + "requires": { + "@jimp/utils": "^0.6.8", + "bmp-js": "^0.1.0", + "core-js": "^2.5.7" + } + }, + "@jimp/core": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/core/-/core-0.6.8.tgz", + "integrity": "sha512-JOFqBBcSNiDiMZJFr6OJqC6viXj5NVBQISua0eacoYvo4YJtTajOIxC4MqWyUmGrDpRMZBR8QhSsIOwsFrdROA==", + "requires": { + "@jimp/utils": "^0.6.8", + "any-base": "^1.1.0", + "buffer": "^5.2.0", + "core-js": "^2.5.7", + "exif-parser": "^0.1.12", + "file-type": "^9.0.0", + "load-bmfont": "^1.3.1", + "mkdirp": "0.5.1", + "phin": "^2.9.1", + "pixelmatch": "^4.0.2", + "tinycolor2": "^1.4.1" + }, + "dependencies": { + "buffer": { + "version": "5.4.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.4.3.tgz", + "integrity": "sha512-zvj65TkFeIt3i6aj5bIvJDzjjQQGs4o/sNoezg1F1kYap9Nu2jcUdpwzRSJTHMMzG0H7bZkn4rNQpImhuxWX2A==", + "requires": { + "base64-js": "^1.0.2", + "ieee754": "^1.1.4" + } + }, + 
"file-type": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-9.0.0.tgz", + "integrity": "sha512-Qe/5NJrgIOlwijpq3B7BEpzPFcgzggOTagZmkXQY4LA6bsXKTUstK7Wp12lEJ/mLKTpvIZxmIuRcLYWT6ov9lw==" + } + } + }, + "@jimp/custom": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/custom/-/custom-0.6.8.tgz", + "integrity": "sha512-FrYlzZRVXP2vuVwd7Nc2dlK+iZk4g6IaT1Ib8Z6vU5Kkwlt83FJIPJ2UUFABf3bF5big0wkk8ZUihWxE4Nzdng==", + "requires": { + "@jimp/core": "^0.6.8", + "core-js": "^2.5.7" + } + }, + "@jimp/gif": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/gif/-/gif-0.6.8.tgz", + "integrity": "sha512-yyOlujjQcgz9zkjM5ihZDEppn9d1brJ7jQHP5rAKmqep0G7FU1D0AKcV+Ql18RhuI/CgWs10wAVcrQpmLnu4Yw==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7", + "omggif": "^1.0.9" + } + }, + "@jimp/jpeg": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/jpeg/-/jpeg-0.6.8.tgz", + "integrity": "sha512-rGtXbYpFXAn471qLpTGvhbBMNHJo5KiufN+vC5AWyufntmkt5f0Ox2Cx4ijuBMDtirZchxbMLtrfGjznS4L/ew==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7", + "jpeg-js": "^0.3.4" + } + }, + "@jimp/plugin-blit": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/plugin-blit/-/plugin-blit-0.6.8.tgz", + "integrity": "sha512-7Tl6YpKTSpvwQbnGNhsfX2zyl3jRVVopd276Y2hF2zpDz9Bycow7NdfNU/4Nx1jaf96X6uWOtSVINcQ7rGd47w==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7" + } + }, + "@jimp/plugin-blur": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/plugin-blur/-/plugin-blur-0.6.8.tgz", + "integrity": "sha512-NpZCMKxXHLDQsX9zPlWtpMA660DQStY6/z8ZetyxCDbqrLe9YCXpeR4MNhdJdABIiwTm1W5FyFF4kp81PHJx3Q==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7" + } + }, + "@jimp/plugin-color": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/plugin-color/-/plugin-color-0.6.8.tgz", + "integrity": "sha512-jjFyU0zNmGOH2rjzHuOMU4kaia0oo82s/7UYfn5h7OUkmUZTd6Do3ZSK1PiXA7KR+s4B76/Omm6Doh/0SGb7BQ==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7", + "tinycolor2": "^1.4.1" + } + }, + "@jimp/plugin-contain": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/plugin-contain/-/plugin-contain-0.6.8.tgz", + "integrity": "sha512-p/P2wCXhAzbmEgXvGsvmxLmbz45feF6VpR4m9suPSOr8PC/i/XvTklTqYEUidYYAft4vHgsYJdS74HKSMnH8lw==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7" + } + }, + "@jimp/plugin-cover": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/plugin-cover/-/plugin-cover-0.6.8.tgz", + "integrity": "sha512-2PvWgk+PJfRsfWDI1G8Fpjrsu0ZlpNyZxO2+fqWlVo6y/y2gP4v08FqvbkcqSjNlOu2IDWIFXpgyU0sTINWZLg==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7" + } + }, + "@jimp/plugin-crop": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/plugin-crop/-/plugin-crop-0.6.8.tgz", + "integrity": "sha512-CbrcpWE2xxPK1n/JoTXzhRUhP4mO07mTWaSavenCg664oQl/9XCtL+A0FekuNHzIvn4myEqvkiTwN7FsbunS/Q==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7" + } + }, + "@jimp/plugin-displace": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/plugin-displace/-/plugin-displace-0.6.8.tgz", + "integrity": "sha512-RmV2bPxoPE6mrPxtYSPtHxm2cGwBQr5a2p+9gH6SPy+eUMrbGjbvjwKNfXWUYD0leML+Pt5XOmAS9pIROmuruQ==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7" + } + }, + "@jimp/plugin-dither": { + "version": "0.6.8", + "resolved": 
"https://registry.npmjs.org/@jimp/plugin-dither/-/plugin-dither-0.6.8.tgz", + "integrity": "sha512-x6V/qjxe+xypjpQm7GbiMNqci1EW5UizrcebOhHr8AHijOEqHd2hjXh5f6QIGfrkTFelc4/jzq1UyCsYntqz9Q==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7" + } + }, + "@jimp/plugin-flip": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/plugin-flip/-/plugin-flip-0.6.8.tgz", + "integrity": "sha512-4il6Da6G39s9MyWBEee4jztEOUGJ40E6OlPjkMrdpDNvge6hYEAB31BczTYBP/CEY74j4LDSoY5LbcU4kv06yA==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7" + } + }, + "@jimp/plugin-gaussian": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/plugin-gaussian/-/plugin-gaussian-0.6.8.tgz", + "integrity": "sha512-pVOblmjv7stZjsqloi4YzHVwAPXKGdNaHPhp4KP4vj41qtc6Hxd9z/+VWGYRTunMFac84gUToe0UKIXd6GhoKw==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7" + } + }, + "@jimp/plugin-invert": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/plugin-invert/-/plugin-invert-0.6.8.tgz", + "integrity": "sha512-11zuLiXDHr6tFv4U8aieXqNXQEKbDbSBG/h+X62gGTNFpyn8EVPpncHhOqrAFtZUaPibBqMFlNJ15SzwC7ExsQ==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7" + } + }, + "@jimp/plugin-mask": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/plugin-mask/-/plugin-mask-0.6.8.tgz", + "integrity": "sha512-hZJ0OiKGJyv7hDSATwJDkunB1Ie80xJnONMgpUuUseteK45YeYNBOiZVUe8vum8QI1UwavgBzcvQ9u4fcgXc9g==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7" + } + }, + "@jimp/plugin-normalize": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/plugin-normalize/-/plugin-normalize-0.6.8.tgz", + "integrity": "sha512-Q4oYhU+sSyTJI7pMZlg9/mYh68ujLfOxXzQGEXuw0sHGoGQs3B0Jw7jmzGa6pIS06Hup5hD2Zuh1ppvMdjJBfQ==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7" + } + }, + "@jimp/plugin-print": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/plugin-print/-/plugin-print-0.6.8.tgz", + "integrity": "sha512-2aokejGn4Drv1FesnZGqh5KEq0FQtR0drlmtyZrBH+r9cx7hh0Qgf4D1BOTDEgXkfSSngjGRjKKRW/fwOrVXYw==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7", + "load-bmfont": "^1.4.0" + } + }, + "@jimp/plugin-resize": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/plugin-resize/-/plugin-resize-0.6.8.tgz", + "integrity": "sha512-27nPh8L1YWsxtfmV/+Ub5dOTpXyC0HMF2cu52RQSCYxr+Lm1+23dJF70AF1poUbUe+FWXphwuUxQzjBJza9UoA==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7" + } + }, + "@jimp/plugin-rotate": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/plugin-rotate/-/plugin-rotate-0.6.8.tgz", + "integrity": "sha512-GbjETvL05BDoLdszNUV4Y0yLkHf177MnqGqilA113LIvx9aD0FtUopGXYfRGVvmtTOTouoaGJUc+K6qngvKxww==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7" + } + }, + "@jimp/plugin-scale": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/plugin-scale/-/plugin-scale-0.6.8.tgz", + "integrity": "sha512-GzIYWR/oCUK2jAwku23zt19V1ssaEU4pL0x2XsLNKuuJEU6DvEytJyTMXCE7OLG/MpDBQcQclJKHgiyQm5gIOQ==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7" + } + }, + "@jimp/plugins": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/plugins/-/plugins-0.6.8.tgz", + "integrity": "sha512-fMcTI72Vn/Lz6JftezTURmyP5ml/xGMe0Ljx2KRJ85IWyP33vDmGIUuutFiBEbh2+y7lRT+aTSmjs0QGa/xTmQ==", + "requires": { + "@jimp/plugin-blit": "^0.6.8", + "@jimp/plugin-blur": "^0.6.8", + 
"@jimp/plugin-color": "^0.6.8", + "@jimp/plugin-contain": "^0.6.8", + "@jimp/plugin-cover": "^0.6.8", + "@jimp/plugin-crop": "^0.6.8", + "@jimp/plugin-displace": "^0.6.8", + "@jimp/plugin-dither": "^0.6.8", + "@jimp/plugin-flip": "^0.6.8", + "@jimp/plugin-gaussian": "^0.6.8", + "@jimp/plugin-invert": "^0.6.8", + "@jimp/plugin-mask": "^0.6.8", + "@jimp/plugin-normalize": "^0.6.8", + "@jimp/plugin-print": "^0.6.8", + "@jimp/plugin-resize": "^0.6.8", + "@jimp/plugin-rotate": "^0.6.8", + "@jimp/plugin-scale": "^0.6.8", + "core-js": "^2.5.7", + "timm": "^1.6.1" + } + }, + "@jimp/png": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/png/-/png-0.6.8.tgz", + "integrity": "sha512-JHHg/BZ7KDtHQrcG+a7fztw45rdf7okL/YwkN4qU5FH7Fcrp41nX5QnRviDtD9hN+GaNC7kvjvcqRAxW25qjew==", + "requires": { + "@jimp/utils": "^0.6.8", + "core-js": "^2.5.7", + "pngjs": "^3.3.3" + } + }, + "@jimp/tiff": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/tiff/-/tiff-0.6.8.tgz", + "integrity": "sha512-iWHbxd+0IKWdJyJ0HhoJCGYmtjPBOusz1z1HT/DnpePs/Lo3TO4d9ALXqYfUkyG74ZK5jULZ69KLtwuhuJz1bg==", + "requires": { + "core-js": "^2.5.7", + "utif": "^2.0.1" + } + }, + "@jimp/types": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/types/-/types-0.6.8.tgz", + "integrity": "sha512-vCZ/Cp2osy69VP21XOBACfHI5HeR60Rfd4Jidj4W73UL+HrFWOtyQiJ7hlToyu1vI5mR/NsUQpzyQvz56ADm5A==", + "requires": { + "@jimp/bmp": "^0.6.8", + "@jimp/gif": "^0.6.8", + "@jimp/jpeg": "^0.6.8", + "@jimp/png": "^0.6.8", + "@jimp/tiff": "^0.6.8", + "core-js": "^2.5.7", + "timm": "^1.6.1" + } + }, + "@jimp/utils": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/@jimp/utils/-/utils-0.6.8.tgz", + "integrity": "sha512-7RDfxQ2C/rarNG9iso5vmnKQbcvlQjBIlF/p7/uYj72WeZgVCB+5t1fFBKJSU4WhniHX4jUMijK+wYGE3Y3bGw==", + "requires": { + "core-js": "^2.5.7" + } + }, + "@mikaelkristiansson/domready": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@mikaelkristiansson/domready/-/domready-1.0.9.tgz", + "integrity": "sha512-FOAjeRHULSWXd6JMuCDwf3zPbe11kP971+Bufrj9M8rQ33ZMtThgKd6IJgzj6tr/+1Rq3czzLI1LAa9x0IC92w==" + }, + "@mrmlnc/readdir-enhanced": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/@mrmlnc/readdir-enhanced/-/readdir-enhanced-2.2.1.tgz", + "integrity": "sha512-bPHp6Ji8b41szTOcaP63VlnbbO5Ny6dwAATtY6JTjh5N2OLrb5Qk/Th5cRkRQhkWCt+EJsYrNB0MiL+Gpn6e3g==", + "requires": { + "call-me-maybe": "^1.0.1", + "glob-to-regexp": "^0.3.0" + } + }, + "@nodelib/fs.scandir": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.3.tgz", + "integrity": "sha512-eGmwYQn3gxo4r7jdQnkrrN6bY478C3P+a/y72IJukF8LjB6ZHeB3c+Ehacj3sYeSmUXGlnA67/PmbM9CVwL7Dw==", + "requires": { + "@nodelib/fs.stat": "2.0.3", + "run-parallel": "^1.1.9" + } + }, + "@nodelib/fs.stat": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.3.tgz", + "integrity": "sha512-bQBFruR2TAwoevBEd/NWMoAAtNGzTRgdrqnYCc7dhzfoNvqPzLyqlEQnzZ3kVnNrSp25iyxE00/3h2fqGAGArA==" + }, + "@nodelib/fs.walk": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.4.tgz", + "integrity": "sha512-1V9XOY4rDW0rehzbrcqAmHnz8e7SKvX27gh8Gt2WgB0+pdzdiLV83p72kZPU+jvMbS1qU5mauP2iOvO8rhmurQ==", + "requires": { + "@nodelib/fs.scandir": "2.1.3", + "fastq": "^1.6.0" + } + }, + "@pieh/friendly-errors-webpack-plugin": { + "version": "1.7.0-chalk-2", + "resolved": 
"https://registry.npmjs.org/@pieh/friendly-errors-webpack-plugin/-/friendly-errors-webpack-plugin-1.7.0-chalk-2.tgz", + "integrity": "sha512-65+vYGuDkHBCWWjqzzR/Ck318+d6yTI00EqII9qe3aPD1J3Olhvw0X38uM5moQb1PK/ksDXwSoPGt/5QhCiotw==", + "requires": { + "chalk": "^2.4.2", + "error-stack-parser": "^2.0.0", + "string-width": "^2.0.0", + "strip-ansi": "^3" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + }, + "dependencies": { + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "requires": { + "ansi-regex": "^3.0.0" + } + } + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "requires": { + "ansi-regex": "^2.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" + } + } + } + } + }, + "@reach/router": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@reach/router/-/router-1.2.1.tgz", + "integrity": "sha512-kTaX08X4g27tzIFQGRukaHmNbtMYDS3LEWIS8+l6OayGIw6Oyo1HIF/JzeuR2FoF9z6oV+x/wJSVSq4v8tcUGQ==", + "requires": { + "create-react-context": "^0.2.1", + "invariant": "^2.2.3", + "prop-types": "^15.6.1", + "react-lifecycles-compat": "^3.0.4", + "warning": "^3.0.0" + } + }, + "@sindresorhus/is": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.14.0.tgz", + "integrity": "sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ==" + }, + "@styled-system/background": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/background/-/background-5.1.2.tgz", + "integrity": "sha512-jtwH2C/U6ssuGSvwTN3ri/IyjdHb8W9X/g8Y0JLcrH02G+BW3OS8kZdHphF1/YyRklnrKrBT2ngwGUK6aqqV3A==", + "requires": { + "@styled-system/core": "^5.1.2" + } + }, + "@styled-system/border": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/border/-/border-5.1.2.tgz", + "integrity": "sha512-mSSxyQGXELdNSOlf4RqaOKsX+w6//zooR3p6qDj5Zgc5pIdEsJm63QLz6EST/6xBJwTX0Z1w4ExItdd6Q7rlTQ==", + "requires": { + "@styled-system/core": "^5.1.2" + } + }, + "@styled-system/color": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/color/-/color-5.1.2.tgz", + "integrity": "sha512-1kCkeKDZkt4GYkuFNKc7vJQMcOmTl3bJY3YBUs7fCNM6mMYJeT1pViQ2LwBSBJytj3AB0o4IdLBoepgSgGl5MA==", + "requires": { + "@styled-system/core": "^5.1.2" + } + }, + "@styled-system/core": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/core/-/core-5.1.2.tgz", + "integrity": 
"sha512-XclBDdNIy7OPOsN4HBsawG2eiWfCcuFt6gxKn1x4QfMIgeO6TOlA2pZZ5GWZtIhCUqEPTgIBta6JXsGyCkLBYw==", + "requires": { + "object-assign": "^4.1.1" + } + }, + "@styled-system/css": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/@styled-system/css/-/css-5.1.4.tgz", + "integrity": "sha512-79IFT37Kxb6dlbx/0hwIGOakNHkK5oU3cMypGziShnEK8WMgK/+vuAi4MHO7uLI+FZ5U8MGYvGY9Gtk0mBzxSg==" + }, + "@styled-system/flexbox": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/flexbox/-/flexbox-5.1.2.tgz", + "integrity": "sha512-6hHV52+eUk654Y1J2v77B8iLeBNtc+SA3R4necsu2VVinSD7+XY5PCCEzBFaWs42dtOEDIa2lMrgL0YBC01mDQ==", + "requires": { + "@styled-system/core": "^5.1.2" + } + }, + "@styled-system/grid": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/grid/-/grid-5.1.2.tgz", + "integrity": "sha512-K3YiV1KyHHzgdNuNlaw8oW2ktMuGga99o1e/NAfTEi5Zsa7JXxzwEnVSDSBdJC+z6R8WYTCYRQC6bkVFcvdTeg==", + "requires": { + "@styled-system/core": "^5.1.2" + } + }, + "@styled-system/layout": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/layout/-/layout-5.1.2.tgz", + "integrity": "sha512-wUhkMBqSeacPFhoE9S6UF3fsMEKFv91gF4AdDWp0Aym1yeMPpqz9l9qS/6vjSsDPF7zOb5cOKC3tcKKOMuDCPw==", + "requires": { + "@styled-system/core": "^5.1.2" + } + }, + "@styled-system/position": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/position/-/position-5.1.2.tgz", + "integrity": "sha512-60IZfMXEOOZe3l1mCu6sj/2NAyUmES2kR9Kzp7s2D3P4qKsZWxD1Se1+wJvevb+1TP+ZMkGPEYYXRyU8M1aF5A==", + "requires": { + "@styled-system/core": "^5.1.2" + } + }, + "@styled-system/shadow": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/shadow/-/shadow-5.1.2.tgz", + "integrity": "sha512-wqniqYb7XuZM7K7C0d1Euxc4eGtqEe/lvM0WjuAFsQVImiq6KGT7s7is+0bNI8O4Dwg27jyu4Lfqo/oIQXNzAg==", + "requires": { + "@styled-system/core": "^5.1.2" + } + }, + "@styled-system/should-forward-prop": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/@styled-system/should-forward-prop/-/should-forward-prop-5.1.4.tgz", + "integrity": "sha512-WvKlXdbzz64QX8E66dlt/t+AsHhE5mJPyxMAufKeUKn5DSj+1w7CfLtwVH2oYje7XFcrcZOV9elzaeMWE0znTw==", + "requires": { + "@emotion/is-prop-valid": "^0.8.1", + "@emotion/memoize": "^0.7.1", + "styled-system": "^5.1.4" + } + }, + "@styled-system/space": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/space/-/space-5.1.2.tgz", + "integrity": "sha512-+zzYpR8uvfhcAbaPXhH8QgDAV//flxqxSjHiS9cDFQQUSznXMQmxJegbhcdEF7/eNnJgHeIXv1jmny78kipgBA==", + "requires": { + "@styled-system/core": "^5.1.2" + } + }, + "@styled-system/typography": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@styled-system/typography/-/typography-5.1.2.tgz", + "integrity": "sha512-BxbVUnN8N7hJ4aaPOd7wEsudeT7CxarR+2hns8XCX1zp0DFfbWw4xYa/olA0oQaqx7F1hzDg+eRaGzAJbF+jOg==", + "requires": { + "@styled-system/core": "^5.1.2" + } + }, + "@styled-system/variant": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/@styled-system/variant/-/variant-5.1.4.tgz", + "integrity": "sha512-4bI2AYQfWU/ljvWlysKU8T+6gsVx5xXEI/yBvg2De7Jd6o03ZQ9tsL3OJwbzyMkIKg+UZp7YG190txEOb8K6tg==", + "requires": { + "@styled-system/core": "^5.1.2", + "@styled-system/css": "^5.1.4" + } + }, + "@szmarczak/http-timer": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-1.1.2.tgz", + "integrity": 
"sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA==", + "requires": { + "defer-to-connect": "^1.0.1" + } + }, + "@types/color-name": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz", + "integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==" + }, + "@types/configstore": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@types/configstore/-/configstore-2.1.1.tgz", + "integrity": "sha1-zR6FU2M60xhcPy8jns/10mQ+krY=" + }, + "@types/debug": { + "version": "0.0.29", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-0.0.29.tgz", + "integrity": "sha1-oeUUrfvZLwOiJLpU1pMRHb8fN1Q=" + }, + "@types/eslint-visitor-keys": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@types/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz", + "integrity": "sha512-OCutwjDZ4aFS6PB1UZ988C4YgwlBHJd6wCeQqaLdmadZ/7e+w79+hbMUFC1QXDNCmdyoRfAFdm0RypzwR+Qpag==" + }, + "@types/events": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/events/-/events-3.0.0.tgz", + "integrity": "sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g==" + }, + "@types/get-port": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/@types/get-port/-/get-port-0.0.4.tgz", + "integrity": "sha1-62u3Qj2fiItjJmDcfS/T5po1ZD4=" + }, + "@types/glob": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.1.1.tgz", + "integrity": "sha512-1Bh06cbWJUHMC97acuD6UMG29nMt0Aqz1vF3guLfG+kHHJhy3AyohZFFxYk2f7Q1SQIrNwvncxAE0N/9s70F2w==", + "requires": { + "@types/events": "*", + "@types/minimatch": "*", + "@types/node": "*" + } + }, + "@types/history": { + "version": "4.7.3", + "resolved": "https://registry.npmjs.org/@types/history/-/history-4.7.3.tgz", + "integrity": "sha512-cS5owqtwzLN5kY+l+KgKdRJ/Cee8tlmQoGQuIE9tWnSmS3JMKzmxo2HIAk2wODMifGwO20d62xZQLYz+RLfXmw==" + }, + "@types/json-schema": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.4.tgz", + "integrity": "sha512-8+KAKzEvSUdeo+kmqnKrqgeE+LcA0tjYWFY7RPProVYwnqDjukzO+3b6dLD56rYX5TdWejnEOLJYOIeh4CXKuA==" + }, + "@types/mdast": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.3.tgz", + "integrity": "sha512-SXPBMnFVQg1s00dlMCc/jCdvPqdE4mXaMMCeRlxLDmTAEoegHT53xKtkDnzDTOcmMHUfcjyf36/YYZ6SxRdnsw==", + "requires": { + "@types/unist": "*" + } + }, + "@types/minimatch": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-3.0.3.tgz", + "integrity": "sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA==" + }, + "@types/mkdirp": { + "version": "0.3.29", + "resolved": "https://registry.npmjs.org/@types/mkdirp/-/mkdirp-0.3.29.tgz", + "integrity": "sha1-fyrX7FX5FEgvybHsS7GuYCjUYGY=" + }, + "@types/node": { + "version": "13.1.4", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.1.4.tgz", + "integrity": "sha512-Lue/mlp2egZJoHXZr4LndxDAd7i/7SQYhV0EjWfb/a4/OZ6tuVwMCVPiwkU5nsEipxEf7hmkSU7Em5VQ8P5NGA==" + }, + "@types/parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==" + }, + "@types/prop-types": { + "version": "15.7.3", + "resolved": 
"https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.3.tgz", + "integrity": "sha512-KfRL3PuHmqQLOG+2tGpRO26Ctg+Cq1E01D2DMriKEATHgWLfeNDmq9e29Q9WIky0dQ3NPkd1mzYH8Lm936Z9qw==" + }, + "@types/q": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/@types/q/-/q-1.5.2.tgz", + "integrity": "sha512-ce5d3q03Ex0sy4R14722Rmt6MT07Ua+k4FwDfdcToYJcMKNtRVQvJ6JCAPdAmAnbRb6CsX6aYb9m96NGod9uTw==" + }, + "@types/reach__router": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@types/reach__router/-/reach__router-1.2.6.tgz", + "integrity": "sha512-Oh5DAVr/L2svBvubw6QEFpXGu295Y406BPs4i9t1n2pp7M+q3pmCmhzb9oZV5wncR41KCD3NHl1Yhi7uKnTPsA==", + "requires": { + "@types/history": "*", + "@types/react": "*" + } + }, + "@types/react": { + "version": "16.9.17", + "resolved": "https://registry.npmjs.org/@types/react/-/react-16.9.17.tgz", + "integrity": "sha512-UP27In4fp4sWF5JgyV6pwVPAQM83Fj76JOcg02X5BZcpSu5Wx+fP9RMqc2v0ssBoQIFvD5JdKY41gjJJKmw6Bg==", + "requires": { + "@types/prop-types": "*", + "csstype": "^2.2.0" + } + }, + "@types/tmp": { + "version": "0.0.32", + "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.0.32.tgz", + "integrity": "sha1-DTyzECL4Qn6ljACK8yuA2hJspOM=" + }, + "@types/unist": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.3.tgz", + "integrity": "sha512-FvUupuM3rlRsRtCN+fDudtmytGO6iHJuuRKS1Ss0pG5z8oX0diNEw94UEL7hgDbpN94rgaK5R7sWm6RrSkZuAQ==" + }, + "@types/vfile": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/vfile/-/vfile-3.0.2.tgz", + "integrity": "sha512-b3nLFGaGkJ9rzOcuXRfHkZMdjsawuDD0ENL9fzTophtBg8FJHSGbH7daXkEpcwy3v7Xol3pAvsmlYyFhR4pqJw==", + "requires": { + "@types/node": "*", + "@types/unist": "*", + "@types/vfile-message": "*" + } + }, + "@types/vfile-message": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@types/vfile-message/-/vfile-message-2.0.0.tgz", + "integrity": "sha512-GpTIuDpb9u4zIO165fUy9+fXcULdD8HFRNli04GehoMVbeNq7D6OBnqSmg3lxZnC+UvgUhEWKxdKiwYUkGltIw==", + "requires": { + "vfile-message": "*" + } + }, + "@typescript-eslint/eslint-plugin": { + "version": "2.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-2.15.0.tgz", + "integrity": "sha512-XRJFznI5v4K1WvIrWmjFjBAdQWaUTz4xJEdqR7+wAFsv6Q9dP3mOlE6BMNT3pdlp9eF1+bC5m5LZTmLMqffCVw==", + "requires": { + "@typescript-eslint/experimental-utils": "2.15.0", + "eslint-utils": "^1.4.3", + "functional-red-black-tree": "^1.0.1", + "regexpp": "^3.0.0", + "tsutils": "^3.17.1" + }, + "dependencies": { + "regexpp": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.0.0.tgz", + "integrity": "sha512-Z+hNr7RAVWxznLPuA7DIh8UNX1j9CDrUQxskw9IrBE1Dxue2lyXT+shqEIeLUjrokxIP8CMy1WkjgG3rTsd5/g==" + } + } + }, + "@typescript-eslint/experimental-utils": { + "version": "2.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-2.15.0.tgz", + "integrity": "sha512-Qkxu5zndY5hqlcQkmA88gfLvqQulMpX/TN91XC7OuXsRf4XG5xLGie0sbpX97o/oeccjeZYRMipIsjKk/tjDHA==", + "requires": { + "@types/json-schema": "^7.0.3", + "@typescript-eslint/typescript-estree": "2.15.0", + "eslint-scope": "^5.0.0" + } + }, + "@typescript-eslint/parser": { + "version": "2.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-2.15.0.tgz", + "integrity": "sha512-6iSgQsqAYTaHw59t0tdjzZJluRAjswdGltzKEdLtcJOxR2UVTPHYvZRqkAVGCkaMVb6Fpa60NnuozNCvsSpA9g==", + "requires": { + "@types/eslint-visitor-keys": 
"^1.0.0", + "@typescript-eslint/experimental-utils": "2.15.0", + "@typescript-eslint/typescript-estree": "2.15.0", + "eslint-visitor-keys": "^1.1.0" + } + }, + "@typescript-eslint/typescript-estree": { + "version": "2.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-2.15.0.tgz", + "integrity": "sha512-L6Pog+w3VZzXkAdyqA0VlwybF8WcwZX+mufso86CMxSdWmcizJ38lgBdpqTbc9bo92iyi0rOvmATKiwl+amjxg==", + "requires": { + "debug": "^4.1.1", + "eslint-visitor-keys": "^1.1.0", + "glob": "^7.1.6", + "is-glob": "^4.0.1", + "lodash.unescape": "4.0.1", + "semver": "^6.3.0", + "tsutils": "^3.17.1" + } + }, + "@webassemblyjs/ast": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.8.5.tgz", + "integrity": "sha512-aJMfngIZ65+t71C3y2nBBg5FFG0Okt9m0XEgWZ7Ywgn1oMAT8cNwx00Uv1cQyHtidq0Xn94R4TAywO+LCQ+ZAQ==", + "requires": { + "@webassemblyjs/helper-module-context": "1.8.5", + "@webassemblyjs/helper-wasm-bytecode": "1.8.5", + "@webassemblyjs/wast-parser": "1.8.5" + } + }, + "@webassemblyjs/floating-point-hex-parser": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.8.5.tgz", + "integrity": "sha512-9p+79WHru1oqBh9ewP9zW95E3XAo+90oth7S5Re3eQnECGq59ly1Ri5tsIipKGpiStHsUYmY3zMLqtk3gTcOtQ==" + }, + "@webassemblyjs/helper-api-error": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.8.5.tgz", + "integrity": "sha512-Za/tnzsvnqdaSPOUXHyKJ2XI7PDX64kWtURyGiJJZKVEdFOsdKUCPTNEVFZq3zJ2R0G5wc2PZ5gvdTRFgm81zA==" + }, + "@webassemblyjs/helper-buffer": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.8.5.tgz", + "integrity": "sha512-Ri2R8nOS0U6G49Q86goFIPNgjyl6+oE1abW1pS84BuhP1Qcr5JqMwRFT3Ah3ADDDYGEgGs1iyb1DGX+kAi/c/Q==" + }, + "@webassemblyjs/helper-code-frame": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.8.5.tgz", + "integrity": "sha512-VQAadSubZIhNpH46IR3yWO4kZZjMxN1opDrzePLdVKAZ+DFjkGD/rf4v1jap744uPVU6yjL/smZbRIIJTOUnKQ==", + "requires": { + "@webassemblyjs/wast-printer": "1.8.5" + } + }, + "@webassemblyjs/helper-fsm": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-fsm/-/helper-fsm-1.8.5.tgz", + "integrity": "sha512-kRuX/saORcg8se/ft6Q2UbRpZwP4y7YrWsLXPbbmtepKr22i8Z4O3V5QE9DbZK908dh5Xya4Un57SDIKwB9eow==" + }, + "@webassemblyjs/helper-module-context": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-module-context/-/helper-module-context-1.8.5.tgz", + "integrity": "sha512-/O1B236mN7UNEU4t9X7Pj38i4VoU8CcMHyy3l2cV/kIF4U5KoHXDVqcDuOs1ltkac90IM4vZdHc52t1x8Yfs3g==", + "requires": { + "@webassemblyjs/ast": "1.8.5", + "mamacro": "^0.0.3" + } + }, + "@webassemblyjs/helper-wasm-bytecode": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.8.5.tgz", + "integrity": "sha512-Cu4YMYG3Ddl72CbmpjU/wbP6SACcOPVbHN1dI4VJNJVgFwaKf1ppeFJrwydOG3NDHxVGuCfPlLZNyEdIYlQ6QQ==" + }, + "@webassemblyjs/helper-wasm-section": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.8.5.tgz", + "integrity": "sha512-VV083zwR+VTrIWWtgIUpqfvVdK4ff38loRmrdDBgBT8ADXYsEZ5mPQ4Nde90N3UYatHdYoDIFb7oHzMncI02tA==", + "requires": { + "@webassemblyjs/ast": "1.8.5", + 
"@webassemblyjs/helper-buffer": "1.8.5", + "@webassemblyjs/helper-wasm-bytecode": "1.8.5", + "@webassemblyjs/wasm-gen": "1.8.5" + } + }, + "@webassemblyjs/ieee754": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.8.5.tgz", + "integrity": "sha512-aaCvQYrvKbY/n6wKHb/ylAJr27GglahUO89CcGXMItrOBqRarUMxWLJgxm9PJNuKULwN5n1csT9bYoMeZOGF3g==", + "requires": { + "@xtuc/ieee754": "^1.2.0" + } + }, + "@webassemblyjs/leb128": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.8.5.tgz", + "integrity": "sha512-plYUuUwleLIziknvlP8VpTgO4kqNaH57Y3JnNa6DLpu/sGcP6hbVdfdX5aHAV716pQBKrfuU26BJK29qY37J7A==", + "requires": { + "@xtuc/long": "4.2.2" + } + }, + "@webassemblyjs/utf8": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.8.5.tgz", + "integrity": "sha512-U7zgftmQriw37tfD934UNInokz6yTmn29inT2cAetAsaU9YeVCveWEwhKL1Mg4yS7q//NGdzy79nlXh3bT8Kjw==" + }, + "@webassemblyjs/wasm-edit": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.8.5.tgz", + "integrity": "sha512-A41EMy8MWw5yvqj7MQzkDjU29K7UJq1VrX2vWLzfpRHt3ISftOXqrtojn7nlPsZ9Ijhp5NwuODuycSvfAO/26Q==", + "requires": { + "@webassemblyjs/ast": "1.8.5", + "@webassemblyjs/helper-buffer": "1.8.5", + "@webassemblyjs/helper-wasm-bytecode": "1.8.5", + "@webassemblyjs/helper-wasm-section": "1.8.5", + "@webassemblyjs/wasm-gen": "1.8.5", + "@webassemblyjs/wasm-opt": "1.8.5", + "@webassemblyjs/wasm-parser": "1.8.5", + "@webassemblyjs/wast-printer": "1.8.5" + } + }, + "@webassemblyjs/wasm-gen": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.8.5.tgz", + "integrity": "sha512-BCZBT0LURC0CXDzj5FXSc2FPTsxwp3nWcqXQdOZE4U7h7i8FqtFK5Egia6f9raQLpEKT1VL7zr4r3+QX6zArWg==", + "requires": { + "@webassemblyjs/ast": "1.8.5", + "@webassemblyjs/helper-wasm-bytecode": "1.8.5", + "@webassemblyjs/ieee754": "1.8.5", + "@webassemblyjs/leb128": "1.8.5", + "@webassemblyjs/utf8": "1.8.5" + } + }, + "@webassemblyjs/wasm-opt": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.8.5.tgz", + "integrity": "sha512-HKo2mO/Uh9A6ojzu7cjslGaHaUU14LdLbGEKqTR7PBKwT6LdPtLLh9fPY33rmr5wcOMrsWDbbdCHq4hQUdd37Q==", + "requires": { + "@webassemblyjs/ast": "1.8.5", + "@webassemblyjs/helper-buffer": "1.8.5", + "@webassemblyjs/wasm-gen": "1.8.5", + "@webassemblyjs/wasm-parser": "1.8.5" + } + }, + "@webassemblyjs/wasm-parser": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.8.5.tgz", + "integrity": "sha512-pi0SYE9T6tfcMkthwcgCpL0cM9nRYr6/6fjgDtL6q/ZqKHdMWvxitRi5JcZ7RI4SNJJYnYNaWy5UUrHQy998lw==", + "requires": { + "@webassemblyjs/ast": "1.8.5", + "@webassemblyjs/helper-api-error": "1.8.5", + "@webassemblyjs/helper-wasm-bytecode": "1.8.5", + "@webassemblyjs/ieee754": "1.8.5", + "@webassemblyjs/leb128": "1.8.5", + "@webassemblyjs/utf8": "1.8.5" + } + }, + "@webassemblyjs/wast-parser": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-parser/-/wast-parser-1.8.5.tgz", + "integrity": "sha512-daXC1FyKWHF1i11obK086QRlsMsY4+tIOKgBqI1lxAnkp9xe9YMcgOxm9kLe+ttjs5aWV2KKE1TWJCN57/Btsg==", + "requires": { + "@webassemblyjs/ast": "1.8.5", + "@webassemblyjs/floating-point-hex-parser": "1.8.5", + "@webassemblyjs/helper-api-error": "1.8.5", + "@webassemblyjs/helper-code-frame": "1.8.5", + "@webassemblyjs/helper-fsm": "1.8.5", + "@xtuc/long": 
"4.2.2" + } + }, + "@webassemblyjs/wast-printer": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.8.5.tgz", + "integrity": "sha512-w0U0pD4EhlnvRyeJzBqaVSJAo9w/ce7/WPogeXLzGkO6hzhr4GnQIZ4W4uUt5b9ooAaXPtnXlj0gzsXEOUNYMg==", + "requires": { + "@webassemblyjs/ast": "1.8.5", + "@webassemblyjs/wast-parser": "1.8.5", + "@xtuc/long": "4.2.2" + } + }, + "@xtuc/ieee754": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", + "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==" + }, + "@xtuc/long": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", + "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==" + }, + "accepts": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz", + "integrity": "sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==", + "requires": { + "mime-types": "~2.1.24", + "negotiator": "0.6.2" + } + }, + "acorn": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.0.tgz", + "integrity": "sha512-kL5CuoXA/dgxlBbVrflsflzQ3PAas7RYZB52NOm/6839iVYJgKMJ3cQJD+t2i5+qFa8h3MDpEOJiS64E8JLnSQ==" + }, + "acorn-jsx": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.1.0.tgz", + "integrity": "sha512-tMUqwBWfLFbJbizRmEcWSLw6HnFzfdJs2sOJEOwwtVPMoH/0Ay+E703oZz78VSXZiiDcZrQ5XKjPIUQixhmgVw==" + }, + "address": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/address/-/address-1.1.2.tgz", + "integrity": "sha512-aT6camzM4xEA54YVJYSqxz1kv4IHnQZRtThJJHhUMRExaU5spC7jX5ugSwTaTgJliIgs4VhZOk7htClvQ/LmRA==" + }, + "after": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/after/-/after-0.8.2.tgz", + "integrity": "sha1-/ts5T58OAqqXaOcCvaI7UF+ufh8=" + }, + "aggregate-error": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.0.1.tgz", + "integrity": "sha512-quoaXsZ9/BLNae5yiNoUz+Nhkwz83GhWwtYFglcjEQB2NDHCIpApbqXxIFnm4Pq/Nvhrsq5sYJFyohrrxnTGAA==", + "requires": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + } + }, + "ajv": { + "version": "6.10.2", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.2.tgz", + "integrity": "sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw==", + "requires": { + "fast-deep-equal": "^2.0.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ajv-errors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ajv-errors/-/ajv-errors-1.0.1.tgz", + "integrity": "sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ==" + }, + "ajv-keywords": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.4.1.tgz", + "integrity": "sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ==" + }, + "alphanum-sort": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/alphanum-sort/-/alphanum-sort-1.0.2.tgz", + "integrity": "sha1-l6ERlkmyEa0zaR2fn0hqjsn74KM=" + }, + "ansi-align": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.0.tgz", + "integrity": 
"sha512-ZpClVKqXN3RGBmKibdfWzqCY4lnjEuoNzU5T0oEFpfd/z5qJHVarukridD4juLO2FXMiwUQxr9WqQtaYa8XRYw==", + "requires": { + "string-width": "^3.0.0" + }, + "dependencies": { + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + } + } + }, + "ansi-colors": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-3.2.4.tgz", + "integrity": "sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA==" + }, + "ansi-escapes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.0.tgz", + "integrity": "sha512-EiYhwo0v255HUL6eDyuLrXEkTi7WwVCLAw+SeOQ7M7qdun1z1pum4DEm/nuqIVbPvi9RPPc9k9LbyBv6H0DwVg==", + "requires": { + "type-fest": "^0.8.1" + } + }, + "ansi-html": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/ansi-html/-/ansi-html-0.0.7.tgz", + "integrity": "sha1-gTWEAhliqenm/QOflA0S9WynhZ4=" + }, + "ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==" + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "requires": { + "color-convert": "^1.9.0" + } + }, + "any-base": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/any-base/-/any-base-1.1.0.tgz", + "integrity": "sha512-uMgjozySS8adZZYePpaWs8cxB9/kdzmpX6SgJZ+wbz1K5eYk5QMYDVJaZKhxyIHUdnnJkfR7SVgStgH7LkGUyg==" + }, + "any-promise": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", + "integrity": "sha1-q8av7tzqUugJzcA3au0845Y10X8=" + }, + "anymatch": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", + "integrity": "sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==", + "requires": { + "micromatch": "^3.1.4", + "normalize-path": "^2.1.1" + } + }, + "aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==" + }, + "arch": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/arch/-/arch-2.1.1.tgz", + "integrity": "sha512-BLM56aPo9vLLFVa8+/+pJLnrZ7QGGTVHWsCwieAWT9o9K8UeGaQbzZbGoabWLOo2ksBCztoXdqBZBplqLDDCSg==" + }, + "archive-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/archive-type/-/archive-type-4.0.0.tgz", + "integrity": "sha1-+S5yIzBW38aWlHJ0nCZ72wRrHXA=", + "requires": { + 
"file-type": "^4.2.0" + }, + "dependencies": { + "file-type": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-4.4.0.tgz", + "integrity": "sha1-G2AOX8ofvcboDApwxxyNul95BsU=" + } + } + }, + "are-we-there-yet": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz", + "integrity": "sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==", + "requires": { + "delegates": "^1.0.0", + "readable-stream": "^2.0.6" + } + }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "requires": { + "sprintf-js": "~1.0.2" + } + }, + "aria-query": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-3.0.0.tgz", + "integrity": "sha1-ZbP8wcoRVajJrmTW7uKX8V1RM8w=", + "requires": { + "ast-types-flow": "0.0.7", + "commander": "^2.11.0" + } + }, + "arr-diff": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz", + "integrity": "sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=" + }, + "arr-flatten": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz", + "integrity": "sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==" + }, + "arr-union": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/arr-union/-/arr-union-3.1.0.tgz", + "integrity": "sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=" + }, + "array-filter": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/array-filter/-/array-filter-0.0.1.tgz", + "integrity": "sha1-fajPLiZijtcygDWB/SH2fKzS7uw=" + }, + "array-find-index": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz", + "integrity": "sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E=" + }, + "array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" + }, + "array-includes": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.1.tgz", + "integrity": "sha512-c2VXaCHl7zPsvpkFsw4nxvFie4fh1ur9bpcgsVkIjqn0H/Xwdg+7fv3n2r/isyS8EBj5b06M9kHyZuIr4El6WQ==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0", + "is-string": "^1.0.5" + } + }, + "array-iterate": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/array-iterate/-/array-iterate-1.1.3.tgz", + "integrity": "sha512-7MIv7HE9MuzfK6B2UnWv07oSHBLOaY1UUXAxZ07bIeRM+4IkPTlveMDs9MY//qvxPZPSvCn2XV4bmtQgSkVodg==" + }, + "array-map": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/array-map/-/array-map-0.0.0.tgz", + "integrity": "sha1-iKK6tz0c97zVwbEYoAP2b2ZfpmI=" + }, + "array-reduce": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/array-reduce/-/array-reduce-0.0.0.tgz", + "integrity": "sha1-FziZ0//Rx9k4PkR5Ul2+J4yrXys=" + }, + "array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==" + }, + "array-uniq": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/array-uniq/-/array-uniq-1.0.3.tgz", + "integrity": 
"sha1-r2rId6Jcx/dOBYiUdThY39sk/bY=" + }, + "array-unique": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz", + "integrity": "sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=" + }, + "array.prototype.flat": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.3.tgz", + "integrity": "sha512-gBlRZV0VSmfPIeWfuuy56XZMvbVfbEUnOXUvt3F/eUUUSyzlgLxhEX4YAEpxNAogRGehPSnfXyPtYyKAhkzQhQ==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1" + } + }, + "arraybuffer.slice": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz", + "integrity": "sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog==" + }, + "arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "optional": true + }, + "asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=" + }, + "asn1": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", + "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", + "requires": { + "safer-buffer": "~2.1.0" + } + }, + "asn1.js": { + "version": "4.10.1", + "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-4.10.1.tgz", + "integrity": "sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw==", + "requires": { + "bn.js": "^4.0.0", + "inherits": "^2.0.1", + "minimalistic-assert": "^1.0.0" + } + }, + "assert": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/assert/-/assert-1.5.0.tgz", + "integrity": "sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA==", + "requires": { + "object-assign": "^4.1.1", + "util": "0.10.3" + }, + "dependencies": { + "inherits": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", + "integrity": "sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE=" + }, + "util": { + "version": "0.10.3", + "resolved": "https://registry.npmjs.org/util/-/util-0.10.3.tgz", + "integrity": "sha1-evsa/lCAUkZInj23/g7TeTNqwPk=", + "requires": { + "inherits": "2.0.1" + } + } + } + }, + "assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" + }, + "assign-symbols": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz", + "integrity": "sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c=" + }, + "ast-types-flow": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz", + "integrity": "sha1-9wtzXGvKGlycItmCw+Oef+ujva0=" + }, + "astral-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-1.0.0.tgz", + "integrity": "sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==" + }, + "async": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz", + "integrity": "sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo=" + }, + "async-each": { + "version": "1.0.3", + "resolved": 
"https://registry.npmjs.org/async-each/-/async-each-1.0.3.tgz", + "integrity": "sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ==" + }, + "async-limiter": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.1.tgz", + "integrity": "sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==" + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" + }, + "atob": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz", + "integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==" + }, + "auto-bind": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/auto-bind/-/auto-bind-3.0.0.tgz", + "integrity": "sha512-v0A231a/lfOo6kxQtmEkdBfTApvC21aJYukA8pkKnoTvVqh3Wmm7/Rwy4GBCHTTHVoLVA5qsBDDvf1XY1nIV2g==", + "optional": true + }, + "autoprefixer": { + "version": "9.7.3", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-9.7.3.tgz", + "integrity": "sha512-8T5Y1C5Iyj6PgkPSFd0ODvK9DIleuPKUPYniNxybS47g2k2wFgLZ46lGQHlBuGKIAEV8fbCDfKCCRS1tvOgc3Q==", + "requires": { + "browserslist": "^4.8.0", + "caniuse-lite": "^1.0.30001012", + "chalk": "^2.4.2", + "normalize-range": "^0.1.2", + "num2fraction": "^1.2.2", + "postcss": "^7.0.23", + "postcss-value-parser": "^4.0.2" + }, + "dependencies": { + "browserslist": { + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.8.3.tgz", + "integrity": "sha512-iU43cMMknxG1ClEZ2MDKeonKE1CCrFVkQK2AqO2YWFmvIrx4JWrvQ4w4hQez6EpVI8rHTtqh/ruHHDHSOKxvUg==", + "requires": { + "caniuse-lite": "^1.0.30001017", + "electron-to-chromium": "^1.3.322", + "node-releases": "^1.1.44" + } + } + } + }, + "aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" + }, + "aws4": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.9.0.tgz", + "integrity": "sha512-Uvq6hVe90D0B2WEnUqtdgY1bATGz3mw33nH9Y+dmA+w5DHvUmBgkr5rM/KCHpCsiFNRUfokW/szpPPgMK2hm4A==" + }, + "axios": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.19.0.tgz", + "integrity": "sha512-1uvKqKQta3KBxIz14F2v06AEHZ/dIoeKfbTRkK1E5oqjDnuEerLmYTgJB5AiQZHJcljpg1TuRzdjDR06qNk0DQ==", + "requires": { + "follow-redirects": "1.5.10", + "is-buffer": "^2.0.2" + } + }, + "axobject-query": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-2.1.1.tgz", + "integrity": "sha512-lF98xa/yvy6j3fBHAgQXIYl+J4eZadOSqsPojemUqClzNbBV38wWGpUbQbVEyf4eUF5yF7eHmGgGA2JiHyjeqw==", + "requires": { + "@babel/runtime": "^7.7.4", + "@babel/runtime-corejs3": "^7.7.4" + } + }, + "babel-code-frame": { + "version": "6.26.0", + "resolved": "https://registry.npmjs.org/babel-code-frame/-/babel-code-frame-6.26.0.tgz", + "integrity": "sha1-Y/1D99weO7fONZR9uP42mj9Yx0s=", + "requires": { + "chalk": "^1.1.3", + "esutils": "^2.0.2", + "js-tokens": "^3.0.2" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" + }, + "ansi-styles": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", + "integrity": 
"sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=" + }, + "chalk": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", + "requires": { + "ansi-styles": "^2.2.1", + "escape-string-regexp": "^1.0.2", + "has-ansi": "^2.0.0", + "strip-ansi": "^3.0.0", + "supports-color": "^2.0.0" + } + }, + "js-tokens": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-3.0.2.tgz", + "integrity": "sha1-mGbfOVECEw449/mWvOtlRDIJwls=" + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "requires": { + "ansi-regex": "^2.0.0" + } + }, + "supports-color": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", + "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=" + } + } + }, + "babel-core": { + "version": "7.0.0-bridge.0", + "resolved": "https://registry.npmjs.org/babel-core/-/babel-core-7.0.0-bridge.0.tgz", + "integrity": "sha512-poPX9mZH/5CSanm50Q+1toVci6pv5KSRv/5TWCwtzQS5XEwn40BcCrgIeMFWP9CKKIniKXNxoIOnOq4VVlGXhg==" + }, + "babel-eslint": { + "version": "10.0.3", + "resolved": "https://registry.npmjs.org/babel-eslint/-/babel-eslint-10.0.3.tgz", + "integrity": "sha512-z3U7eMY6r/3f3/JB9mTsLjyxrv0Yb1zb8PCWCLpguxfCzBIZUwy23R1t/XKewP+8mEN2Ck8Dtr4q20z6ce6SoA==", + "requires": { + "@babel/code-frame": "^7.0.0", + "@babel/parser": "^7.0.0", + "@babel/traverse": "^7.0.0", + "@babel/types": "^7.0.0", + "eslint-visitor-keys": "^1.0.0", + "resolve": "^1.12.0" + } + }, + "babel-extract-comments": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/babel-extract-comments/-/babel-extract-comments-1.0.0.tgz", + "integrity": "sha512-qWWzi4TlddohA91bFwgt6zO/J0X+io7Qp184Fw0m2JYRSTZnJbFR8+07KmzudHCZgOiKRCrjhylwv9Xd8gfhVQ==", + "requires": { + "babylon": "^6.18.0" + } + }, + "babel-loader": { + "version": "8.0.6", + "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.0.6.tgz", + "integrity": "sha512-4BmWKtBOBm13uoUwd08UwjZlaw3O9GWf456R9j+5YykFZ6LUIjIKLc0zEZf+hauxPOJs96C8k6FvYD09vWzhYw==", + "requires": { + "find-cache-dir": "^2.0.0", + "loader-utils": "^1.0.2", + "mkdirp": "^0.5.1", + "pify": "^4.0.1" + } + }, + "babel-plugin-add-module-exports": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/babel-plugin-add-module-exports/-/babel-plugin-add-module-exports-0.3.3.tgz", + "integrity": "sha512-hC37mm7aAdEb1n8SgggG8a1QuhZapsY/XLCi4ETSH6AVjXBCWEa50CXlOsAMPPWLnSx5Ns6mzz39uvuseh0Xjg==", + "requires": { + "chokidar": "^2.0.4" + }, + "dependencies": { + "chokidar": { + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", + "integrity": "sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==", + "optional": true, + "requires": { + "anymatch": "^2.0.0", + "async-each": "^1.0.1", + "braces": "^2.3.2", + "fsevents": "^1.2.7", + "glob-parent": "^3.1.0", + "inherits": "^2.0.3", + "is-binary-path": "^1.0.0", + "is-glob": "^4.0.0", + "normalize-path": "^3.0.0", + "path-is-absolute": "^1.0.0", + "readdirp": "^2.2.1", + "upath": "^1.1.1" + } + }, + "glob-parent": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", + "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", + "optional": true, + "requires": { + "is-glob": "^3.1.0", + "path-dirname": "^1.0.0" + }, + "dependencies": { + 
"is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", + "optional": true, + "requires": { + "is-extglob": "^2.1.0" + } + } + } + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "optional": true + } + } + }, + "babel-plugin-dynamic-import-node": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz", + "integrity": "sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ==", + "requires": { + "object.assign": "^4.1.0" + } + }, + "babel-plugin-emotion": { + "version": "10.0.27", + "resolved": "https://registry.npmjs.org/babel-plugin-emotion/-/babel-plugin-emotion-10.0.27.tgz", + "integrity": "sha512-SUNYcT4FqhOqvwv0z1oeYhqgheU8qrceLojuHyX17ngo7WtWqN5I9l3IGHzf21Xraj465CVzF4IvOlAF+3ed0A==", + "requires": { + "@babel/helper-module-imports": "^7.0.0", + "@emotion/hash": "0.7.4", + "@emotion/memoize": "0.7.4", + "@emotion/serialize": "^0.11.15", + "babel-plugin-macros": "^2.0.0", + "babel-plugin-syntax-jsx": "^6.18.0", + "convert-source-map": "^1.5.0", + "escape-string-regexp": "^1.0.5", + "find-root": "^1.1.0", + "source-map": "^0.5.7" + } + }, + "babel-plugin-macros": { + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/babel-plugin-macros/-/babel-plugin-macros-2.8.0.tgz", + "integrity": "sha512-SEP5kJpfGYqYKpBrj5XU3ahw5p5GOHJ0U5ssOSQ/WBVdwkD2Dzlce95exQTs3jOVWPPKLBN2rlEWkCK7dSmLvg==", + "requires": { + "@babel/runtime": "^7.7.2", + "cosmiconfig": "^6.0.0", + "resolve": "^1.12.0" + } + }, + "babel-plugin-remove-graphql-queries": { + "version": "2.7.19", + "resolved": "https://registry.npmjs.org/babel-plugin-remove-graphql-queries/-/babel-plugin-remove-graphql-queries-2.7.19.tgz", + "integrity": "sha512-/DS620ztyyrqrqjmz/KHDt++ktn+4RdvfDf5KCUmt6iJOglgNm7uHkE+snuvvL/xhNNuuPBLErc23Q0cR6MSiQ==" + }, + "babel-plugin-styled-components": { + "version": "1.10.6", + "resolved": "https://registry.npmjs.org/babel-plugin-styled-components/-/babel-plugin-styled-components-1.10.6.tgz", + "integrity": "sha512-gyQj/Zf1kQti66100PhrCRjI5ldjaze9O0M3emXRPAN80Zsf8+e1thpTpaXJXVHXtaM4/+dJEgZHyS9Its+8SA==", + "requires": { + "@babel/helper-annotate-as-pure": "^7.0.0", + "@babel/helper-module-imports": "^7.0.0", + "babel-plugin-syntax-jsx": "^6.18.0", + "lodash": "^4.17.11" + } + }, + "babel-plugin-syntax-jsx": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/babel-plugin-syntax-jsx/-/babel-plugin-syntax-jsx-6.18.0.tgz", + "integrity": "sha1-CvMqmm4Tyno/1QaeYtew9Y0NiUY=" + }, + "babel-plugin-syntax-object-rest-spread": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/babel-plugin-syntax-object-rest-spread/-/babel-plugin-syntax-object-rest-spread-6.13.0.tgz", + "integrity": "sha1-/WU28rzhODb/o6VFjEkDpZe7O/U=" + }, + "babel-plugin-transform-object-rest-spread": { + "version": "6.26.0", + "resolved": "https://registry.npmjs.org/babel-plugin-transform-object-rest-spread/-/babel-plugin-transform-object-rest-spread-6.26.0.tgz", + "integrity": "sha1-DzZpLVD+9rfi1LOsFHgTepY7ewY=", + "requires": { + "babel-plugin-syntax-object-rest-spread": "^6.8.0", + "babel-runtime": "^6.26.0" + } + }, + "babel-plugin-transform-react-remove-prop-types": { + "version": 
"0.4.24", + "resolved": "https://registry.npmjs.org/babel-plugin-transform-react-remove-prop-types/-/babel-plugin-transform-react-remove-prop-types-0.4.24.tgz", + "integrity": "sha512-eqj0hVcJUR57/Ug2zE1Yswsw4LhuqqHhD+8v120T1cl3kjg76QwtyBrdIk4WVwK+lAhBJVYCd/v+4nc4y+8JsA==" + }, + "babel-preset-gatsby": { + "version": "0.2.26", + "resolved": "https://registry.npmjs.org/babel-preset-gatsby/-/babel-preset-gatsby-0.2.26.tgz", + "integrity": "sha512-qOM26AhAPW5xetUj579jBFICg16sqFHf3dPptRXi3zS7HpEHbtsOvB9VB68MEUj+WZrrlbR/EQuT69GA1XiBdQ==", + "requires": { + "@babel/plugin-proposal-class-properties": "^7.7.4", + "@babel/plugin-proposal-nullish-coalescing-operator": "^7.7.4", + "@babel/plugin-proposal-optional-chaining": "^7.7.5", + "@babel/plugin-syntax-dynamic-import": "^7.7.4", + "@babel/plugin-transform-runtime": "^7.7.6", + "@babel/plugin-transform-spread": "^7.7.4", + "@babel/preset-env": "^7.7.6", + "@babel/preset-react": "^7.7.4", + "@babel/runtime": "^7.7.6", + "babel-plugin-dynamic-import-node": "^2.3.0", + "babel-plugin-macros": "^2.8.0", + "babel-plugin-transform-react-remove-prop-types": "^0.4.24" + } + }, + "babel-runtime": { + "version": "6.26.0", + "resolved": "https://registry.npmjs.org/babel-runtime/-/babel-runtime-6.26.0.tgz", + "integrity": "sha1-llxwWGaOgrVde/4E/yM3vItWR/4=", + "requires": { + "core-js": "^2.4.0", + "regenerator-runtime": "^0.11.0" + }, + "dependencies": { + "regenerator-runtime": { + "version": "0.11.1", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz", + "integrity": "sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg==" + } + } + }, + "babylon": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/babylon/-/babylon-6.18.0.tgz", + "integrity": "sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ==" + }, + "backo2": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/backo2/-/backo2-1.0.2.tgz", + "integrity": "sha1-MasayLEpNjRj41s+u2n038+6eUc=" + }, + "bail": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/bail/-/bail-1.0.4.tgz", + "integrity": "sha512-S8vuDB4w6YpRhICUDET3guPlQpaJl7od94tpZ0Fvnyp+MKW/HyDTcRDck+29C9g+d/qQHnddRH3+94kZdrW0Ww==" + }, + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" + }, + "base": { + "version": "0.11.2", + "resolved": "https://registry.npmjs.org/base/-/base-0.11.2.tgz", + "integrity": "sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==", + "requires": { + "cache-base": "^1.0.1", + "class-utils": "^0.3.5", + "component-emitter": "^1.2.1", + "define-property": "^1.0.0", + "isobject": "^3.0.1", + "mixin-deep": "^1.2.0", + "pascalcase": "^0.1.1" + }, + "dependencies": { + "define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", + "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", + "requires": { + "is-descriptor": "^1.0.0" + } + }, + "is-accessor-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", + "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-data-descriptor": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", + "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-descriptor": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", + "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", + "requires": { + "is-accessor-descriptor": "^1.0.0", + "is-data-descriptor": "^1.0.0", + "kind-of": "^6.0.2" + } + } + } + }, + "base64-arraybuffer": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz", + "integrity": "sha1-c5JncZI7Whl0etZmqlzUv5xunOg=" + }, + "base64-js": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.1.tgz", + "integrity": "sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g==" + }, + "base64id": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/base64id/-/base64id-2.0.0.tgz", + "integrity": "sha512-lGe34o6EHj9y3Kts9R4ZYs/Gr+6N7MCaMlIFA3F1R2O5/m7K06AxfSeO5530PEERE6/WyEg3lsuyw4GHlPZHog==" + }, + "batch": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz", + "integrity": "sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY=" + }, + "bcrypt-pbkdf": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "requires": { + "tweetnacl": "^0.14.3" + } + }, + "better-assert": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", + "integrity": "sha1-QIZrnhueC1W0gYlDEeaPr/rrxSI=", + "requires": { + "callsite": "1.0.0" + } + }, + "better-opn": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/better-opn/-/better-opn-1.0.0.tgz", + "integrity": "sha512-q3eO2se4sFbTERB1dFBDdjTiIIpRohMErpwBX21lhPvmgmQNNrcQj0zbWRhMREDesJvyod9kxBS3kOtdAvkB/A==", + "requires": { + "open": "^6.4.0" + } + }, + "better-queue": { + "version": "3.8.10", + "resolved": "https://registry.npmjs.org/better-queue/-/better-queue-3.8.10.tgz", + "integrity": "sha512-e3gwNZgDCnNWl0An0Tz6sUjKDV9m6aB+K9Xg//vYeo8+KiH8pWhLFxkawcXhm6FpM//GfD9IQv/kmvWCAVVpKA==", + "requires": { + "better-queue-memory": "^1.0.1", + "node-eta": "^0.9.0", + "uuid": "^3.0.0" + } + }, + "better-queue-memory": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/better-queue-memory/-/better-queue-memory-1.0.4.tgz", + "integrity": "sha512-SWg5wFIShYffEmJpI6LgbL8/3Dqhku7xI1oEiy6FroP9DbcZlG0ZDjxvPdP9t7hTGW40IpIcC6zVoGT1oxjOuA==" + }, + "big.js": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", + "integrity": "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==" + }, + "bin-build": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bin-build/-/bin-build-3.0.0.tgz", + "integrity": "sha512-jcUOof71/TNAI2uM5uoUaDq2ePcVBQ3R/qhxAz1rX7UfvduAL/RXD3jXzvn8cVcDJdGVkiR1shal3OH0ImpuhA==", + "requires": { + "decompress": "^4.0.0", + "download": "^6.2.2", + "execa": "^0.7.0", + "p-map-series": "^1.0.0", + "tempfile": "^2.0.0" + }, + "dependencies": { + "cross-spawn": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", + "integrity": "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=", + 
"requires": { + "lru-cache": "^4.0.1", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "download": { + "version": "6.2.5", + "resolved": "https://registry.npmjs.org/download/-/download-6.2.5.tgz", + "integrity": "sha512-DpO9K1sXAST8Cpzb7kmEhogJxymyVUd5qz/vCOSyvwtp2Klj2XcDt5YUuasgxka44SxF0q5RriKIwJmQHG2AuA==", + "requires": { + "caw": "^2.0.0", + "content-disposition": "^0.5.2", + "decompress": "^4.0.0", + "ext-name": "^5.0.0", + "file-type": "5.2.0", + "filenamify": "^2.0.0", + "get-stream": "^3.0.0", + "got": "^7.0.0", + "make-dir": "^1.0.0", + "p-event": "^1.0.0", + "pify": "^3.0.0" + } + }, + "execa": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz", + "integrity": "sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c=", + "requires": { + "cross-spawn": "^5.0.1", + "get-stream": "^3.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + } + }, + "file-type": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz", + "integrity": "sha1-LdvqfHP/42No365J3DOMBYwritY=" + }, + "get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" + }, + "got": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/got/-/got-7.1.0.tgz", + "integrity": "sha512-Y5WMo7xKKq1muPsxD+KmrR8DH5auG7fBdDVueZwETwV6VytKyU9OX/ddpq2/1hp1vIPvVb4T81dKQz3BivkNLw==", + "requires": { + "decompress-response": "^3.2.0", + "duplexer3": "^0.1.4", + "get-stream": "^3.0.0", + "is-plain-obj": "^1.1.0", + "is-retry-allowed": "^1.0.0", + "is-stream": "^1.0.0", + "isurl": "^1.0.0-alpha5", + "lowercase-keys": "^1.0.0", + "p-cancelable": "^0.3.0", + "p-timeout": "^1.1.1", + "safe-buffer": "^5.0.1", + "timed-out": "^4.0.0", + "url-parse-lax": "^1.0.0", + "url-to-options": "^1.0.1" + } + }, + "lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "requires": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "make-dir": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz", + "integrity": "sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==", + "requires": { + "pify": "^3.0.0" + } + }, + "p-cancelable": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-0.3.0.tgz", + "integrity": "sha512-RVbZPLso8+jFeq1MfNvgXtCRED2raz/dKpacfTNxsx6pLEpEomM7gah6VeHSYV3+vo0OAi4MkArtQcWWXuQoyw==" + }, + "p-event": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-event/-/p-event-1.3.0.tgz", + "integrity": "sha1-jmtPT2XHK8W2/ii3XtqHT5akoIU=", + "requires": { + "p-timeout": "^1.1.1" + } + }, + "p-timeout": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-1.2.1.tgz", + "integrity": "sha1-XrOzU7f86Z8QGhA4iAuwVOu+o4Y=", + "requires": { + "p-finally": "^1.0.0" + } + }, + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + }, + "prepend-http": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-1.0.4.tgz", + "integrity": "sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw=" + }, + "url-parse-lax": { + 
"version": "1.0.0", + "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-1.0.0.tgz", + "integrity": "sha1-evjzA2Rem9eaJy56FKxovAYJ2nM=", + "requires": { + "prepend-http": "^1.0.1" + } + } + } + }, + "bin-check": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bin-check/-/bin-check-4.1.0.tgz", + "integrity": "sha512-b6weQyEUKsDGFlACWSIOfveEnImkJyK/FGW6FAG42loyoquvjdtOIqO6yBFzHyqyVVhNgNkQxxx09SFLK28YnA==", + "requires": { + "execa": "^0.7.0", + "executable": "^4.1.0" + }, + "dependencies": { + "cross-spawn": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", + "integrity": "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=", + "requires": { + "lru-cache": "^4.0.1", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "execa": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz", + "integrity": "sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c=", + "requires": { + "cross-spawn": "^5.0.1", + "get-stream": "^3.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + } + }, + "get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" + }, + "lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "requires": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + } + } + }, + "bin-version": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/bin-version/-/bin-version-3.1.0.tgz", + "integrity": "sha512-Mkfm4iE1VFt4xd4vH+gx+0/71esbfus2LsnCGe8Pi4mndSPyT+NGES/Eg99jx8/lUGWfu3z2yuB/bt5UB+iVbQ==", + "requires": { + "execa": "^1.0.0", + "find-versions": "^3.0.0" + }, + "dependencies": { + "execa": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", + "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", + "requires": { + "cross-spawn": "^6.0.0", + "get-stream": "^4.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + } + } + } + }, + "bin-version-check": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/bin-version-check/-/bin-version-check-4.0.0.tgz", + "integrity": "sha512-sR631OrhC+1f8Cvs8WyVWOA33Y8tgwjETNPyyD/myRBXLkfS/vl74FmH/lFcRl9KY3zwGh7jFhvyk9vV3/3ilQ==", + "requires": { + "bin-version": "^3.0.0", + "semver": "^5.6.0", + "semver-truncate": "^1.1.2" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "bin-wrapper": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bin-wrapper/-/bin-wrapper-4.1.0.tgz", + "integrity": "sha512-hfRmo7hWIXPkbpi0ZltboCMVrU+0ClXR/JgbCKKjlDjQf6igXa7OwdqNcFWQZPZTgiY7ZpzE3+LjjkLiTN2T7Q==", + "requires": { + "bin-check": "^4.1.0", + "bin-version-check": "^4.0.0", + "download": "^7.1.0", + "import-lazy": "^3.1.0", + "os-filter-obj": "^2.0.0", + "pify": "^4.0.1" + }, + "dependencies": { + "import-lazy": { + "version": "3.1.0", + "resolved": 
"https://registry.npmjs.org/import-lazy/-/import-lazy-3.1.0.tgz", + "integrity": "sha512-8/gvXvX2JMn0F+CDlSC4l6kOmVaLOO3XLkksI7CI3Ud95KDYJuYur2b9P/PUt/i/pDAMd/DulQsNbbbmRRsDIQ==" + } + } + }, + "binary-extensions": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.13.1.tgz", + "integrity": "sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==" + }, + "bindings": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "optional": true, + "requires": { + "file-uri-to-path": "1.0.0" + } + }, + "bl": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-3.0.0.tgz", + "integrity": "sha512-EUAyP5UHU5hxF8BPT0LKW8gjYLhq1DQIcneOX/pL/m2Alo+OYDQAJlHq+yseMP50Os2nHXOSic6Ss3vSQeyf4A==", + "requires": { + "readable-stream": "^3.0.1" + }, + "dependencies": { + "readable-stream": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", + "integrity": "sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } + }, + "blob": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/blob/-/blob-0.0.5.tgz", + "integrity": "sha512-gaqbzQPqOoamawKg0LGVd7SzLgXS+JH61oWprSLH+P+abTczqJbhTR8CmJ2u9/bUYNmHTGJx/UEmn6doAvvuig==" + }, + "bluebird": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==" + }, + "bmp-js": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/bmp-js/-/bmp-js-0.1.0.tgz", + "integrity": "sha1-4Fpj95amwf8l9Hcex62twUjAcjM=" + }, + "bn.js": { + "version": "4.11.8", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.8.tgz", + "integrity": "sha512-ItfYfPLkWHUjckQCk8xC+LwxgK8NYcXywGigJgSwOP8Y2iyWT4f2vsZnoOXTTbo+o5yXmIUJ4gn5538SO5S3gA==" + }, + "body-parser": { + "version": "1.19.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz", + "integrity": "sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==", + "requires": { + "bytes": "3.1.0", + "content-type": "~1.0.4", + "debug": "2.6.9", + "depd": "~1.1.2", + "http-errors": "1.7.2", + "iconv-lite": "0.4.24", + "on-finished": "~2.3.0", + "qs": "6.7.0", + "raw-body": "2.4.0", + "type-is": "~1.6.17" + }, + "dependencies": { + "bytes": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", + "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==" + }, + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "bonjour": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/bonjour/-/bonjour-3.5.0.tgz", + "integrity": "sha1-jokKGD2O6aI5OzhExpGkK897yfU=", + "requires": { + 
"array-flatten": "^2.1.0", + "deep-equal": "^1.0.1", + "dns-equal": "^1.0.0", + "dns-txt": "^2.0.2", + "multicast-dns": "^6.0.1", + "multicast-dns-service-types": "^1.1.0" + }, + "dependencies": { + "array-flatten": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-2.1.2.tgz", + "integrity": "sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ==" + } + } + }, + "boolbase": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", + "integrity": "sha1-aN/1++YMUes3cl6p4+0xDcwed24=" + }, + "boxen": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/boxen/-/boxen-4.2.0.tgz", + "integrity": "sha512-eB4uT9RGzg2odpER62bBwSLvUeGC+WbRjjyyFhGsKnc8wp/m0+hQsMUvUe3H2V0D5vw0nBdO1hCJoZo5mKeuIQ==", + "requires": { + "ansi-align": "^3.0.0", + "camelcase": "^5.3.1", + "chalk": "^3.0.0", + "cli-boxes": "^2.2.0", + "string-width": "^4.1.0", + "term-size": "^2.1.0", + "type-fest": "^0.8.1", + "widest-line": "^3.1.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", + "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "requires": { + "@types/color-name": "^1.1.1", + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", + "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" + }, + "supports-color": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", + "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "requires": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + 
"split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, + "brorand": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz", + "integrity": "sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8=" + }, + "browserify-aes": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", + "integrity": "sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==", + "requires": { + "buffer-xor": "^1.0.3", + "cipher-base": "^1.0.0", + "create-hash": "^1.1.0", + "evp_bytestokey": "^1.0.3", + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "browserify-cipher": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/browserify-cipher/-/browserify-cipher-1.0.1.tgz", + "integrity": "sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w==", + "requires": { + "browserify-aes": "^1.0.4", + "browserify-des": "^1.0.0", + "evp_bytestokey": "^1.0.0" + } + }, + "browserify-des": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/browserify-des/-/browserify-des-1.0.2.tgz", + "integrity": "sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A==", + "requires": { + "cipher-base": "^1.0.1", + "des.js": "^1.0.0", + "inherits": "^2.0.1", + "safe-buffer": "^5.1.2" + } + }, + "browserify-rsa": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.0.1.tgz", + "integrity": "sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ=", + "requires": { + "bn.js": "^4.1.0", + "randombytes": "^2.0.1" + } + }, + "browserify-sign": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.0.4.tgz", + "integrity": "sha1-qk62jl17ZYuqa/alfmMMvXqT0pg=", + "requires": { + "bn.js": "^4.1.1", + "browserify-rsa": "^4.0.0", + "create-hash": "^1.1.0", + "create-hmac": "^1.1.2", + "elliptic": "^6.0.0", + "inherits": "^2.0.1", + "parse-asn1": "^5.0.0" + } + }, + "browserify-zlib": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.2.0.tgz", + "integrity": "sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA==", + "requires": { + "pako": "~1.0.5" + } + }, + "browserslist": { + "version": "3.2.8", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-3.2.8.tgz", + "integrity": "sha512-WHVocJYavUwVgVViC0ORikPHQquXwVh939TaelZ4WDqpWgTX/FsGhl/+P4qBUAGcRvtOgDgC+xftNWWp2RUTAQ==", + "requires": { + "caniuse-lite": "^1.0.30000844", + "electron-to-chromium": "^1.3.47" + } + }, + "buffer": { + "version": "4.9.2", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz", + "integrity": "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==", + "requires": { + "base64-js": "^1.0.2", + "ieee754": "^1.1.4", + "isarray": "^1.0.0" + } + }, + "buffer-alloc": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/buffer-alloc/-/buffer-alloc-1.2.0.tgz", + "integrity": "sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow==", + "requires": { + "buffer-alloc-unsafe": "^1.1.0", + "buffer-fill": "^1.0.0" + } + }, + 
"buffer-alloc-unsafe": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz", + "integrity": "sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg==" + }, + "buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI=" + }, + "buffer-equal": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal/-/buffer-equal-0.0.1.tgz", + "integrity": "sha1-kbx0sR6kBbyRa8aqkI+q+ltKrEs=" + }, + "buffer-fill": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/buffer-fill/-/buffer-fill-1.0.0.tgz", + "integrity": "sha1-+PeLdniYiO858gXNY39o5wISKyw=" + }, + "buffer-from": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", + "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==" + }, + "buffer-indexof": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/buffer-indexof/-/buffer-indexof-1.1.1.tgz", + "integrity": "sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g==" + }, + "buffer-xor": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", + "integrity": "sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk=" + }, + "builtin-modules": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz", + "integrity": "sha512-k0KL0aWZuBt2lrxrcASWDfwOLMnodeQjodT/1SxEQAXsHANgo6ZC/VEaSEHCXt7aSTZ4/4H5LKa+tBXmW7Vtvw==" + }, + "builtin-status-codes": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz", + "integrity": "sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug=" + }, + "bytes": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", + "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=" + }, + "cacache": { + "version": "12.0.3", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-12.0.3.tgz", + "integrity": "sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw==", + "requires": { + "bluebird": "^3.5.5", + "chownr": "^1.1.1", + "figgy-pudding": "^3.5.1", + "glob": "^7.1.4", + "graceful-fs": "^4.1.15", + "infer-owner": "^1.0.3", + "lru-cache": "^5.1.1", + "mississippi": "^3.0.0", + "mkdirp": "^0.5.1", + "move-concurrently": "^1.0.1", + "promise-inflight": "^1.0.1", + "rimraf": "^2.6.3", + "ssri": "^6.0.1", + "unique-filename": "^1.1.1", + "y18n": "^4.0.0" + }, + "dependencies": { + "lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "requires": { + "yallist": "^3.0.2" + } + }, + "yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" + } + } + }, + "cache-base": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz", + "integrity": "sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==", + "requires": { + "collection-visit": "^1.0.0", + "component-emitter": "^1.2.1", + 
"get-value": "^2.0.6", + "has-value": "^1.0.0", + "isobject": "^3.0.1", + "set-value": "^2.0.0", + "to-object-path": "^0.3.0", + "union-value": "^1.0.0", + "unset-value": "^1.0.0" + } + }, + "cache-manager": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/cache-manager/-/cache-manager-2.10.1.tgz", + "integrity": "sha512-bk17v9IkLqNcbCzggEh82LEJhjHp+COnL57L7a0ESbM/cOuXIIBatdVjD/ps7vOsofI48++zAC14Ye+8v50flg==", + "requires": { + "async": "1.5.2", + "lru-cache": "4.0.0" + } + }, + "cache-manager-fs-hash": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/cache-manager-fs-hash/-/cache-manager-fs-hash-0.0.7.tgz", + "integrity": "sha512-7X+FPItAJf1tKKqJx6ljDJQc0fgSR5B+KPxFQLj+vYSL4q9XdrCbZldgsNb6wueRuIooj01wt0FubB08zaefRg==", + "requires": { + "es6-promisify": "^6.0.0", + "lockfile": "^1.0.4" + } + }, + "cacheable-request": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz", + "integrity": "sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==", + "requires": { + "clone-response": "^1.0.2", + "get-stream": "^5.1.0", + "http-cache-semantics": "^4.0.0", + "keyv": "^3.0.0", + "lowercase-keys": "^2.0.0", + "normalize-url": "^4.1.0", + "responselike": "^1.0.2" + }, + "dependencies": { + "get-stream": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.1.0.tgz", + "integrity": "sha512-EXr1FOzrzTfGeL0gQdeFEvOMm2mzMOglyiOXSTpPC+iAjAKftbr3jpCMWynogwYnM+eSj9sHGc6wjIcDvYiygw==", + "requires": { + "pump": "^3.0.0" + } + }, + "lowercase-keys": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", + "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==" + }, + "normalize-url": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.0.tgz", + "integrity": "sha512-2s47yzUxdexf1OhyRi4Em83iQk0aPvwTddtFz4hnSSw9dCEsLEGf6SwIO8ss/19S9iBb5sJaOuTvTGDeZI00BQ==" + } + } + }, + "call-me-maybe": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/call-me-maybe/-/call-me-maybe-1.0.1.tgz", + "integrity": "sha1-JtII6onje1y95gJQoV8DHBak1ms=" + }, + "caller-callsite": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/caller-callsite/-/caller-callsite-2.0.0.tgz", + "integrity": "sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ=", + "requires": { + "callsites": "^2.0.0" + }, + "dependencies": { + "callsites": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-2.0.0.tgz", + "integrity": "sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA=" + } + } + }, + "caller-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/caller-path/-/caller-path-2.0.0.tgz", + "integrity": "sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ=", + "requires": { + "caller-callsite": "^2.0.0" + } + }, + "callsite": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz", + "integrity": "sha1-KAOY5dZkvXQDi28JBRU+borxvCA=" + }, + "callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==" + }, + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": 
"sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==" + }, + "camelcase-keys": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-2.1.0.tgz", + "integrity": "sha1-MIvur/3ygRkFHvodkyITyRuPkuc=", + "requires": { + "camelcase": "^2.0.0", + "map-obj": "^1.0.0" + }, + "dependencies": { + "camelcase": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-2.1.1.tgz", + "integrity": "sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8=" + } + } + }, + "camelize": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/camelize/-/camelize-1.0.0.tgz", + "integrity": "sha1-FkpUg+Yw+kMh5a8HAg5TGDGyYJs=" + }, + "caniuse-api": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/caniuse-api/-/caniuse-api-3.0.0.tgz", + "integrity": "sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==", + "requires": { + "browserslist": "^4.0.0", + "caniuse-lite": "^1.0.0", + "lodash.memoize": "^4.1.2", + "lodash.uniq": "^4.5.0" + }, + "dependencies": { + "browserslist": { + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.8.3.tgz", + "integrity": "sha512-iU43cMMknxG1ClEZ2MDKeonKE1CCrFVkQK2AqO2YWFmvIrx4JWrvQ4w4hQez6EpVI8rHTtqh/ruHHDHSOKxvUg==", + "requires": { + "caniuse-lite": "^1.0.30001017", + "electron-to-chromium": "^1.3.322", + "node-releases": "^1.1.44" + } + } + } + }, + "caniuse-lite": { + "version": "1.0.30001019", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001019.tgz", + "integrity": "sha512-6ljkLtF1KM5fQ+5ZN0wuyVvvebJxgJPTmScOMaFuQN2QuOzvRJnWSKfzQskQU5IOU4Gap3zasYPIinzwUjoj/g==" + }, + "caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" + }, + "caw": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/caw/-/caw-2.0.1.tgz", + "integrity": "sha512-Cg8/ZSBEa8ZVY9HspcGUYaK63d/bN7rqS3CYCzEGUxuYv6UlmcjzDUz2fCFFHyTvUW5Pk0I+3hkA3iXlIj6guA==", + "requires": { + "get-proxy": "^2.0.0", + "isurl": "^1.0.0-alpha5", + "tunnel-agent": "^0.6.0", + "url-to-options": "^1.0.1" + } + }, + "ccount": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-1.0.4.tgz", + "integrity": "sha512-fpZ81yYfzentuieinmGnphk0pLkOTMm6MZdVqwd77ROvhko6iujLNGrHH5E7utq3ygWklwfmwuG+A7P+NpqT6w==" + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "character-entities": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-1.2.3.tgz", + "integrity": "sha512-yB4oYSAa9yLcGyTbB4ItFwHw43QHdH129IJ5R+WvxOkWlyFnR5FAaBNnUq4mcxsTVZGh28bHoeTHMKXH1wZf3w==" + }, + "character-entities-html4": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-1.1.3.tgz", + "integrity": "sha512-SwnyZ7jQBCRHELk9zf2CN5AnGEc2nA+uKMZLHvcqhpPprjkYhiLn0DywMHgN5ttFZuITMATbh68M6VIVKwJbcg==" + }, + "character-entities-legacy": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.3.tgz", + "integrity": 
"sha512-YAxUpPoPwxYFsslbdKkhrGnXAtXoHNgYjlBM3WMXkWGTl5RsY3QmOyhwAgL8Nxm9l5LBThXGawxKPn68y6/fww==" + }, + "character-reference-invalid": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-1.1.3.tgz", + "integrity": "sha512-VOq6PRzQBam/8Jm6XBGk2fNEnHXAdGd6go0rtd4weAGECBamHDwwCQSOT12TACIYUZegUXnV6xBXqUssijtxIg==" + }, + "chardet": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", + "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==" + }, + "charenc": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz", + "integrity": "sha1-wKHS86cJLgN3S/qD8UwPxXkKhmc=" + }, + "cheerio": { + "version": "1.0.0-rc.3", + "resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.3.tgz", + "integrity": "sha512-0td5ijfUPuubwLUu0OBoe98gZj8C/AA+RW3v67GPlGOrvxWjZmBXiBCRU+I8VEiNyJzjth40POfHiz2RB3gImA==", + "requires": { + "css-select": "~1.2.0", + "dom-serializer": "~0.1.1", + "entities": "~1.1.1", + "htmlparser2": "^3.9.1", + "lodash": "^4.15.0", + "parse5": "^3.0.1" + }, + "dependencies": { + "dom-serializer": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.1.1.tgz", + "integrity": "sha512-l0IU0pPzLWSHBcieZbpOKgkIn3ts3vAh7ZuFyXNwJxJXk/c4Gwj9xaTJwIDVQCXawWD0qb3IzMGH5rglQaO0XA==", + "requires": { + "domelementtype": "^1.3.0", + "entities": "^1.1.1" + } + }, + "entities": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/entities/-/entities-1.1.2.tgz", + "integrity": "sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==" + } + } + }, + "chokidar": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.3.0.tgz", + "integrity": "sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A==", + "requires": { + "anymatch": "~3.1.1", + "braces": "~3.0.2", + "fsevents": "~2.1.1", + "glob-parent": "~5.1.0", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.2.0" + }, + "dependencies": { + "anymatch": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz", + "integrity": "sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==", + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + } + }, + "binary-extensions": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.0.0.tgz", + "integrity": "sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow==" + }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "requires": { + "fill-range": "^7.0.1" + } + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "fsevents": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.1.2.tgz", + "integrity": 
"sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA==", + "optional": true + }, + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "requires": { + "binary-extensions": "^2.0.0" + } + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==" + }, + "readdirp": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.2.0.tgz", + "integrity": "sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ==", + "requires": { + "picomatch": "^2.0.4" + } + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "requires": { + "is-number": "^7.0.0" + } + } + } + }, + "chownr": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.3.tgz", + "integrity": "sha512-i70fVHhmV3DtTl6nqvZOnIjbY0Pe4kAUjwHj8z0zAdgBtYrJyYwLKCCuRBQ5ppkyL0AkN7HKRnETdmdp1zqNXw==" + }, + "chrome-trace-event": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.2.tgz", + "integrity": "sha512-9e/zx1jw7B4CO+c/RXoCsfg/x1AfUBioy4owYH0bJprEYAx5hRFLRhWBqHAG57D0ZM4H7vxbP7bPe0VwhQRYDQ==", + "requires": { + "tslib": "^1.9.0" + } + }, + "ci-info": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", + "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==" + }, + "cipher-base": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", + "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", + "requires": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "class-utils": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/class-utils/-/class-utils-0.3.6.tgz", + "integrity": "sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==", + "requires": { + "arr-union": "^3.1.0", + "define-property": "^0.2.5", + "isobject": "^3.0.0", + "static-extend": "^0.1.1" + }, + "dependencies": { + "define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "requires": { + "is-descriptor": "^0.1.0" + } + } + } + }, + "clean-css": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/clean-css/-/clean-css-4.2.1.tgz", + "integrity": "sha512-4ZxI6dy4lrY6FHzfiy1aEOXgu4LIsW2MhwG0VBKdcoGoH/XLFgaHSdLTGr4O8Be6A8r3MOphEiI8Gc1n0ecf3g==", + "requires": { + "source-map": "~0.6.0" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": 
"https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } + } + }, + "clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==" + }, + "cli-boxes": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-2.2.0.tgz", + "integrity": "sha512-gpaBrMAizVEANOpfZp/EEUixTXDyGt7DFzdK5hU+UbWt/J0lB0w20ncZj59Z9a93xHb9u12zF5BS6i9RKbtg4w==" + }, + "cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "requires": { + "restore-cursor": "^3.1.0" + } + }, + "cli-spinners": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-1.3.1.tgz", + "integrity": "sha512-1QL4544moEsDVH9T/l6Cemov/37iv1RtoKf7NJ04A60+4MREXNfx/QvavbH6QoGdsD4N4Mwy49cmaINR/o2mdg==", + "optional": true + }, + "cli-table3": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.5.1.tgz", + "integrity": "sha512-7Qg2Jrep1S/+Q3EceiZtQcDPWxhAvBw+ERf1162v4sikJrvojMHFqXt8QIVha8UlH9rgU0BeWPytZ9/TzYqlUw==", + "requires": { + "colors": "^1.1.2", + "object-assign": "^4.1.0", + "string-width": "^2.1.1" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "requires": { + "ansi-regex": "^3.0.0" + } + } + } + }, + "cli-truncate": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-2.1.0.tgz", + "integrity": "sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==", + "optional": true, + "requires": { + "slice-ansi": "^3.0.0", + "string-width": "^4.2.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", + "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "optional": true, + "requires": { + "@types/color-name": "^1.1.1", + "color-convert": "^2.0.1" + } + }, + "astral-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", + "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", + "optional": true + }, + "color-convert": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "optional": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "optional": true + }, + "slice-ansi": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-3.0.0.tgz", + "integrity": "sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==", + "optional": true, + "requires": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + } + } + } + }, + "cli-width": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz", + "integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk=" + }, + "clipboard": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/clipboard/-/clipboard-2.0.4.tgz", + "integrity": "sha512-Vw26VSLRpJfBofiVaFb/I8PVfdI1OxKcYShe6fm0sP/DtmiWQNCjhM/okTvdCo0G+lMMm1rMYbk4IK4x1X+kgQ==", + "optional": true, + "requires": { + "good-listener": "^1.2.2", + "select": "^1.1.2", + "tiny-emitter": "^2.0.0" + } + }, + "clipboardy": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/clipboardy/-/clipboardy-2.1.0.tgz", + "integrity": "sha512-2pzOUxWcLlXWtn+Jd6js3o12TysNOOVes/aQfg+MT/35vrxWzedHlLwyoJpXjsFKWm95BTNEcMGD9+a7mKzZkQ==", + "requires": { + "arch": "^2.1.1", + "execa": "^1.0.0" + }, + "dependencies": { + "execa": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", + "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", + "requires": { + "cross-spawn": "^6.0.0", + "get-stream": "^4.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + } + } + } + }, + "cliui": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", + "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "requires": { + "string-width": "^3.1.0", + "strip-ansi": "^5.2.0", + "wrap-ansi": "^5.1.0" + }, + "dependencies": { + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + } + } + }, + "clone-response": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz", + "integrity": "sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws=", + "requires": { + "mimic-response": "^1.0.0" + } + }, + "coa": { + 
"version": "2.0.2", + "resolved": "https://registry.npmjs.org/coa/-/coa-2.0.2.tgz", + "integrity": "sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA==", + "requires": { + "@types/q": "^1.5.1", + "chalk": "^2.4.1", + "q": "^1.1.2" + } + }, + "code-point-at": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", + "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=" + }, + "collapse-white-space": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/collapse-white-space/-/collapse-white-space-1.0.5.tgz", + "integrity": "sha512-703bOOmytCYAX9cXYqoikYIx6twmFCXsnzRQheBcTG3nzKYBR4P/+wkYeH+Mvj7qUz8zZDtdyzbxfnEi/kYzRQ==" + }, + "collection-visit": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz", + "integrity": "sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA=", + "requires": { + "map-visit": "^1.0.0", + "object-visit": "^1.0.0" + } + }, + "color": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/color/-/color-3.1.2.tgz", + "integrity": "sha512-vXTJhHebByxZn3lDvDJYw4lR5+uB3vuoHsuYA5AKuxRVn5wzzIfQKGLBmgdVRHKTJYeK5rvJcHnrd0Li49CFpg==", + "requires": { + "color-convert": "^1.9.1", + "color-string": "^1.5.2" + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" + }, + "color-string": { + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.5.3.tgz", + "integrity": "sha512-dC2C5qeWoYkxki5UAXapdjqO672AM4vZuPGRQfO8b5HKuKGBbKWpITyDYN7TOFKvRW7kOgAn3746clDBMDJyQw==", + "requires": { + "color-name": "^1.0.0", + "simple-swizzle": "^0.2.2" + } + }, + "colors": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz", + "integrity": "sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==", + "optional": true + }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "comma-separated-tokens": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-1.0.7.tgz", + "integrity": "sha512-Jrx3xsP4pPv4AwJUDWY9wOXGtwPXARej6Xd99h4TUGotmf8APuquKMpK+dnD3UgyxK7OEWaisjZz+3b5jtL6xQ==" + }, + "command-exists": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/command-exists/-/command-exists-1.2.8.tgz", + "integrity": "sha512-PM54PkseWbiiD/mMsbvW351/u+dafwTJ0ye2qB60G1aGQP9j3xK2gmMDc+R34L3nDtx4qMCitXT75mkbkGJDLw==" + }, + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + }, + "common-tags": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.0.tgz", + "integrity": 
"sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw==" + }, + "commondir": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", + "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=" + }, + "component-bind": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/component-bind/-/component-bind-1.0.0.tgz", + "integrity": "sha1-AMYIq33Nk4l8AAllGx06jh5zu9E=" + }, + "component-emitter": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", + "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" + }, + "component-inherit": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/component-inherit/-/component-inherit-0.0.3.tgz", + "integrity": "sha1-ZF/ErfWLcrZJ1crmUTVhnbJv8UM=" + }, + "compressible": { + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", + "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", + "requires": { + "mime-db": ">= 1.43.0 < 2" + } + }, + "compression": { + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz", + "integrity": "sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==", + "requires": { + "accepts": "~1.3.5", + "bytes": "3.0.0", + "compressible": "~2.0.16", + "debug": "2.6.9", + "on-headers": "~1.0.2", + "safe-buffer": "5.1.2", + "vary": "~1.1.2" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + }, + "concat-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", + "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", + "requires": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" + } + }, + "config-chain": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.12.tgz", + "integrity": "sha512-a1eOIcu8+7lUInge4Rpf/n4Krkf3Dd9lqhljRzII1/Zno/kRtUWnznPO3jOKBmTEktkt3fkxisUcivoj0ebzoA==", + "requires": { + "ini": "^1.3.4", + "proto-list": "~1.2.1" + } + }, + "configstore": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/configstore/-/configstore-3.1.2.tgz", + "integrity": "sha512-vtv5HtGjcYUgFrXc6Kx747B83MRRVS5R1VTEQoXvuP+kMI+if6uywV0nDGoiydJRy4yk7h9od5Og0kxx4zUXmw==", + "requires": { + "dot-prop": "^4.1.0", + "graceful-fs": "^4.1.2", + "make-dir": "^1.0.0", + "unique-string": "^1.0.0", + "write-file-atomic": "^2.0.0", + "xdg-basedir": "^3.0.0" + }, + "dependencies": { + "make-dir": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz", + "integrity": 
"sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==", + "requires": { + "pify": "^3.0.0" + } + }, + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + } + } + }, + "confusing-browser-globals": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/confusing-browser-globals/-/confusing-browser-globals-1.0.9.tgz", + "integrity": "sha512-KbS1Y0jMtyPgIxjO7ZzMAuUpAKMt1SzCL9fsrKsX6b0zJPTaT0SiSPmewwVZg9UAO83HVIlEhZF84LIjZ0lmAw==" + }, + "connect-history-api-fallback": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz", + "integrity": "sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg==" + }, + "console-browserify": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.2.0.tgz", + "integrity": "sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA==" + }, + "console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=" + }, + "console-stream": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/console-stream/-/console-stream-0.1.1.tgz", + "integrity": "sha1-oJX+B7IEZZVfL6/Si11yvM2UnUQ=" + }, + "constants-browserify": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/constants-browserify/-/constants-browserify-1.0.0.tgz", + "integrity": "sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U=" + }, + "contains-path": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/contains-path/-/contains-path-0.1.0.tgz", + "integrity": "sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo=" + }, + "content-disposition": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.3.tgz", + "integrity": "sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==", + "requires": { + "safe-buffer": "5.1.2" + } + }, + "content-type": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", + "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" + }, + "convert-hrtime": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/convert-hrtime/-/convert-hrtime-3.0.0.tgz", + "integrity": "sha512-7V+KqSvMiHp8yWDuwfww06XleMWVVB9b9tURBx+G7UTADuo5hYPuowKloz4OzOqbPezxgo+fdQ1522WzPG4OeA==" + }, + "convert-source-map": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.7.0.tgz", + "integrity": "sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA==", + "requires": { + "safe-buffer": "~5.1.1" + } + }, + "cookie": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.0.tgz", + "integrity": "sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==" + }, + "cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" + }, + "copy-concurrently": { + "version": "1.0.5", + "resolved": 
"https://registry.npmjs.org/copy-concurrently/-/copy-concurrently-1.0.5.tgz", + "integrity": "sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A==", + "requires": { + "aproba": "^1.1.1", + "fs-write-stream-atomic": "^1.0.8", + "iferr": "^0.1.5", + "mkdirp": "^0.5.1", + "rimraf": "^2.5.4", + "run-queue": "^1.0.0" + } + }, + "copy-descriptor": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz", + "integrity": "sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=" + }, + "copyfiles": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/copyfiles/-/copyfiles-2.1.1.tgz", + "integrity": "sha512-y6DZHve80whydXzBal7r70TBgKMPKesVRR1Sn/raUu7Jh/i7iSLSyGvYaq0eMJ/3Y/CKghwzjY32q1WzEnpp3Q==", + "requires": { + "glob": "^7.0.5", + "minimatch": "^3.0.3", + "mkdirp": "^0.5.1", + "noms": "0.0.0", + "through2": "^2.0.1", + "yargs": "^13.2.4" + } + }, + "core-js": { + "version": "2.6.11", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.11.tgz", + "integrity": "sha512-5wjnpaT/3dV+XB4borEsnAYQchn00XSgTAWKDkEqv+K8KevjbzmofK6hfJ9TZIlpj2N0xQpazy7PiRQiWHqzWg==" + }, + "core-js-compat": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.6.2.tgz", + "integrity": "sha512-+G28dzfYGtAM+XGvB1C5AS1ZPKfQ47HLhcdeIQdZgQnJVdp7/D0m+W/TErwhgsX6CujRUk/LebB6dCrKrtJrvQ==", + "requires": { + "browserslist": "^4.8.3", + "semver": "7.0.0" + }, + "dependencies": { + "browserslist": { + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.8.3.tgz", + "integrity": "sha512-iU43cMMknxG1ClEZ2MDKeonKE1CCrFVkQK2AqO2YWFmvIrx4JWrvQ4w4hQez6EpVI8rHTtqh/ruHHDHSOKxvUg==", + "requires": { + "caniuse-lite": "^1.0.30001017", + "electron-to-chromium": "^1.3.322", + "node-releases": "^1.1.44" + } + }, + "semver": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", + "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==" + } + } + }, + "core-js-pure": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.6.2.tgz", + "integrity": "sha512-PRasaCPjjCB65au2dMBPtxuIR6LM8MVNdbIbN57KxcDV1FAYQWlF0pqje/HC2sM6nm/s9KqSTkMTU75pozaghA==" + }, + "core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + }, + "cors": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", + "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "requires": { + "object-assign": "^4", + "vary": "^1" + } + }, + "cosmiconfig": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-6.0.0.tgz", + "integrity": "sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg==", + "requires": { + "@types/parse-json": "^4.0.0", + "import-fresh": "^3.1.0", + "parse-json": "^5.0.0", + "path-type": "^4.0.0", + "yaml": "^1.7.2" + } + }, + "create-ecdh": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/create-ecdh/-/create-ecdh-4.0.3.tgz", + "integrity": "sha512-GbEHQPMOswGpKXM9kCWVrremUcBmjteUaQ01T9rkKCPDXfUHX0IoP9LpHYo2NPFampa4e+/pFDc3jQdxrxQLaw==", + "requires": { + "bn.js": "^4.1.0", + "elliptic": "^6.0.0" + } + }, + "create-hash": { + "version": "1.2.0", + "resolved": 
"https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", + "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==", + "requires": { + "cipher-base": "^1.0.1", + "inherits": "^2.0.1", + "md5.js": "^1.3.4", + "ripemd160": "^2.0.1", + "sha.js": "^2.4.0" + } + }, + "create-hmac": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", + "integrity": "sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==", + "requires": { + "cipher-base": "^1.0.3", + "create-hash": "^1.1.0", + "inherits": "^2.0.1", + "ripemd160": "^2.0.0", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + } + }, + "create-react-context": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/create-react-context/-/create-react-context-0.2.3.tgz", + "integrity": "sha512-CQBmD0+QGgTaxDL3OX1IDXYqjkp2It4RIbcb99jS6AEg27Ga+a9G3JtK6SIu0HBwPLZlmwt9F7UwWA4Bn92Rag==", + "requires": { + "fbjs": "^0.8.0", + "gud": "^1.0.0" + } + }, + "cross-fetch": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-2.2.2.tgz", + "integrity": "sha1-pH/09/xxLauo9qaVoRyUhEDUVyM=", + "requires": { + "node-fetch": "2.1.2", + "whatwg-fetch": "2.0.4" + }, + "dependencies": { + "node-fetch": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.1.2.tgz", + "integrity": "sha1-q4hOjn5X44qUR1POxwb3iNF2i7U=" + }, + "whatwg-fetch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-2.0.4.tgz", + "integrity": "sha512-dcQ1GWpOD/eEQ97k66aiEVpNnapVj90/+R+SXTPYGHpYBBypfKJEQjLrvMZ7YXbKm21gXd4NcuxUTjiv1YtLng==" + } + } + }, + "cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "requires": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "crypt": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz", + "integrity": "sha1-iNf/fsDfuG9xPch7u0LQRNPmxBs=" + }, + "crypto-browserify": { + "version": "3.12.0", + "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-3.12.0.tgz", + "integrity": "sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg==", + "requires": { + "browserify-cipher": "^1.0.0", + "browserify-sign": "^4.0.0", + "create-ecdh": "^4.0.0", + "create-hash": "^1.1.0", + "create-hmac": "^1.1.0", + "diffie-hellman": "^5.0.0", + "inherits": "^2.0.1", + "pbkdf2": "^3.0.3", + "public-encrypt": "^4.0.0", + "randombytes": "^2.0.0", + "randomfill": "^1.0.3" + } + }, + "crypto-random-string": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-1.0.0.tgz", + "integrity": "sha1-ojD2T1aDEOFJgAmUB5DsmVRbyn4=" + }, + "css-color-keywords": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/css-color-keywords/-/css-color-keywords-1.0.0.tgz", + "integrity": "sha1-/qJhbcZ2spYmhrOvjb2+GAskTgU=" + }, + "css-color-names": { + 
"version": "0.0.4", + "resolved": "https://registry.npmjs.org/css-color-names/-/css-color-names-0.0.4.tgz", + "integrity": "sha1-gIrcLnnPhHOAabZGyyDsJ762KeA=" + }, + "css-declaration-sorter": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-4.0.1.tgz", + "integrity": "sha512-BcxQSKTSEEQUftYpBVnsH4SF05NTuBokb19/sBt6asXGKZ/6VP7PLG1CBCkFDYOnhXhPh0jMhO6xZ71oYHXHBA==", + "requires": { + "postcss": "^7.0.1", + "timsort": "^0.3.0" + } + }, + "css-loader": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-1.0.1.tgz", + "integrity": "sha512-+ZHAZm/yqvJ2kDtPne3uX0C+Vr3Zn5jFn2N4HywtS5ujwvsVkyg0VArEXpl3BgczDA8anieki1FIzhchX4yrDw==", + "requires": { + "babel-code-frame": "^6.26.0", + "css-selector-tokenizer": "^0.7.0", + "icss-utils": "^2.1.0", + "loader-utils": "^1.0.2", + "lodash": "^4.17.11", + "postcss": "^6.0.23", + "postcss-modules-extract-imports": "^1.2.0", + "postcss-modules-local-by-default": "^1.2.0", + "postcss-modules-scope": "^1.1.0", + "postcss-modules-values": "^1.3.0", + "postcss-value-parser": "^3.3.0", + "source-list-map": "^2.0.0" + }, + "dependencies": { + "postcss": { + "version": "6.0.23", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.23.tgz", + "integrity": "sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==", + "requires": { + "chalk": "^2.4.1", + "source-map": "^0.6.1", + "supports-color": "^5.4.0" + } + }, + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } + } + }, + "css-select": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-1.2.0.tgz", + "integrity": "sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg=", + "requires": { + "boolbase": "~1.0.0", + "css-what": "2.1", + "domutils": "1.5.1", + "nth-check": "~1.0.1" + } + }, + "css-select-base-adapter": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz", + "integrity": "sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w==" + }, + "css-selector-parser": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/css-selector-parser/-/css-selector-parser-1.3.0.tgz", + "integrity": "sha1-XxrUPi2O77/cME/NOaUhZklD4+s=" + }, + "css-selector-tokenizer": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/css-selector-tokenizer/-/css-selector-tokenizer-0.7.1.tgz", + "integrity": "sha512-xYL0AMZJ4gFzJQsHUKa5jiWWi2vH77WVNg7JYRyewwj6oPh4yb/y6Y9ZCw9dsj/9UauMhtuxR+ogQd//EdEVNA==", + "requires": { + "cssesc": "^0.1.0", + "fastparse": "^1.1.1", + "regexpu-core": "^1.0.0" + }, + "dependencies": { + "jsesc": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", + "integrity": "sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=" + }, + "regexpu-core": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-1.0.0.tgz", + "integrity": "sha1-hqdj9Y7k18L2sQLkdkBQ3n7ZDGs=", + "requires": { + "regenerate": "^1.2.1", + 
"regjsgen": "^0.2.0", + "regjsparser": "^0.1.4" + } + }, + "regjsgen": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.2.0.tgz", + "integrity": "sha1-bAFq3qxVT3WCP+N6wFuS1aTtsfc=" + }, + "regjsparser": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.1.5.tgz", + "integrity": "sha1-fuj4Tcb6eS0/0K4ijSS9lJ6tIFw=", + "requires": { + "jsesc": "~0.5.0" + } + } + } + }, + "css-to-react-native": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/css-to-react-native/-/css-to-react-native-2.3.2.tgz", + "integrity": "sha512-VOFaeZA053BqvvvqIA8c9n0+9vFppVBAHCp6JgFTtTMU3Mzi+XnelJ9XC9ul3BqFzZyQ5N+H0SnwsWT2Ebchxw==", + "requires": { + "camelize": "^1.0.0", + "css-color-keywords": "^1.0.0", + "postcss-value-parser": "^3.3.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "css-tree": { + "version": "1.0.0-alpha.37", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.0.0-alpha.37.tgz", + "integrity": "sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg==", + "requires": { + "mdn-data": "2.0.4", + "source-map": "^0.6.1" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } + } + }, + "css-unit-converter": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/css-unit-converter/-/css-unit-converter-1.1.1.tgz", + "integrity": "sha1-2bkoGtz9jO2TW9urqDeGiX9k6ZY=" + }, + "css-what": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-2.1.3.tgz", + "integrity": "sha512-a+EPoD+uZiNfh+5fxw2nO9QwFa6nJe2Or35fGY6Ipw1R3R4AGz1d1TEZrCegvw2YTmZ0jXirGYlzxxpYSHwpEg==" + }, + "cssesc": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-0.1.0.tgz", + "integrity": "sha1-yBSQPkViM3GgR3tAEJqq++6t27Q=" + }, + "cssnano": { + "version": "4.1.10", + "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-4.1.10.tgz", + "integrity": "sha512-5wny+F6H4/8RgNlaqab4ktc3e0/blKutmq8yNlBFXA//nSFFAqAngjNVRzUvCgYROULmZZUoosL/KSoZo5aUaQ==", + "requires": { + "cosmiconfig": "^5.0.0", + "cssnano-preset-default": "^4.0.7", + "is-resolvable": "^1.0.0", + "postcss": "^7.0.0" + }, + "dependencies": { + "cosmiconfig": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-5.2.1.tgz", + "integrity": "sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA==", + "requires": { + "import-fresh": "^2.0.0", + "is-directory": "^0.3.1", + "js-yaml": "^3.13.1", + "parse-json": "^4.0.0" + } + }, + "import-fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-2.0.0.tgz", + "integrity": "sha1-2BNVwVYS04bGH53dOSLUMEgipUY=", + "requires": { + "caller-path": "^2.0.0", + "resolve-from": "^3.0.0" + } + }, + "parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", + "requires": { + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" + } + }, + 
"resolve-from": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", + "integrity": "sha1-six699nWiBvItuZTM17rywoYh0g=" + } + } + }, + "cssnano-preset-default": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-4.0.7.tgz", + "integrity": "sha512-x0YHHx2h6p0fCl1zY9L9roD7rnlltugGu7zXSKQx6k2rYw0Hi3IqxcoAGF7u9Q5w1nt7vK0ulxV8Lo+EvllGsA==", + "requires": { + "css-declaration-sorter": "^4.0.1", + "cssnano-util-raw-cache": "^4.0.1", + "postcss": "^7.0.0", + "postcss-calc": "^7.0.1", + "postcss-colormin": "^4.0.3", + "postcss-convert-values": "^4.0.1", + "postcss-discard-comments": "^4.0.2", + "postcss-discard-duplicates": "^4.0.2", + "postcss-discard-empty": "^4.0.1", + "postcss-discard-overridden": "^4.0.1", + "postcss-merge-longhand": "^4.0.11", + "postcss-merge-rules": "^4.0.3", + "postcss-minify-font-values": "^4.0.2", + "postcss-minify-gradients": "^4.0.2", + "postcss-minify-params": "^4.0.2", + "postcss-minify-selectors": "^4.0.2", + "postcss-normalize-charset": "^4.0.1", + "postcss-normalize-display-values": "^4.0.2", + "postcss-normalize-positions": "^4.0.2", + "postcss-normalize-repeat-style": "^4.0.2", + "postcss-normalize-string": "^4.0.2", + "postcss-normalize-timing-functions": "^4.0.2", + "postcss-normalize-unicode": "^4.0.1", + "postcss-normalize-url": "^4.0.1", + "postcss-normalize-whitespace": "^4.0.2", + "postcss-ordered-values": "^4.1.2", + "postcss-reduce-initial": "^4.0.3", + "postcss-reduce-transforms": "^4.0.2", + "postcss-svgo": "^4.0.2", + "postcss-unique-selectors": "^4.0.1" + } + }, + "cssnano-util-get-arguments": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/cssnano-util-get-arguments/-/cssnano-util-get-arguments-4.0.0.tgz", + "integrity": "sha1-7ToIKZ8h11dBsg87gfGU7UnMFQ8=" + }, + "cssnano-util-get-match": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/cssnano-util-get-match/-/cssnano-util-get-match-4.0.0.tgz", + "integrity": "sha1-wOTKB/U4a7F+xeUiULT1lhNlFW0=" + }, + "cssnano-util-raw-cache": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/cssnano-util-raw-cache/-/cssnano-util-raw-cache-4.0.1.tgz", + "integrity": "sha512-qLuYtWK2b2Dy55I8ZX3ky1Z16WYsx544Q0UWViebptpwn/xDBmog2TLg4f+DBMg1rJ6JDWtn96WHbOKDWt1WQA==", + "requires": { + "postcss": "^7.0.0" + } + }, + "cssnano-util-same-parent": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/cssnano-util-same-parent/-/cssnano-util-same-parent-4.0.1.tgz", + "integrity": "sha512-WcKx5OY+KoSIAxBW6UBBRay1U6vkYheCdjyVNDm85zt5K9mHoGOfsOsqIszfAqrQQFIIKgjh2+FDgIj/zsl21Q==" + }, + "csso": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/csso/-/csso-4.0.2.tgz", + "integrity": "sha512-kS7/oeNVXkHWxby5tHVxlhjizRCSv8QdU7hB2FpdAibDU8FjTAolhNjKNTiLzXtUrKT6HwClE81yXwEk1309wg==", + "requires": { + "css-tree": "1.0.0-alpha.37" + } + }, + "csstype": { + "version": "2.6.8", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-2.6.8.tgz", + "integrity": "sha512-msVS9qTuMT5zwAGCVm4mxfrZ18BNc6Csd0oJAtiFMZ1FAx1CCvy2+5MDmYoix63LM/6NDbNtodCiGYGmFgO0dA==" + }, + "currently-unhandled": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/currently-unhandled/-/currently-unhandled-0.4.1.tgz", + "integrity": "sha1-mI3zP+qxke95mmE2nddsF635V+o=", + "requires": { + "array-find-index": "^1.0.1" + } + }, + "cwebp-bin": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/cwebp-bin/-/cwebp-bin-5.1.0.tgz", + "integrity": 
"sha512-BsPKStaNr98zfxwejWWLIGELbPERULJoD2v5ijvpeutSAGsegX7gmABgnkRK7MUucCPROXXfaPqkLAwI509JzA==", + "requires": { + "bin-build": "^3.0.0", + "bin-wrapper": "^4.0.1", + "logalot": "^2.1.0" + } + }, + "cyclist": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/cyclist/-/cyclist-1.0.1.tgz", + "integrity": "sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk=" + }, + "damerau-levenshtein": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.5.tgz", + "integrity": "sha512-CBCRqFnpu715iPmw1KrdOrzRqbdFwQTwAWyyyYS42+iAgHCuXZ+/TdMgQkUENPomxEz9z1BEzuQU2Xw0kUuAgA==" + }, + "dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "requires": { + "assert-plus": "^1.0.0" + } + }, + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "requires": { + "ms": "^2.1.1" + } + }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=" + }, + "decode-uri-component": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.0.tgz", + "integrity": "sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU=" + }, + "decompress": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/decompress/-/decompress-4.2.0.tgz", + "integrity": "sha1-eu3YVCflqS2s/lVnSnxQXpbQH50=", + "requires": { + "decompress-tar": "^4.0.0", + "decompress-tarbz2": "^4.0.0", + "decompress-targz": "^4.0.0", + "decompress-unzip": "^4.0.1", + "graceful-fs": "^4.1.10", + "make-dir": "^1.0.0", + "pify": "^2.3.0", + "strip-dirs": "^2.0.0" + }, + "dependencies": { + "make-dir": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz", + "integrity": "sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==", + "requires": { + "pify": "^3.0.0" + }, + "dependencies": { + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + } + } + }, + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=" + } + } + }, + "decompress-response": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz", + "integrity": "sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M=", + "requires": { + "mimic-response": "^1.0.0" + } + }, + "decompress-tar": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/decompress-tar/-/decompress-tar-4.1.1.tgz", + "integrity": "sha512-JdJMaCrGpB5fESVyxwpCx4Jdj2AagLmv3y58Qy4GE6HMVjWz1FeVQk1Ct4Kye7PftcdOo/7U7UKzYBJgqnGeUQ==", + "requires": { + "file-type": "^5.2.0", + "is-stream": "^1.1.0", + "tar-stream": "^1.5.2" + }, + "dependencies": { + "bl": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/bl/-/bl-1.2.2.tgz", + "integrity": "sha512-e8tQYnZodmebYDWGH7KMRvtzKXaJHx3BbilrgZCfvyLUYdKpK1t5PSPmpkny/SgiTSCnjfLW7v5rlONXVFkQEA==", + "requires": { + "readable-stream": "^2.3.5", + "safe-buffer": "^5.1.1" + } + }, + "file-type": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz", + 
"integrity": "sha1-LdvqfHP/42No365J3DOMBYwritY=" + }, + "tar-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.6.2.tgz", + "integrity": "sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A==", + "requires": { + "bl": "^1.0.0", + "buffer-alloc": "^1.2.0", + "end-of-stream": "^1.0.0", + "fs-constants": "^1.0.0", + "readable-stream": "^2.3.0", + "to-buffer": "^1.1.1", + "xtend": "^4.0.0" + } + } + } + }, + "decompress-tarbz2": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/decompress-tarbz2/-/decompress-tarbz2-4.1.1.tgz", + "integrity": "sha512-s88xLzf1r81ICXLAVQVzaN6ZmX4A6U4z2nMbOwobxkLoIIfjVMBg7TeguTUXkKeXni795B6y5rnvDw7rxhAq9A==", + "requires": { + "decompress-tar": "^4.1.0", + "file-type": "^6.1.0", + "is-stream": "^1.1.0", + "seek-bzip": "^1.0.5", + "unbzip2-stream": "^1.0.9" + }, + "dependencies": { + "file-type": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-6.2.0.tgz", + "integrity": "sha512-YPcTBDV+2Tm0VqjybVd32MHdlEGAtuxS3VAYsumFokDSMG+ROT5wawGlnHDoz7bfMcMDt9hxuXvXwoKUx2fkOg==" + } + } + }, + "decompress-targz": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/decompress-targz/-/decompress-targz-4.1.1.tgz", + "integrity": "sha512-4z81Znfr6chWnRDNfFNqLwPvm4db3WuZkqV+UgXQzSngG3CEKdBkw5jrv3axjjL96glyiiKjsxJG3X6WBZwX3w==", + "requires": { + "decompress-tar": "^4.1.1", + "file-type": "^5.2.0", + "is-stream": "^1.1.0" + }, + "dependencies": { + "file-type": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz", + "integrity": "sha1-LdvqfHP/42No365J3DOMBYwritY=" + } + } + }, + "decompress-unzip": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/decompress-unzip/-/decompress-unzip-4.0.1.tgz", + "integrity": "sha1-3qrM39FK6vhVePczroIQ+bSEj2k=", + "requires": { + "file-type": "^3.8.0", + "get-stream": "^2.2.0", + "pify": "^2.3.0", + "yauzl": "^2.4.2" + }, + "dependencies": { + "file-type": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-3.9.0.tgz", + "integrity": "sha1-JXoHg4TR24CHvESdEH1SpSZyuek=" + }, + "get-stream": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-2.3.1.tgz", + "integrity": "sha1-Xzj5PzRgCWZu4BUKBUFn+Rvdld4=", + "requires": { + "object-assign": "^4.0.1", + "pinkie-promise": "^2.0.0" + } + }, + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=" + } + } + }, + "deep-equal": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-1.1.1.tgz", + "integrity": "sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g==", + "requires": { + "is-arguments": "^1.0.4", + "is-date-object": "^1.0.1", + "is-regex": "^1.0.4", + "object-is": "^1.0.1", + "object-keys": "^1.1.1", + "regexp.prototype.flags": "^1.2.0" + } + }, + "deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==" + }, + "deep-is": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", + "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=" + }, + "deepmerge": { + "version": "4.2.2", + "resolved": 
"https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz", + "integrity": "sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==" + }, + "default-gateway": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-4.2.0.tgz", + "integrity": "sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA==", + "requires": { + "execa": "^1.0.0", + "ip-regex": "^2.1.0" + }, + "dependencies": { + "execa": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", + "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", + "requires": { + "cross-spawn": "^6.0.0", + "get-stream": "^4.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + } + } + } + }, + "defer-to-connect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-1.1.1.tgz", + "integrity": "sha512-J7thop4u3mRTkYRQ+Vpfwy2G5Ehoy82I14+14W4YMDLKdWloI9gSzRbV30s/NckQGVJtPkWNcW4oMAUigTdqiQ==" + }, + "define-properties": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", + "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "requires": { + "object-keys": "^1.0.12" + } + }, + "define-property": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-2.0.2.tgz", + "integrity": "sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==", + "requires": { + "is-descriptor": "^1.0.2", + "isobject": "^3.0.1" + }, + "dependencies": { + "is-accessor-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", + "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-data-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", + "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-descriptor": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", + "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", + "requires": { + "is-accessor-descriptor": "^1.0.0", + "is-data-descriptor": "^1.0.0", + "kind-of": "^6.0.2" + } + } + } + }, + "del": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/del/-/del-5.1.0.tgz", + "integrity": "sha512-wH9xOVHnczo9jN2IW68BabcecVPxacIA3g/7z6vhSU/4stOKQzeCRK0yD0A24WiAAUJmmVpWqrERcTxnLo3AnA==", + "requires": { + "globby": "^10.0.1", + "graceful-fs": "^4.2.2", + "is-glob": "^4.0.1", + "is-path-cwd": "^2.2.0", + "is-path-inside": "^3.0.1", + "p-map": "^3.0.0", + "rimraf": "^3.0.0", + "slash": "^3.0.0" + }, + "dependencies": { + "rimraf": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.0.tgz", + "integrity": "sha512-NDGVxTsjqfunkds7CqsOiEnxln4Bo7Nddl3XhS4pXg5OzwkLqJ971ZVAAnB+DDLnF76N+VnDEiBHaVV8I06SUg==", + "requires": { + "glob": "^7.1.3" + } + } + } + }, + 
"delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" + }, + "delegate": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/delegate/-/delegate-3.2.0.tgz", + "integrity": "sha512-IofjkYBZaZivn0V8nnsMJGBr4jVLxHDheKSW88PyxS5QC4Vo9ZbZVvhzlSxY87fVq3STR6r+4cGepyHkcWOQSw==", + "optional": true + }, + "delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", + "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=" + }, + "depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" + }, + "des.js": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/des.js/-/des.js-1.0.1.tgz", + "integrity": "sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA==", + "requires": { + "inherits": "^2.0.1", + "minimalistic-assert": "^1.0.0" + } + }, + "destroy": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", + "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=" + }, + "detab": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/detab/-/detab-2.0.2.tgz", + "integrity": "sha512-Q57yPrxScy816TTE1P/uLRXLDKjXhvYTbfxS/e6lPD+YrqghbsMlGB9nQzj/zVtSPaF0DFPSdO916EWO4sQUyQ==", + "requires": { + "repeat-string": "^1.5.4" + } + }, + "detect-indent": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-6.0.0.tgz", + "integrity": "sha512-oSyFlqaTHCItVRGK5RmrmjB+CmaMOW7IaNA/kdxqhoa6d17j/5ce9O9eWXmV/KEdRwqpQA+Vqe8a8Bsybu4YnA==" + }, + "detect-libc": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", + "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=" + }, + "detect-node": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/detect-node/-/detect-node-2.0.4.tgz", + "integrity": "sha512-ZIzRpLJrOj7jjP2miAtgqIfmzbxa4ZOr5jJc601zklsfEx9oTzmmj2nVpIPRpNlRTIh8lc1kyViIY7BWSGNmKw==" + }, + "detect-port": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/detect-port/-/detect-port-1.3.0.tgz", + "integrity": "sha512-E+B1gzkl2gqxt1IhUzwjrxBKRqx1UzC3WLONHinn8S3T6lwV/agVCyitiFOsGJ/eYuEUBvD71MZHy3Pv1G9doQ==", + "requires": { + "address": "^1.0.1", + "debug": "^2.6.0" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "devcert-san": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/devcert-san/-/devcert-san-0.3.3.tgz", + "integrity": "sha1-qnckR0Gy2DF3HAEfIu4l45atS6k=", + "requires": { + "@types/configstore": "^2.1.1", + "@types/debug": "^0.0.29", + "@types/get-port": "^0.0.4", + "@types/glob": "^5.0.30", + "@types/mkdirp": "^0.3.29", + "@types/node": "^7.0.11", + "@types/tmp": "^0.0.32", + "command-exists": "^1.2.2", + "configstore": "^3.0.0", + "debug": "^2.6.3", + "eol": "^0.8.1", + "get-port": "^3.0.0", + "glob": "^7.1.1", + "mkdirp": "^0.5.1", + "tmp": "^0.0.31", + "tslib": "^1.6.0" + }, + "dependencies": { + "@types/glob": { + "version": 
"5.0.36", + "resolved": "https://registry.npmjs.org/@types/glob/-/glob-5.0.36.tgz", + "integrity": "sha512-KEzSKuP2+3oOjYYjujue6Z3Yqis5HKA1BsIC+jZ1v3lrRNdsqyNNtX0rQf6LSuI4DJJ2z5UV//zBZCcvM0xikg==", + "requires": { + "@types/events": "*", + "@types/minimatch": "*", + "@types/node": "*" + } + }, + "@types/node": { + "version": "7.10.9", + "resolved": "https://registry.npmjs.org/@types/node/-/node-7.10.9.tgz", + "integrity": "sha512-usSpgoUsRtO5xNV5YEPU8PPnHisFx8u0rokj1BPVn/hDF7zwUDzVLiuKZM38B7z8V2111Fj6kd4rGtQFUZpNOw==" + }, + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "tmp": { + "version": "0.0.31", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.31.tgz", + "integrity": "sha1-jzirlDjhcxXl29izZX6L+yd65Kc=", + "requires": { + "os-tmpdir": "~1.0.1" + } + } + } + }, + "diffie-hellman": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.3.tgz", + "integrity": "sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg==", + "requires": { + "bn.js": "^4.1.0", + "miller-rabin": "^4.0.0", + "randombytes": "^2.0.0" + } + }, + "dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "requires": { + "path-type": "^4.0.0" + } + }, + "dns-equal": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz", + "integrity": "sha1-s55/HabrCnW6nBcySzR1PEfgZU0=" + }, + "dns-packet": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/dns-packet/-/dns-packet-1.3.1.tgz", + "integrity": "sha512-0UxfQkMhYAUaZI+xrNZOz/as5KgDU0M/fQ9b6SpkyLbk3GEswDi6PADJVaYJradtRVsRIlF1zLyOodbcTCDzUg==", + "requires": { + "ip": "^1.1.0", + "safe-buffer": "^5.0.1" + } + }, + "dns-txt": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/dns-txt/-/dns-txt-2.0.2.tgz", + "integrity": "sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY=", + "requires": { + "buffer-indexof": "^1.0.0" + } + }, + "doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "requires": { + "esutils": "^2.0.2" + } + }, + "dom-converter": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz", + "integrity": "sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==", + "requires": { + "utila": "~0.4" + } + }, + "dom-helpers": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-3.4.0.tgz", + "integrity": "sha512-LnuPJ+dwqKDIyotW1VzmOZ5TONUN7CwkCR5hrgawTUbkBGYdeoNLZo6nNfGkCrjtE1nXXaj7iMMpDa8/d9WoIA==", + "requires": { + "@babel/runtime": "^7.1.2" + } + }, + "dom-serializer": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.2.2.tgz", + "integrity": "sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g==", + "requires": 
{ + "domelementtype": "^2.0.1", + "entities": "^2.0.0" + }, + "dependencies": { + "domelementtype": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.0.1.tgz", + "integrity": "sha512-5HOHUDsYZWV8FGWN0Njbr/Rn7f/eWSQi1v7+HsUVwXgn8nWWlL64zKDkS0n8ZmQ3mlWOMuXOnR+7Nx/5tMO5AQ==" + } + } + }, + "dom-walk": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.1.tgz", + "integrity": "sha1-ZyIm3HTI95mtNTB9+TaroRrNYBg=" + }, + "domain-browser": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/domain-browser/-/domain-browser-1.2.0.tgz", + "integrity": "sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA==" + }, + "domelementtype": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.3.1.tgz", + "integrity": "sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w==" + }, + "domhandler": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-2.4.2.tgz", + "integrity": "sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA==", + "requires": { + "domelementtype": "1" + } + }, + "domutils": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-1.5.1.tgz", + "integrity": "sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8=", + "requires": { + "dom-serializer": "0", + "domelementtype": "1" + } + }, + "dot-prop": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-4.2.0.tgz", + "integrity": "sha512-tUMXrxlExSW6U2EXiiKGSBVdYgtV8qlHL+C10TsW4PURY/ic+eaysnSkwB4kA/mBlCyy/IKDJ+Lc3wbWeaXtuQ==", + "requires": { + "is-obj": "^1.0.0" + } + }, + "dotenv": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.2.0.tgz", + "integrity": "sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw==" + }, + "download": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/download/-/download-7.1.0.tgz", + "integrity": "sha512-xqnBTVd/E+GxJVrX5/eUJiLYjCGPwMpdL+jGhGU57BvtcA7wwhtHVbXBeUk51kOpW3S7Jn3BQbN9Q1R1Km2qDQ==", + "requires": { + "archive-type": "^4.0.0", + "caw": "^2.0.1", + "content-disposition": "^0.5.2", + "decompress": "^4.2.0", + "ext-name": "^5.0.0", + "file-type": "^8.1.0", + "filenamify": "^2.0.0", + "get-stream": "^3.0.0", + "got": "^8.3.1", + "make-dir": "^1.2.0", + "p-event": "^2.1.0", + "pify": "^3.0.0" + }, + "dependencies": { + "get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" + }, + "make-dir": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz", + "integrity": "sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==", + "requires": { + "pify": "^3.0.0" + } + }, + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + } + } + }, + "duplexer": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.1.tgz", + "integrity": "sha1-rOb/gIwc5mtX0ev5eXessCM0z8E=" + }, + "duplexer3": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.4.tgz", + "integrity": "sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI=" + }, + "duplexify": { + "version": 
"3.7.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", + "integrity": "sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g==", + "requires": { + "end-of-stream": "^1.0.0", + "inherits": "^2.0.1", + "readable-stream": "^2.0.0", + "stream-shift": "^1.0.0" + } + }, + "ecc-jsbn": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", + "requires": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } + }, + "ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" + }, + "electron-to-chromium": { + "version": "1.3.327", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.327.tgz", + "integrity": "sha512-DNMd91VtKt44LIkFtpICxAWu/GSGFLUMDM/kFINJ3Oe47OimSnbMvO3ChkUCdUyit+pRdhdCcM3+i5bpli5gqg==" + }, + "elliptic": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.2.tgz", + "integrity": "sha512-f4x70okzZbIQl/NSRLkI/+tteV/9WqL98zx+SQ69KbXxmVrmjwsNUPn/gYJJ0sHvEak24cZgHIPegRePAtA/xw==", + "requires": { + "bn.js": "^4.4.0", + "brorand": "^1.0.1", + "hash.js": "^1.0.0", + "hmac-drbg": "^1.0.0", + "inherits": "^2.0.1", + "minimalistic-assert": "^1.0.0", + "minimalistic-crypto-utils": "^1.0.0" + } + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "emojis-list": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-2.1.0.tgz", + "integrity": "sha1-TapNnbAPmBmIDHn6RXrlsJof04k=" + }, + "encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" + }, + "encoding": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.12.tgz", + "integrity": "sha1-U4tm8+5izRq1HsMjgp0flIDHS+s=", + "requires": { + "iconv-lite": "~0.4.13" + } + }, + "end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "requires": { + "once": "^1.4.0" + } + }, + "engine.io": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-3.4.0.tgz", + "integrity": "sha512-XCyYVWzcHnK5cMz7G4VTu2W7zJS7SM1QkcelghyIk/FmobWBtXE7fwhBusEKvCSqc3bMh8fNFMlUkCKTFRxH2w==", + "requires": { + "accepts": "~1.3.4", + "base64id": "2.0.0", + "cookie": "0.3.1", + "debug": "~4.1.0", + "engine.io-parser": "~2.2.0", + "ws": "^7.1.2" + }, + "dependencies": { + "cookie": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", + "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" + } + } + }, + "engine.io-client": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-3.4.0.tgz", + "integrity": "sha512-a4J5QO2k99CM2a0b12IznnyQndoEvtA4UAldhGzKqnHf42I3Qs2W5SPnDvatZRcMaNZs4IevVicBPayxYt6FwA==", + "requires": { + "component-emitter": "1.2.1", + "component-inherit": "0.0.3", + "debug": "~4.1.0", + "engine.io-parser": "~2.2.0", + "has-cors": "1.1.0", + "indexof": "0.0.1", + 
"parseqs": "0.0.5", + "parseuri": "0.0.5", + "ws": "~6.1.0", + "xmlhttprequest-ssl": "~1.5.4", + "yeast": "0.1.2" + }, + "dependencies": { + "component-emitter": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.2.1.tgz", + "integrity": "sha1-E3kY1teCg/ffemt8WmPhQOaUJeY=" + }, + "ws": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/ws/-/ws-6.1.4.tgz", + "integrity": "sha512-eqZfL+NE/YQc1/ZynhojeV8q+H050oR8AZ2uIev7RU10svA9ZnJUddHcOUZTJLinZ9yEfdA2kSATS2qZK5fhJA==", + "requires": { + "async-limiter": "~1.0.0" + } + } + } + }, + "engine.io-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-2.2.0.tgz", + "integrity": "sha512-6I3qD9iUxotsC5HEMuuGsKA0cXerGz+4uGcXQEkfBidgKf0amsjrrtwcbwK/nzpZBxclXlV7gGl9dgWvu4LF6w==", + "requires": { + "after": "0.8.2", + "arraybuffer.slice": "~0.0.7", + "base64-arraybuffer": "0.1.5", + "blob": "0.0.5", + "has-binary2": "~1.0.2" + } + }, + "enhanced-resolve": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-4.1.1.tgz", + "integrity": "sha512-98p2zE+rL7/g/DzMHMTF4zZlCgeVdJ7yr6xzEpJRYwFYrGi9ANdn5DnJURg6RpBkyk60XYDnWIv51VfIhfNGuA==", + "requires": { + "graceful-fs": "^4.1.2", + "memory-fs": "^0.5.0", + "tapable": "^1.0.0" + }, + "dependencies": { + "memory-fs": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/memory-fs/-/memory-fs-0.5.0.tgz", + "integrity": "sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA==", + "requires": { + "errno": "^0.1.3", + "readable-stream": "^2.0.1" + } + } + } + }, + "entities": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-2.0.0.tgz", + "integrity": "sha512-D9f7V0JSRwIxlRI2mjMqufDrRDnx8p+eEOz7aUM9SuvF8gsBzra0/6tbjl1m8eQHrZlYj6PxqE00hZ1SAIKPLw==" + }, + "envinfo": { + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.5.0.tgz", + "integrity": "sha512-jDgnJaF/Btomk+m3PZDTTCb5XIIIX3zYItnCRfF73zVgvinLoRomuhi75Y4su0PtQxWz4v66XnLLckyvyJTOIQ==" + }, + "eol": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/eol/-/eol-0.8.1.tgz", + "integrity": "sha1-3vwyJJkMfspzuzRGGlbPncJHYdA=" + }, + "errno": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.7.tgz", + "integrity": "sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg==", + "requires": { + "prr": "~1.0.1" + } + }, + "error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "requires": { + "is-arrayish": "^0.2.1" + } + }, + "error-stack-parser": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/error-stack-parser/-/error-stack-parser-2.0.6.tgz", + "integrity": "sha512-d51brTeqC+BHlwF0BhPtcYgF5nlzf9ZZ0ZIUQNZpc9ZB9qw5IJ2diTrBY9jlCJkTLITYPjmiX6OWCwH+fuyNgQ==", + "requires": { + "stackframe": "^1.1.1" + } + }, + "es-abstract": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.0.tgz", + "integrity": "sha512-yYkE07YF+6SIBmg1MsJ9dlub5L48Ek7X0qz+c/CPCHS9EBXfESorzng4cJQjJW5/pB6vDF41u7F8vUhLVDqIug==", + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.1.5", + "is-regex": "^1.0.5", + 
"object-inspect": "^1.7.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.0", + "string.prototype.trimleft": "^2.1.1", + "string.prototype.trimright": "^2.1.1" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, + "es6-promisify": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-6.0.2.tgz", + "integrity": "sha512-eO6vFm0JvqGzjWIQA6QVKjxpmELfhWbDUWHm1rPfIbn55mhKPiAa5xpLmQWJrNa629ZIeQ8ZvMAi13kvrjK6Mg==" + }, + "escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" + }, + "eslint": { + "version": "6.8.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.8.0.tgz", + "integrity": "sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==", + "requires": { + "@babel/code-frame": "^7.0.0", + "ajv": "^6.10.0", + "chalk": "^2.1.0", + "cross-spawn": "^6.0.5", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "eslint-scope": "^5.0.0", + "eslint-utils": "^1.4.3", + "eslint-visitor-keys": "^1.1.0", + "espree": "^6.1.2", + "esquery": "^1.0.1", + "esutils": "^2.0.2", + "file-entry-cache": "^5.0.1", + "functional-red-black-tree": "^1.0.1", + "glob-parent": "^5.0.0", + "globals": "^12.1.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "inquirer": "^7.0.0", + "is-glob": "^4.0.0", + "js-yaml": "^3.13.1", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.3.0", + "lodash": "^4.17.14", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "natural-compare": "^1.4.0", + "optionator": "^0.8.3", + "progress": "^2.0.0", + "regexpp": "^2.0.1", + "semver": "^6.1.2", + "strip-ansi": "^5.2.0", + "strip-json-comments": "^3.0.1", + "table": "^5.2.3", + "text-table": "^0.2.0", + "v8-compile-cache": "^2.0.3" + } + }, + "eslint-config-react-app": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-react-app/-/eslint-config-react-app-5.1.0.tgz", + "integrity": "sha512-hBaxisHC6HXRVvxX+/t1n8mOdmCVIKgkXsf2WoUkJi7upHJTwYTsdCmx01QPOjKNT34QMQQ9sL0tVBlbiMFjxA==", + "requires": { + "confusing-browser-globals": "^1.0.9" + } + }, + "eslint-import-resolver-node": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.2.tgz", + "integrity": "sha512-sfmTqJfPSizWu4aymbPr4Iidp5yKm8yDkHp+Ir3YiTHiiDfxh69mOUsmiqW6RZ9zRXFaF64GtYmN7e+8GHBv6Q==", + "requires": { + "debug": "^2.6.9", + "resolve": "^1.5.0" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "eslint-loader": { + "version": "2.2.1", + 
"resolved": "https://registry.npmjs.org/eslint-loader/-/eslint-loader-2.2.1.tgz", + "integrity": "sha512-RLgV9hoCVsMLvOxCuNjdqOrUqIj9oJg8hF44vzJaYqsAHuY9G2YAeN3joQ9nxP0p5Th9iFSIpKo+SD8KISxXRg==", + "requires": { + "loader-fs-cache": "^1.0.0", + "loader-utils": "^1.0.2", + "object-assign": "^4.0.1", + "object-hash": "^1.1.4", + "rimraf": "^2.6.1" + } + }, + "eslint-module-utils": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.5.0.tgz", + "integrity": "sha512-kCo8pZaNz2dsAW7nCUjuVoI11EBXXpIzfNxmaoLhXoRDOnqXLC4iSGVRdZPhOitfbdEfMEfKOiENaK6wDPZEGw==", + "requires": { + "debug": "^2.6.9", + "pkg-dir": "^2.0.0" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "requires": { + "locate-path": "^2.0.0" + } + }, + "locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "requires": { + "p-locate": "^2.0.0", + "path-exists": "^3.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "requires": { + "p-try": "^1.0.0" + } + }, + "p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "requires": { + "p-limit": "^1.1.0" + } + }, + "p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=" + }, + "pkg-dir": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", + "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", + "requires": { + "find-up": "^2.1.0" + } + } + } + }, + "eslint-plugin-eslint-plugin": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-eslint-plugin/-/eslint-plugin-eslint-plugin-2.1.0.tgz", + "integrity": "sha512-kT3A/ZJftt28gbl/Cv04qezb/NQ1dwYIbi8lyf806XMxkus7DvOVCLIfTXMrorp322Pnoez7+zabXH29tADIDg==" + }, + "eslint-plugin-flowtype": { + "version": "3.13.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-flowtype/-/eslint-plugin-flowtype-3.13.0.tgz", + "integrity": "sha512-bhewp36P+t7cEV0b6OdmoRWJCBYRiHFlqPZAG1oS3SF+Y0LQkeDvFSM4oxoxvczD1OdONCXMlJfQFiWLcV9urw==", + "requires": { + "lodash": "^4.17.15" + } + }, + "eslint-plugin-graphql": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-graphql/-/eslint-plugin-graphql-3.1.1.tgz", + "integrity": "sha512-VNu2AipS8P1BAnE/tcJ2EmBWjFlCnG+1jKdUlFNDQjocWZlFiPpMu9xYNXePoEXK+q+jG51M/6PdhOjEgJZEaQ==", + "requires": { + "graphql-config": "^2.0.1", + "lodash": "^4.11.1" + } + }, + "eslint-plugin-import": { + "version": "2.19.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.19.1.tgz", + "integrity": 
"sha512-x68131aKoCZlCae7rDXKSAQmbT5DQuManyXo2sK6fJJ0aK5CWAkv6A6HJZGgqC8IhjQxYPgo6/IY4Oz8AFsbBw==", + "requires": { + "array-includes": "^3.0.3", + "array.prototype.flat": "^1.2.1", + "contains-path": "^0.1.0", + "debug": "^2.6.9", + "doctrine": "1.5.0", + "eslint-import-resolver-node": "^0.3.2", + "eslint-module-utils": "^2.4.1", + "has": "^1.0.3", + "minimatch": "^3.0.4", + "object.values": "^1.1.0", + "read-pkg-up": "^2.0.0", + "resolve": "^1.12.0" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "doctrine": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", + "integrity": "sha1-N53Ocw9hZvds76TmcHoVmwLFpvo=", + "requires": { + "esutils": "^2.0.2", + "isarray": "^1.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "eslint-plugin-jsx-a11y": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.2.3.tgz", + "integrity": "sha512-CawzfGt9w83tyuVekn0GDPU9ytYtxyxyFZ3aSWROmnRRFQFT2BiPJd7jvRdzNDi6oLWaS2asMeYSNMjWTV4eNg==", + "requires": { + "@babel/runtime": "^7.4.5", + "aria-query": "^3.0.0", + "array-includes": "^3.0.3", + "ast-types-flow": "^0.0.7", + "axobject-query": "^2.0.2", + "damerau-levenshtein": "^1.0.4", + "emoji-regex": "^7.0.2", + "has": "^1.0.3", + "jsx-ast-utils": "^2.2.1" + }, + "dependencies": { + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + } + } + }, + "eslint-plugin-react": { + "version": "7.17.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.17.0.tgz", + "integrity": "sha512-ODB7yg6lxhBVMeiH1c7E95FLD4E/TwmFjltiU+ethv7KPdCwgiFuOZg9zNRHyufStTDLl/dEFqI2Q1VPmCd78A==", + "requires": { + "array-includes": "^3.0.3", + "doctrine": "^2.1.0", + "eslint-plugin-eslint-plugin": "^2.1.0", + "has": "^1.0.3", + "jsx-ast-utils": "^2.2.3", + "object.entries": "^1.1.0", + "object.fromentries": "^2.0.1", + "object.values": "^1.1.0", + "prop-types": "^15.7.2", + "resolve": "^1.13.1" + }, + "dependencies": { + "doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "requires": { + "esutils": "^2.0.2" + } + } + } + }, + "eslint-plugin-react-hooks": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-1.7.0.tgz", + "integrity": "sha512-iXTCFcOmlWvw4+TOE8CLWj6yX1GwzT0Y6cUfHHZqWnSk144VmVIRcVGtUAzrLES7C798lmvnt02C7rxaOX1HNA==" + }, + "eslint-scope": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.0.0.tgz", + "integrity": "sha512-oYrhJW7S0bxAFDvWqzvMPRm6pcgcnWc4QnofCAqRTRfQC0JcwenzGglTtsLyIuuWFfkqDG9vz67cnttSd53djw==", + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "eslint-utils": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", + "integrity": 
"sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", + "requires": { + "eslint-visitor-keys": "^1.1.0" + } + }, + "eslint-visitor-keys": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.1.0.tgz", + "integrity": "sha512-8y9YjtM1JBJU/A9Kc+SbaOV4y29sSWckBwMHa+FGtVj5gN/sbnKDf6xJUl+8g7FAij9LVaP8C24DUiH/f/2Z9A==" + }, + "espree": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/espree/-/espree-6.1.2.tgz", + "integrity": "sha512-2iUPuuPP+yW1PZaMSDM9eyVf8D5P0Hi8h83YtZ5bPc/zHYjII5khoixIUTMO794NOY8F/ThF1Bo8ncZILarUTA==", + "requires": { + "acorn": "^7.1.0", + "acorn-jsx": "^5.1.0", + "eslint-visitor-keys": "^1.1.0" + } + }, + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==" + }, + "esquery": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.0.1.tgz", + "integrity": "sha512-SmiyZ5zIWH9VM+SRUReLS5Q8a7GxtRdxEBVZpm98rJM7Sb+A9DVCndXfkeFUd3byderg+EbDkfnevfCwynWaNA==", + "requires": { + "estraverse": "^4.0.0" + } + }, + "esrecurse": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.2.1.tgz", + "integrity": "sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ==", + "requires": { + "estraverse": "^4.1.0" + } + }, + "estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==" + }, + "esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==" + }, + "etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" + }, + "event-source-polyfill": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/event-source-polyfill/-/event-source-polyfill-1.0.11.tgz", + "integrity": "sha512-fbo96OutP0Bb+gIYTTy8LGhNWySdetsFElCn/vhOzQL3cXWsS70TP/aRUe32U7F+PuOZH/tvb40tZgoJV8/Ilw==" + }, + "eventemitter3": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.0.tgz", + "integrity": "sha512-qerSRB0p+UDEssxTtm6EDKcE7W4OaoisfIMl4CngyEhjpYglocpNg6UEqCvemdGhosAsg4sO2dXJOdyBifPGCg==" + }, + "events": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.0.0.tgz", + "integrity": "sha512-Dc381HFWJzEOhQ+d8pkNon++bk9h6cdAoAj4iE6Q4y6xgTzySWXlKn05/TVNpjnfRqi/X0EpJEJohPjNI3zpVA==" + }, + "eventsource": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-0.1.6.tgz", + "integrity": "sha1-Cs7ehJ7X3RzMMsgRuxG5RNTykjI=", + "requires": { + "original": ">=0.0.5" + } + }, + "evp_bytestokey": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", + "integrity": "sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==", + "requires": { + "md5.js": "^1.3.4", + "safe-buffer": "^5.1.1" + } + }, + "exec-buffer": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/exec-buffer/-/exec-buffer-3.2.0.tgz", + 
"integrity": "sha512-wsiD+2Tp6BWHoVv3B+5Dcx6E7u5zky+hUwOHjuH2hKSLR3dvRmX8fk8UD8uqQixHs4Wk6eDmiegVrMPjKj7wpA==", + "requires": { + "execa": "^0.7.0", + "p-finally": "^1.0.0", + "pify": "^3.0.0", + "rimraf": "^2.5.4", + "tempfile": "^2.0.0" + }, + "dependencies": { + "cross-spawn": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", + "integrity": "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=", + "requires": { + "lru-cache": "^4.0.1", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "execa": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz", + "integrity": "sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c=", + "requires": { + "cross-spawn": "^5.0.1", + "get-stream": "^3.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + } + }, + "get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" + }, + "lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "requires": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + } + } + }, + "execa": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-3.4.0.tgz", + "integrity": "sha512-r9vdGQk4bmCuK1yKQu1KTwcT2zwfWdbdaXfCtAh+5nU/4fSX+JAb7vZGvI5naJrQlvONrEB20jeruESI69530g==", + "requires": { + "cross-spawn": "^7.0.0", + "get-stream": "^5.0.0", + "human-signals": "^1.1.1", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.0", + "onetime": "^5.1.0", + "p-finally": "^2.0.0", + "signal-exit": "^3.0.2", + "strip-final-newline": "^2.0.0" + }, + "dependencies": { + "cross-spawn": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.1.tgz", + "integrity": "sha512-u7v4o84SwFpD32Z8IIcPZ6z1/ie24O6RU3RbtL5Y316l3KuHVPx9ItBgWQ6VlfAFnRnTtMUrsQ9MUUTuEZjogg==", + "requires": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + } + }, + "get-stream": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.1.0.tgz", + "integrity": "sha512-EXr1FOzrzTfGeL0gQdeFEvOMm2mzMOglyiOXSTpPC+iAjAKftbr3jpCMWynogwYnM+eSj9sHGc6wjIcDvYiygw==", + "requires": { + "pump": "^3.0.0" + } + }, + "is-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", + "integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==" + }, + "npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "requires": { + "path-key": "^3.0.0" + } + }, + "p-finally": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-2.0.1.tgz", + "integrity": "sha512-vpm09aKwq6H9phqRQzecoDpD8TmVyGw70qmWlyq5onxY7tqyTTFVvxMykxQSQKILBSFlbXpypIw2T1Ml7+DDtw==" + }, + "path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + 
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" + }, + "shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "requires": { + "shebang-regex": "^3.0.0" + } + }, + "shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "requires": { + "isexe": "^2.0.0" + } + } + } + }, + "executable": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", + "integrity": "sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==", + "requires": { + "pify": "^2.2.0" + }, + "dependencies": { + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=" + } + } + }, + "exif-parser": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/exif-parser/-/exif-parser-0.1.12.tgz", + "integrity": "sha1-WKnS1ywCwfbwKg70qRZicrd2CSI=" + }, + "expand-brackets": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-2.1.4.tgz", + "integrity": "sha1-t3c14xXOMPa27/D4OwQVGiJEliI=", + "requires": { + "debug": "^2.3.3", + "define-property": "^0.2.5", + "extend-shallow": "^2.0.1", + "posix-character-classes": "^0.1.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "requires": { + "is-descriptor": "^0.1.0" + } + }, + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "requires": { + "is-extendable": "^0.1.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "expand-template": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", + "integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==" + }, + "expand-tilde": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/expand-tilde/-/expand-tilde-2.0.2.tgz", + "integrity": "sha1-l+gBqgUt8CRU3kawK/YhZCzchQI=", + "requires": { + "homedir-polyfill": "^1.0.1" + } + }, + "express": { + "version": "4.17.1", + "resolved": "https://registry.npmjs.org/express/-/express-4.17.1.tgz", + "integrity": 
"sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==", + "requires": { + "accepts": "~1.3.7", + "array-flatten": "1.1.1", + "body-parser": "1.19.0", + "content-disposition": "0.5.3", + "content-type": "~1.0.4", + "cookie": "0.4.0", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "~1.1.2", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "~1.1.2", + "fresh": "0.5.2", + "merge-descriptors": "1.0.1", + "methods": "~1.1.2", + "on-finished": "~2.3.0", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.7", + "proxy-addr": "~2.0.5", + "qs": "6.7.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.1.2", + "send": "0.17.1", + "serve-static": "1.14.1", + "setprototypeof": "1.1.1", + "statuses": "~1.5.0", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "express-graphql": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/express-graphql/-/express-graphql-0.9.0.tgz", + "integrity": "sha512-wccd9Lb6oeJ8yHpUs/8LcnGjFUUQYmOG9A5BNLybRdCzGw0PeUrtBxsIR8bfiur6uSW4OvPkVDoYH06z6/N9+w==", + "requires": { + "accepts": "^1.3.7", + "content-type": "^1.0.4", + "http-errors": "^1.7.3", + "raw-body": "^2.4.1" + }, + "dependencies": { + "bytes": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", + "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==" + }, + "http-errors": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.3.tgz", + "integrity": "sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw==", + "requires": { + "depd": "~1.1.2", + "inherits": "2.0.4", + "setprototypeof": "1.1.1", + "statuses": ">= 1.5.0 < 2", + "toidentifier": "1.0.0" + } + }, + "raw-body": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.1.tgz", + "integrity": "sha512-9WmIKF6mkvA0SLmA2Knm9+qj89e+j1zqgyn8aXGd7+nAduPoqgI9lO57SAZNn/Byzo5P7JhXTyg9PzaJbH73bA==", + "requires": { + "bytes": "3.1.0", + "http-errors": "1.7.3", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + } + } + } + }, + "ext-list": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/ext-list/-/ext-list-2.2.2.tgz", + "integrity": "sha512-u+SQgsubraE6zItfVA0tBuCBhfU9ogSRnsvygI7wht9TS510oLkBRXBsqopeUG/GBOIQyKZO9wjTqIu/sf5zFA==", + "requires": { + "mime-db": "^1.28.0" + } + }, + "ext-name": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ext-name/-/ext-name-5.0.0.tgz", + "integrity": "sha512-yblEwXAbGv1VQDmow7s38W77hzAgJAO50ztBLMcUyUBfxv1HC+LGwtiEN+Co6LtlqT/5uwVOxsD4TNIilWhwdQ==", + "requires": { + "ext-list": "^2.0.0", + "sort-keys-length": "^1.0.0" + } + }, + "extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + }, + "extend-shallow": { + "version": "3.0.2", + "resolved": 
"https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz", + "integrity": "sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg=", + "requires": { + "assign-symbols": "^1.0.0", + "is-extendable": "^1.0.1" + }, + "dependencies": { + "is-extendable": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", + "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", + "requires": { + "is-plain-object": "^2.0.4" + } + } + } + }, + "external-editor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", + "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", + "requires": { + "chardet": "^0.7.0", + "iconv-lite": "^0.4.24", + "tmp": "^0.0.33" + } + }, + "extglob": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/extglob/-/extglob-2.0.4.tgz", + "integrity": "sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==", + "requires": { + "array-unique": "^0.3.2", + "define-property": "^1.0.0", + "expand-brackets": "^2.1.4", + "extend-shallow": "^2.0.1", + "fragment-cache": "^0.2.1", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + }, + "dependencies": { + "define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", + "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", + "requires": { + "is-descriptor": "^1.0.0" + } + }, + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "requires": { + "is-extendable": "^0.1.0" + } + }, + "is-accessor-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", + "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-data-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", + "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-descriptor": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", + "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", + "requires": { + "is-accessor-descriptor": "^1.0.0", + "is-data-descriptor": "^1.0.0", + "kind-of": "^6.0.2" + } + } + } + }, + "extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" + }, + "fast-deep-equal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", + "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=" + }, + "fast-glob": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.1.1.tgz", + "integrity": "sha512-nTCREpBY8w8r+boyFYAx21iL6faSsQynliPHM4Uf56SbkyohCNxpVPEH9xrF5TXKy+IsjkPUHDKiUkzBVRXn9g==", + "requires": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.0", + "merge2": "^1.3.0", + "micromatch": 
"^4.0.2" + }, + "dependencies": { + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "requires": { + "fill-range": "^7.0.1" + } + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" + }, + "micromatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.2.tgz", + "integrity": "sha512-y7FpHSbMUMoyPbYUSzO6PaZ6FyRnQOpHuKwbo1G+Knck95XVU4QAiKdGEnj5wwoS7PlOgthX/09u5iFJ+aYf5Q==", + "requires": { + "braces": "^3.0.1", + "picomatch": "^2.0.5" + } + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "requires": { + "is-number": "^7.0.0" + } + } + } + }, + "fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" + }, + "fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=" + }, + "fastparse": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/fastparse/-/fastparse-1.1.2.tgz", + "integrity": "sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ==" + }, + "fastq": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.6.0.tgz", + "integrity": "sha512-jmxqQ3Z/nXoeyDmWAzF9kH1aGZSis6e/SbfPmJpUnyZ0ogr6iscHQaml4wsEepEWSdtmpy+eVXmCRIMpxaXqOA==", + "requires": { + "reusify": "^1.0.0" + } + }, + "faye-websocket": { + "version": "0.11.3", + "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.3.tgz", + "integrity": "sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA==", + "requires": { + "websocket-driver": ">=0.5.1" + } + }, + "fbjs": { + "version": "0.8.17", + "resolved": "https://registry.npmjs.org/fbjs/-/fbjs-0.8.17.tgz", + "integrity": "sha1-xNWY6taUkRJlPWWIsBpc3Nn5D90=", + "requires": { + "core-js": "^1.0.0", + "isomorphic-fetch": "^2.1.1", + "loose-envify": "^1.0.0", + "object-assign": "^4.1.0", + "promise": "^7.1.1", + "setimmediate": "^1.0.5", + "ua-parser-js": "^0.7.18" + }, + "dependencies": { + "core-js": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-1.2.7.tgz", + "integrity": "sha1-ZSKUwUZR2yj6k70tX/KYOk8IxjY=" + } + } + }, + "fd-slicer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", + "integrity": "sha1-JcfInLH5B3+IkbvmHY85Dq4lbx4=", + "requires": { + "pend": "~1.2.0" + } + }, + "figgy-pudding": { + "version": "3.5.1", + "resolved": 
"https://registry.npmjs.org/figgy-pudding/-/figgy-pudding-3.5.1.tgz", + "integrity": "sha512-vNKxJHTEKNThjfrdJwHc7brvM6eVevuO5nTj6ez8ZQ1qbXTvGthucRF7S4vf2cr71QVnT70V34v0S1DyQsti0w==" + }, + "figures": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.1.0.tgz", + "integrity": "sha512-ravh8VRXqHuMvZt/d8GblBeqDMkdJMBdv/2KntFH+ra5MXkO7nxNKpzQ3n6QD/2da1kH0aWmNISdvhM7gl2gVg==", + "requires": { + "escape-string-regexp": "^1.0.5" + } + }, + "file-entry-cache": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz", + "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==", + "requires": { + "flat-cache": "^2.0.1" + } + }, + "file-loader": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/file-loader/-/file-loader-1.1.11.tgz", + "integrity": "sha512-TGR4HU7HUsGg6GCOPJnFk06RhWgEWFLAGWiT6rcD+GRC2keU3s9RGJ+b3Z6/U73jwwNb2gKLJ7YCrp+jvU4ALg==", + "requires": { + "loader-utils": "^1.0.2", + "schema-utils": "^0.4.5" + } + }, + "file-type": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-8.1.0.tgz", + "integrity": "sha512-qyQ0pzAy78gVoJsmYeNgl8uH8yKhr1lVhW7JbzJmnlRi0I4R2eEDEJZVKG8agpDnLpacwNbDhLNG/LMdxHD2YQ==" + }, + "file-uri-to-path": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", + "optional": true + }, + "filename-reserved-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/filename-reserved-regex/-/filename-reserved-regex-2.0.0.tgz", + "integrity": "sha1-q/c9+rc10EVECr/qLZHzieu/oik=" + }, + "filenamify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/filenamify/-/filenamify-2.1.0.tgz", + "integrity": "sha512-ICw7NTT6RsDp2rnYKVd8Fu4cr6ITzGy3+u4vUujPkabyaz+03F24NWEX7fs5fp+kBonlaqPH8fAO2NM+SXt/JA==", + "requires": { + "filename-reserved-regex": "^2.0.0", + "strip-outer": "^1.0.0", + "trim-repeated": "^1.0.0" + } + }, + "filesize": { + "version": "3.5.11", + "resolved": "https://registry.npmjs.org/filesize/-/filesize-3.5.11.tgz", + "integrity": "sha512-ZH7loueKBoDb7yG9esn1U+fgq7BzlzW6NRi5/rMdxIZ05dj7GFD/Xc5rq2CDt5Yq86CyfSYVyx4242QQNZbx1g==" + }, + "fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=", + "requires": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, + "finalhandler": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", + "integrity": "sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==", + "requires": { + "debug": "2.6.9", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "on-finished": "~2.3.0", + "parseurl": "~1.3.3", + "statuses": "~1.5.0", + "unpipe": "~1.0.0" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": 
"sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "find-cache-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", + "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", + "requires": { + "commondir": "^1.0.1", + "make-dir": "^2.0.0", + "pkg-dir": "^3.0.0" + } + }, + "find-root": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/find-root/-/find-root-1.1.0.tgz", + "integrity": "sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng==" + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "requires": { + "locate-path": "^3.0.0" + } + }, + "find-versions": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/find-versions/-/find-versions-3.2.0.tgz", + "integrity": "sha512-P8WRou2S+oe222TOCHitLy8zj+SIsVJh52VP4lvXkaFVnOFFdoWv1H1Jjvel1aI6NCFOAaeAVm8qrI0odiLcww==", + "requires": { + "semver-regex": "^2.0.0" + } + }, + "flat": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/flat/-/flat-4.1.0.tgz", + "integrity": "sha512-Px/TiLIznH7gEDlPXcUD4KnBusa6kR6ayRUVcnEAbreRIuhkqow/mun59BuRXwoYk7ZQOLW1ZM05ilIvK38hFw==", + "requires": { + "is-buffer": "~2.0.3" + } + }, + "flat-cache": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", + "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==", + "requires": { + "flatted": "^2.0.0", + "rimraf": "2.6.3", + "write": "1.0.3" + } + }, + "flatted": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.1.tgz", + "integrity": "sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg==" + }, + "flush-write-stream": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/flush-write-stream/-/flush-write-stream-1.1.1.tgz", + "integrity": "sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w==", + "requires": { + "inherits": "^2.0.3", + "readable-stream": "^2.3.6" + } + }, + "follow-redirects": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz", + "integrity": "sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ==", + "requires": { + "debug": "=3.1.0" + }, + "dependencies": { + "debug": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "for-in": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", + "integrity": "sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=" + }, + "forever-agent": { + "version": "0.6.1", + "resolved": 
"https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" + }, + "form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, + "forwarded": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", + "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=" + }, + "fragment-cache": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz", + "integrity": "sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk=", + "requires": { + "map-cache": "^0.2.2" + } + }, + "fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" + }, + "from2": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/from2/-/from2-2.3.0.tgz", + "integrity": "sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8=", + "requires": { + "inherits": "^2.0.1", + "readable-stream": "^2.0.0" + } + }, + "fs-constants": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==" + }, + "fs-exists-cached": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-exists-cached/-/fs-exists-cached-1.0.0.tgz", + "integrity": "sha1-zyVVTKBQ3EmuZla0HeQiWJidy84=" + }, + "fs-extra": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz", + "integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==", + "requires": { + "graceful-fs": "^4.2.0", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + } + }, + "fs-minipass": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.0.0.tgz", + "integrity": "sha512-40Qz+LFXmd9tzYVnnBmZvFfvAADfUA14TXPK1s7IfElJTIZ97rA8w4Kin7Wt5JBrC3ShnnFJO/5vPjPEeJIq9A==", + "requires": { + "minipass": "^3.0.0" + } + }, + "fs-write-stream-atomic": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz", + "integrity": "sha1-tH31NJPvkR33VzHnCp3tAYnbQMk=", + "requires": { + "graceful-fs": "^4.1.2", + "iferr": "^0.1.5", + "imurmurhash": "^0.1.4", + "readable-stream": "1 || 2" + } + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" + }, + "fsevents": { + "version": "1.2.11", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.11.tgz", + "integrity": "sha512-+ux3lx6peh0BpvY0JebGyZoiR4D+oYzdPZMKJwkZ+sFkNJzpL7tXc/wehS49gUAxg3tmMHPHZkA8JU2rhhgDHw==", + "optional": true, + "requires": { + "bindings": "^1.5.0", + "nan": "^2.12.1", + "node-pre-gyp": "*" + }, + "dependencies": { + "abbrev": { + "version": "1.1.1", + "bundled": true, + "optional": true + }, + "ansi-regex": { + "version": "2.1.1", + "bundled": true, + "optional": true + }, + "aproba": { + "version": "1.2.0", + "bundled": true, + "optional": true + }, + "are-we-there-yet": { + "version": "1.1.5", + "bundled": true, + "optional": true, + "requires": { + "delegates": "^1.0.0", + 
"readable-stream": "^2.0.6" + } + }, + "balanced-match": { + "version": "1.0.0", + "bundled": true, + "optional": true + }, + "brace-expansion": { + "version": "1.1.11", + "bundled": true, + "optional": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "chownr": { + "version": "1.1.3", + "bundled": true, + "optional": true + }, + "code-point-at": { + "version": "1.1.0", + "bundled": true, + "optional": true + }, + "concat-map": { + "version": "0.0.1", + "bundled": true, + "optional": true + }, + "console-control-strings": { + "version": "1.1.0", + "bundled": true, + "optional": true + }, + "core-util-is": { + "version": "1.0.2", + "bundled": true, + "optional": true + }, + "debug": { + "version": "3.2.6", + "bundled": true, + "optional": true, + "requires": { + "ms": "^2.1.1" + } + }, + "deep-extend": { + "version": "0.6.0", + "bundled": true, + "optional": true + }, + "delegates": { + "version": "1.0.0", + "bundled": true, + "optional": true + }, + "detect-libc": { + "version": "1.0.3", + "bundled": true, + "optional": true + }, + "fs-minipass": { + "version": "1.2.7", + "bundled": true, + "optional": true, + "requires": { + "minipass": "^2.6.0" + } + }, + "fs.realpath": { + "version": "1.0.0", + "bundled": true, + "optional": true + }, + "gauge": { + "version": "2.7.4", + "bundled": true, + "optional": true, + "requires": { + "aproba": "^1.0.3", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.0", + "object-assign": "^4.1.0", + "signal-exit": "^3.0.0", + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wide-align": "^1.1.0" + } + }, + "glob": { + "version": "7.1.6", + "bundled": true, + "optional": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "has-unicode": { + "version": "2.0.1", + "bundled": true, + "optional": true + }, + "iconv-lite": { + "version": "0.4.24", + "bundled": true, + "optional": true, + "requires": { + "safer-buffer": ">= 2.1.2 < 3" + } + }, + "ignore-walk": { + "version": "3.0.3", + "bundled": true, + "optional": true, + "requires": { + "minimatch": "^3.0.4" + } + }, + "inflight": { + "version": "1.0.6", + "bundled": true, + "optional": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "bundled": true, + "optional": true + }, + "ini": { + "version": "1.3.5", + "bundled": true, + "optional": true + }, + "is-fullwidth-code-point": { + "version": "1.0.0", + "bundled": true, + "optional": true, + "requires": { + "number-is-nan": "^1.0.0" + } + }, + "isarray": { + "version": "1.0.0", + "bundled": true, + "optional": true + }, + "minimatch": { + "version": "3.0.4", + "bundled": true, + "optional": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "0.0.8", + "bundled": true, + "optional": true + }, + "minipass": { + "version": "2.9.0", + "bundled": true, + "optional": true, + "requires": { + "safe-buffer": "^5.1.2", + "yallist": "^3.0.0" + } + }, + "minizlib": { + "version": "1.3.3", + "bundled": true, + "optional": true, + "requires": { + "minipass": "^2.9.0" + } + }, + "mkdirp": { + "version": "0.5.1", + "bundled": true, + "optional": true, + "requires": { + "minimist": "0.0.8" + } + }, + "ms": { + "version": "2.1.2", + "bundled": true, + "optional": true + }, + "needle": { + "version": "2.4.0", + "bundled": true, + "optional": true, + "requires": { + "debug": "^3.2.6", + "iconv-lite": 
"^0.4.4", + "sax": "^1.2.4" + } + }, + "node-pre-gyp": { + "version": "0.14.0", + "bundled": true, + "optional": true, + "requires": { + "detect-libc": "^1.0.2", + "mkdirp": "^0.5.1", + "needle": "^2.2.1", + "nopt": "^4.0.1", + "npm-packlist": "^1.1.6", + "npmlog": "^4.0.2", + "rc": "^1.2.7", + "rimraf": "^2.6.1", + "semver": "^5.3.0", + "tar": "^4.4.2" + } + }, + "nopt": { + "version": "4.0.1", + "bundled": true, + "optional": true, + "requires": { + "abbrev": "1", + "osenv": "^0.1.4" + } + }, + "npm-bundled": { + "version": "1.1.1", + "bundled": true, + "optional": true, + "requires": { + "npm-normalize-package-bin": "^1.0.1" + } + }, + "npm-normalize-package-bin": { + "version": "1.0.1", + "bundled": true, + "optional": true + }, + "npm-packlist": { + "version": "1.4.7", + "bundled": true, + "optional": true, + "requires": { + "ignore-walk": "^3.0.1", + "npm-bundled": "^1.0.1" + } + }, + "npmlog": { + "version": "4.1.2", + "bundled": true, + "optional": true, + "requires": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.7.3", + "set-blocking": "~2.0.0" + } + }, + "number-is-nan": { + "version": "1.0.1", + "bundled": true, + "optional": true + }, + "object-assign": { + "version": "4.1.1", + "bundled": true, + "optional": true + }, + "once": { + "version": "1.4.0", + "bundled": true, + "optional": true, + "requires": { + "wrappy": "1" + } + }, + "os-homedir": { + "version": "1.0.2", + "bundled": true, + "optional": true + }, + "os-tmpdir": { + "version": "1.0.2", + "bundled": true, + "optional": true + }, + "osenv": { + "version": "0.1.5", + "bundled": true, + "optional": true, + "requires": { + "os-homedir": "^1.0.0", + "os-tmpdir": "^1.0.0" + } + }, + "path-is-absolute": { + "version": "1.0.1", + "bundled": true, + "optional": true + }, + "process-nextick-args": { + "version": "2.0.1", + "bundled": true, + "optional": true + }, + "rc": { + "version": "1.2.8", + "bundled": true, + "optional": true, + "requires": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "dependencies": { + "minimist": { + "version": "1.2.0", + "bundled": true, + "optional": true + } + } + }, + "readable-stream": { + "version": "2.3.6", + "bundled": true, + "optional": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "rimraf": { + "version": "2.7.1", + "bundled": true, + "optional": true, + "requires": { + "glob": "^7.1.3" + } + }, + "safe-buffer": { + "version": "5.1.2", + "bundled": true, + "optional": true + }, + "safer-buffer": { + "version": "2.1.2", + "bundled": true, + "optional": true + }, + "sax": { + "version": "1.2.4", + "bundled": true, + "optional": true + }, + "semver": { + "version": "5.7.1", + "bundled": true, + "optional": true + }, + "set-blocking": { + "version": "2.0.0", + "bundled": true, + "optional": true + }, + "signal-exit": { + "version": "3.0.2", + "bundled": true, + "optional": true + }, + "string-width": { + "version": "1.0.2", + "bundled": true, + "optional": true, + "requires": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + } + }, + "string_decoder": { + "version": "1.1.1", + "bundled": true, + "optional": true, + "requires": { + "safe-buffer": "~5.1.0" + } + }, + "strip-ansi": { + "version": "3.0.1", + "bundled": true, + "optional": true, + "requires": { 
+ "ansi-regex": "^2.0.0" + } + }, + "strip-json-comments": { + "version": "2.0.1", + "bundled": true, + "optional": true + }, + "tar": { + "version": "4.4.13", + "bundled": true, + "optional": true, + "requires": { + "chownr": "^1.1.1", + "fs-minipass": "^1.2.5", + "minipass": "^2.8.6", + "minizlib": "^1.2.1", + "mkdirp": "^0.5.0", + "safe-buffer": "^5.1.2", + "yallist": "^3.0.3" + } + }, + "util-deprecate": { + "version": "1.0.2", + "bundled": true, + "optional": true + }, + "wide-align": { + "version": "1.1.3", + "bundled": true, + "optional": true, + "requires": { + "string-width": "^1.0.2 || 2" + } + }, + "wrappy": { + "version": "1.0.2", + "bundled": true, + "optional": true + }, + "yallist": { + "version": "3.1.1", + "bundled": true, + "optional": true + } + } + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "functional-red-black-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", + "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=" + }, + "gatsby": { + "version": "2.18.18", + "resolved": "https://registry.npmjs.org/gatsby/-/gatsby-2.18.18.tgz", + "integrity": "sha512-+XH0eLlGfe2+ziIMnDSklf2628zjR2Emej3CdOoW1gHQ/fhpbrZLcuNpPrekW0fE+tQyTS8vDPHsIE0FDSsVRw==", + "requires": { + "@babel/code-frame": "^7.5.5", + "@babel/core": "^7.7.5", + "@babel/parser": "^7.7.5", + "@babel/polyfill": "^7.7.0", + "@babel/runtime": "^7.7.6", + "@babel/traverse": "^7.7.4", + "@hapi/joi": "^15.1.1", + "@mikaelkristiansson/domready": "^1.0.9", + "@pieh/friendly-errors-webpack-plugin": "1.7.0-chalk-2", + "@reach/router": "^1.2.1", + "@typescript-eslint/eslint-plugin": "^2.11.0", + "@typescript-eslint/parser": "^2.11.0", + "address": "1.1.2", + "autoprefixer": "^9.7.3", + "axios": "^0.19.0", + "babel-core": "7.0.0-bridge.0", + "babel-eslint": "^10.0.3", + "babel-loader": "^8.0.6", + "babel-plugin-add-module-exports": "^0.3.3", + "babel-plugin-dynamic-import-node": "^2.3.0", + "babel-plugin-remove-graphql-queries": "^2.7.19", + "babel-preset-gatsby": "^0.2.26", + "better-opn": "1.0.0", + "better-queue": "^3.8.10", + "bluebird": "^3.7.2", + "browserslist": "3.2.8", + "cache-manager": "^2.10.1", + "cache-manager-fs-hash": "^0.0.7", + "chalk": "^2.4.2", + "chokidar": "3.3.0", + "common-tags": "^1.8.0", + "compression": "^1.7.4", + "convert-hrtime": "^3.0.0", + "copyfiles": "^2.1.1", + "core-js": "^2.6.11", + "cors": "^2.8.5", + "css-loader": "^1.0.1", + "debug": "^3.2.6", + "del": "^5.1.0", + "detect-port": "^1.3.0", + "devcert-san": "^0.3.3", + "dotenv": "^8.2.0", + "eslint": "^6.7.2", + "eslint-config-react-app": "^5.1.0", + "eslint-loader": "^2.2.1", + "eslint-plugin-flowtype": "^3.13.0", + "eslint-plugin-graphql": "^3.1.0", + "eslint-plugin-import": "^2.19.1", + "eslint-plugin-jsx-a11y": "^6.2.3", + "eslint-plugin-react": "^7.17.0", + "eslint-plugin-react-hooks": "^1.7.0", + "event-source-polyfill": "^1.0.11", + "express": "^4.17.1", + "express-graphql": "^0.9.0", + "fast-levenshtein": "^2.0.6", + "file-loader": "^1.1.11", + "flat": "^4.1.0", + "fs-exists-cached": "1.0.0", + "fs-extra": "^8.1.0", + "gatsby-cli": "^2.8.23", + "gatsby-core-utils": "^1.0.25", + "gatsby-graphiql-explorer": "^0.2.31", + "gatsby-link": "^2.2.27", + "gatsby-plugin-page-creator": "^2.1.37", + "gatsby-react-router-scroll": "^2.1.19", + 
"gatsby-telemetry": "^1.1.46", + "glob": "^7.1.6", + "got": "8.3.2", + "graphql": "^14.5.8", + "graphql-compose": "^6.3.7", + "graphql-playground-middleware-express": "^1.7.12", + "invariant": "^2.2.4", + "is-relative": "^1.0.0", + "is-relative-url": "^3.0.0", + "is-wsl": "^2.1.1", + "jest-worker": "^24.9.0", + "json-loader": "^0.5.7", + "json-stringify-safe": "^5.0.1", + "lodash": "^4.17.15", + "lokijs": "^1.5.8", + "md5": "^2.2.1", + "md5-file": "^3.2.3", + "micromatch": "^3.1.10", + "mime": "^2.4.4", + "mini-css-extract-plugin": "^0.8.0", + "mitt": "^1.2.0", + "mkdirp": "^0.5.1", + "moment": "^2.24.0", + "name-all-modules-plugin": "^1.0.1", + "normalize-path": "^2.1.1", + "null-loader": "^0.1.1", + "opentracing": "^0.14.4", + "optimize-css-assets-webpack-plugin": "^5.0.3", + "parseurl": "^1.3.3", + "physical-cpu-count": "^2.0.0", + "pnp-webpack-plugin": "^1.5.0", + "postcss-flexbugs-fixes": "^3.3.1", + "postcss-loader": "^2.1.6", + "prompts": "^2.3.0", + "prop-types": "^15.7.2", + "raw-loader": "^0.5.1", + "react-dev-utils": "^4.2.3", + "react-error-overlay": "^3.0.0", + "react-hot-loader": "^4.12.18", + "redux": "^4.0.4", + "redux-thunk": "^2.3.0", + "semver": "^5.7.1", + "shallow-compare": "^1.2.2", + "sift": "^5.1.0", + "signal-exit": "^3.0.2", + "slugify": "^1.3.6", + "socket.io": "^2.3.0", + "stack-trace": "^0.0.10", + "string-similarity": "^1.2.2", + "style-loader": "^0.23.1", + "terser-webpack-plugin": "^1.4.2", + "true-case-path": "^2.2.1", + "type-of": "^2.0.1", + "url-loader": "^1.1.2", + "util.promisify": "^1.0.0", + "uuid": "^3.3.3", + "v8-compile-cache": "^1.1.2", + "webpack": "~4.41.2", + "webpack-dev-middleware": "^3.7.2", + "webpack-dev-server": "^3.9.0", + "webpack-hot-middleware": "^2.25.0", + "webpack-merge": "^4.2.2", + "webpack-stats-plugin": "^0.3.0", + "xstate": "^4.7.2", + "yaml-loader": "^0.5.0" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" + }, + "cliui": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-4.1.0.tgz", + "integrity": "sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ==", + "requires": { + "string-width": "^2.1.1", + "strip-ansi": "^4.0.0", + "wrap-ansi": "^2.0.0" + }, + "dependencies": { + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "requires": { + "ansi-regex": "^3.0.0" + } + } + } + }, + "configstore": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/configstore/-/configstore-5.0.0.tgz", + "integrity": "sha512-eE/hvMs7qw7DlcB5JPRnthmrITuHMmACUJAp89v6PT6iOqzoLS7HRWhBtuHMlhNHo2AhUSA/3Dh1bKNJHcublQ==", + "requires": { + "dot-prop": "^5.1.0", + "graceful-fs": "^4.1.2", + "make-dir": "^3.0.0", + "unique-string": "^2.0.0", + "write-file-atomic": "^3.0.0", + "xdg-basedir": "^4.0.0" + } + }, + "crypto-random-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz", + "integrity": "sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==" + }, + "debug": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "requires": { + "ms": 
"^2.1.1" + } + }, + "dot-prop": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.2.0.tgz", + "integrity": "sha512-uEUyaDKoSQ1M4Oq8l45hSE26SnTxL6snNnqvK/VWx5wJhmff5z0FUVJDKDanor/6w3kzE3i7XZOk+7wC0EXr1A==", + "requires": { + "is-obj": "^2.0.0" + } + }, + "gatsby-cli": { + "version": "2.8.23", + "resolved": "https://registry.npmjs.org/gatsby-cli/-/gatsby-cli-2.8.23.tgz", + "integrity": "sha512-e/ImmpW8dzZcmjeTqHaOhvr9gxNXPUE6MCLnl34yWlQwm2iMOWp86qhRvba5GfG6NJ2AX070JPYnkbO6RP5i5A==", + "requires": { + "@babel/code-frame": "^7.5.5", + "@babel/runtime": "^7.7.6", + "@hapi/joi": "^15.1.1", + "better-opn": "^1.0.0", + "bluebird": "^3.7.2", + "chalk": "^2.4.2", + "clipboardy": "^2.1.0", + "common-tags": "^1.8.0", + "configstore": "^5.0.0", + "convert-hrtime": "^3.0.0", + "core-js": "^2.6.11", + "envinfo": "^7.5.0", + "execa": "^3.4.0", + "fs-exists-cached": "^1.0.0", + "fs-extra": "^8.1.0", + "gatsby-core-utils": "^1.0.25", + "gatsby-telemetry": "^1.1.46", + "hosted-git-info": "^3.0.2", + "ink": "^2.6.0", + "ink-spinner": "^3.0.1", + "is-valid-path": "^0.1.1", + "lodash": "^4.17.15", + "meant": "^1.0.1", + "node-fetch": "^2.6.0", + "object.entries": "^1.1.0", + "opentracing": "^0.14.4", + "pretty-error": "^2.1.1", + "progress": "^2.0.3", + "prompts": "^2.3.0", + "react": "^16.12.0", + "redux": "^4.0.4", + "resolve-cwd": "^2.0.0", + "semver": "^6.3.0", + "signal-exit": "^3.0.2", + "source-map": "0.7.3", + "stack-trace": "^0.0.10", + "strip-ansi": "^5.2.0", + "update-notifier": "^3.0.1", + "uuid": "3.3.3", + "yargs": "^12.0.5", + "yurnalist": "^1.1.1" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + } + } + }, + "get-caller-file": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", + "integrity": "sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==" + }, + "hosted-git-info": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-3.0.2.tgz", + "integrity": "sha512-ezZMWtHXm7Eb7Rq4Mwnx2vs79WUx2QmRg3+ZqeGroKzfDO+EprOcgRPYghsOP9JuYBfK18VojmRTGCg8Ma+ktw==", + "requires": { + "lru-cache": "^5.1.1" + } + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + }, + "is-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==" + }, + "lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "requires": { + "yallist": "^3.0.2" + } + }, + "make-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.0.0.tgz", + "integrity": "sha512-grNJDhb8b1Jm1qeqW5R/O63wUo4UXo2v2HMic6YT9i/HBlF93S8jkMgH7yugvY9ABDShH4VZMn8I+U8+fCNegw==", + "requires": { + "semver": "^6.0.0" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": 
"sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + } + } + }, + "node-fetch": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", + "integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==" + }, + "require-main-filename": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz", + "integrity": "sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE=" + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + }, + "source-map": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", + "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==" + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + }, + "dependencies": { + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "requires": { + "ansi-regex": "^3.0.0" + } + } + } + }, + "unique-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz", + "integrity": "sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==", + "requires": { + "crypto-random-string": "^2.0.0" + } + }, + "v8-compile-cache": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-1.1.2.tgz", + "integrity": "sha512-ejdrifsIydN1XDH7EuR2hn8ZrkRKUYF7tUcBjBy/lhrCvs2K+zRlbW9UHc0IQ9RsYFZJFqJrieoIHfkCa0DBRA==" + }, + "wrap-ansi": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", + "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", + "requires": { + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" + }, + "is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "requires": { + "number-is-nan": "^1.0.0" + } + }, + "string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "requires": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "requires": { + "ansi-regex": "^2.0.0" + } + } + } + }, + "write-file-atomic": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.1.tgz", + "integrity": 
"sha512-JPStrIyyVJ6oCSz/691fAjFtefZ6q+fP6tm+OS4Qw6o+TGQxNp1ziY2PgS+X/m0V8OWhZiO/m4xSj+Pr4RrZvw==", + "requires": { + "imurmurhash": "^0.1.4", + "is-typedarray": "^1.0.0", + "signal-exit": "^3.0.2", + "typedarray-to-buffer": "^3.1.5" + } + }, + "xdg-basedir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz", + "integrity": "sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==" + }, + "yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" + }, + "yargs": { + "version": "12.0.5", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-12.0.5.tgz", + "integrity": "sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw==", + "requires": { + "cliui": "^4.0.0", + "decamelize": "^1.2.0", + "find-up": "^3.0.0", + "get-caller-file": "^1.0.1", + "os-locale": "^3.0.0", + "require-directory": "^2.1.1", + "require-main-filename": "^1.0.1", + "set-blocking": "^2.0.0", + "string-width": "^2.0.0", + "which-module": "^2.0.0", + "y18n": "^3.2.1 || ^4.0.0", + "yargs-parser": "^11.1.1" + } + }, + "yargs-parser": { + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-11.1.1.tgz", + "integrity": "sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ==", + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } + } + } + }, + "gatsby-core-utils": { + "version": "1.0.25", + "resolved": "https://registry.npmjs.org/gatsby-core-utils/-/gatsby-core-utils-1.0.25.tgz", + "integrity": "sha512-Pz5xueweH9qv5TIW7wwlTxI/CbyzkiE5xvgLicbHbU+InH+lCNt4VuvGP1gQcIRuHMCIbuVKSiVCC7jnxEbtrA==", + "requires": { + "ci-info": "2.0.0", + "node-object-hash": "^2.0.0" + } + }, + "gatsby-graphiql-explorer": { + "version": "0.2.31", + "resolved": "https://registry.npmjs.org/gatsby-graphiql-explorer/-/gatsby-graphiql-explorer-0.2.31.tgz", + "integrity": "sha512-pAK/HG/Ryw2ZDWb/5rnSBmPf8KsnFGlO/zS9m2IdLjnQS0kA0b9UbfZ5bjkg7pwPPLhWilEX+uON0l51N4gnmg==", + "requires": { + "@babel/runtime": "^7.7.6" + } + }, + "gatsby-image": { + "version": "2.2.37", + "resolved": "https://registry.npmjs.org/gatsby-image/-/gatsby-image-2.2.37.tgz", + "integrity": "sha512-nm1PgitbK4ZR9YQ3mTlAXDXXNM/7JT2LYDFqTlWe1y78nhA6RfIfm+4bsNH5HCEUaYhZiedpkqguJiIYWYPsLg==", + "requires": { + "@babel/runtime": "^7.7.6", + "object-fit-images": "^3.2.4", + "prop-types": "^15.7.2" + } + }, + "gatsby-link": { + "version": "2.2.27", + "resolved": "https://registry.npmjs.org/gatsby-link/-/gatsby-link-2.2.27.tgz", + "integrity": "sha512-Jiz6ShReB25plqTKsTAVpfFvN1K/JziNhr1z8Q6p+dPzQaNWfEC61kpRKE69J1WWycvRdxCSZEkOgY/0nlAifg==", + "requires": { + "@babel/runtime": "^7.7.6", + "@types/reach__router": "^1.2.6", + "prop-types": "^15.7.2" + } + }, + "gatsby-page-utils": { + "version": "0.0.36", + "resolved": "https://registry.npmjs.org/gatsby-page-utils/-/gatsby-page-utils-0.0.36.tgz", + "integrity": "sha512-W+9+PdEWKCxO5cEr8D3F0vyN2sDLeoSwROwiuO+J791HHUj+HgnDzRAt97WjBwcD4ghT+ONs5Ao2WyR+bVrIzw==", + "requires": { + "@babel/runtime": "^7.7.6", + "bluebird": "^3.7.2", + "chokidar": "3.3.0", + "fs-exists-cached": "^1.0.0", + "gatsby-core-utils": "^1.0.25", + "glob": "^7.1.6", + "lodash": "^4.17.15", + "micromatch": "^3.1.10" + } + }, + "gatsby-plugin-catch-links": { + "version": "2.1.21", + "resolved": 
"https://registry.npmjs.org/gatsby-plugin-catch-links/-/gatsby-plugin-catch-links-2.1.21.tgz", + "integrity": "sha512-AHsSVbb4PgdFlbVk4ZmwcJ9UETzYcGqh9Otzk1XeGRFra7msUoB4sCZIUTqSow+sThaHiO/hYgkcGfOlLIrD5g==", + "requires": { + "@babel/runtime": "^7.7.6", + "escape-string-regexp": "^1.0.5" + } + }, + "gatsby-plugin-ipfs": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/gatsby-plugin-ipfs/-/gatsby-plugin-ipfs-2.0.2.tgz", + "integrity": "sha512-Igh4K0axPzfvmP8T0w5Vvo789HTaaC0XHTkHG18jTkwW04J5TV+YPULCxVqRRCGL4cEJgpLuX9TR9iCgEu5igA==", + "requires": { + "globby": "^8.0.1", + "is-text-path": "^1.0.1", + "p-map": "^2.0.0" + }, + "dependencies": { + "@nodelib/fs.stat": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-1.1.3.tgz", + "integrity": "sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw==" + }, + "array-union": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", + "integrity": "sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk=", + "requires": { + "array-uniq": "^1.0.1" + } + }, + "arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=" + }, + "dir-glob": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-2.0.0.tgz", + "integrity": "sha512-37qirFDz8cA5fimp9feo43fSuRo2gHwaIn6dXL8Ber1dGwUosDrGZeCCXq57WnIqE4aQ+u3eQZzsk1yOzhdwag==", + "requires": { + "arrify": "^1.0.1", + "path-type": "^3.0.0" + } + }, + "fast-glob": { + "version": "2.2.7", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-2.2.7.tgz", + "integrity": "sha512-g1KuQwHOZAmOZMuBtHdxDtju+T2RT8jgCC9aANsbpdiDDTSnjgfuVsIBNKbUeJI3oKMRExcfNDtJl4OhbffMsw==", + "requires": { + "@mrmlnc/readdir-enhanced": "^2.2.1", + "@nodelib/fs.stat": "^1.1.2", + "glob-parent": "^3.1.0", + "is-glob": "^4.0.0", + "merge2": "^1.2.3", + "micromatch": "^3.1.10" + } + }, + "glob-parent": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", + "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", + "requires": { + "is-glob": "^3.1.0", + "path-dirname": "^1.0.0" + }, + "dependencies": { + "is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", + "requires": { + "is-extglob": "^2.1.0" + } + } + } + }, + "globby": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/globby/-/globby-8.0.2.tgz", + "integrity": "sha512-yTzMmKygLp8RUpG1Ymu2VXPSJQZjNAZPD4ywgYEaG7e4tBJeUQBO8OpXrf1RCNcEs5alsoJYPAMiIHP0cmeC7w==", + "requires": { + "array-union": "^1.0.1", + "dir-glob": "2.0.0", + "fast-glob": "^2.0.2", + "glob": "^7.1.2", + "ignore": "^3.3.5", + "pify": "^3.0.0", + "slash": "^1.0.0" + } + }, + "ignore": { + "version": "3.3.10", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", + "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==" + }, + "p-map": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz", + "integrity": "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==" + }, + "path-type": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": 
"sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", + "requires": { + "pify": "^3.0.0" + } + }, + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + }, + "slash": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz", + "integrity": "sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU=" + } + } + }, + "gatsby-plugin-manifest": { + "version": "2.2.34", + "resolved": "https://registry.npmjs.org/gatsby-plugin-manifest/-/gatsby-plugin-manifest-2.2.34.tgz", + "integrity": "sha512-Jo6fXZpJ/QSejrSj9BTfRNikVDjx9FuuKzT4w7il8I6qKtnM4z/lYOIMxRWkAdlK7dj+fpUlbTeClWjhlKRWAQ==", + "requires": { + "@babel/runtime": "^7.7.6", + "gatsby-core-utils": "^1.0.25", + "semver": "^5.7.1", + "sharp": "^0.23.4" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "gatsby-plugin-no-sourcemaps": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/gatsby-plugin-no-sourcemaps/-/gatsby-plugin-no-sourcemaps-2.1.1.tgz", + "integrity": "sha512-IaRscMdXD8AFr922HlgftxN09gqmjsA1/RHuLgtJcv+bYhoGGs83XvSOrOq8Szs+5rLEixj3qdNJb59G0LnO8w==" + }, + "gatsby-plugin-offline": { + "version": "3.0.30", + "resolved": "https://registry.npmjs.org/gatsby-plugin-offline/-/gatsby-plugin-offline-3.0.30.tgz", + "integrity": "sha512-aqv70nWayyjD88kre8mtDJ/AURQJYw8TScsqx9t+G64RgRtp/HTx5qVWdB9ab5NjJUjIFK474Lvjh9bpBPwTvg==", + "requires": { + "@babel/runtime": "^7.7.6", + "cheerio": "^1.0.0-rc.3", + "gatsby-core-utils": "^1.0.25", + "glob": "^7.1.6", + "idb-keyval": "^3.2.0", + "lodash": "^4.17.15", + "workbox-build": "^4.3.1" + } + }, + "gatsby-plugin-page-creator": { + "version": "2.1.37", + "resolved": "https://registry.npmjs.org/gatsby-plugin-page-creator/-/gatsby-plugin-page-creator-2.1.37.tgz", + "integrity": "sha512-EsHDgT4hQx9Zxqf6zUIP9qQh+0Wk1tPyiJtPRADY1jkzwu2dAidJkaUtH4XR8BExhjZ9xYB5uDliSNrGkZg7fw==", + "requires": { + "@babel/runtime": "^7.7.6", + "bluebird": "^3.7.2", + "fs-exists-cached": "^1.0.0", + "gatsby-page-utils": "^0.0.36", + "glob": "^7.1.6", + "lodash": "^4.17.15", + "micromatch": "^3.1.10" + } + }, + "gatsby-plugin-prefetch-google-fonts": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/gatsby-plugin-prefetch-google-fonts/-/gatsby-plugin-prefetch-google-fonts-1.4.3.tgz", + "integrity": "sha512-rrNGpdLkSEQWksM1A1cJnL/wuu9GLfAl8oPQgpn3cmpKd4jnXk+nbLyQOwKQlRYSrzpju59dY8oyf4UfSFbqPg==", + "requires": { + "@babel/runtime": "^7.2.0", + "clean-css": "^4.2.1", + "download": "^7.1.0", + "fs-extra": "^7.0.0", + "get-urls": "^8.0.0", + "globby": "^8.0.1", + "google-fonts-plugin": "2.0.2", + "object-hash": "^1.3.0" + }, + "dependencies": { + "@nodelib/fs.stat": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-1.1.3.tgz", + "integrity": "sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw==" + }, + "array-union": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", + "integrity": "sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk=", + "requires": { + "array-uniq": "^1.0.1" + } + }, + "arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": 
"sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=" + }, + "dir-glob": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-2.0.0.tgz", + "integrity": "sha512-37qirFDz8cA5fimp9feo43fSuRo2gHwaIn6dXL8Ber1dGwUosDrGZeCCXq57WnIqE4aQ+u3eQZzsk1yOzhdwag==", + "requires": { + "arrify": "^1.0.1", + "path-type": "^3.0.0" + } + }, + "fast-glob": { + "version": "2.2.7", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-2.2.7.tgz", + "integrity": "sha512-g1KuQwHOZAmOZMuBtHdxDtju+T2RT8jgCC9aANsbpdiDDTSnjgfuVsIBNKbUeJI3oKMRExcfNDtJl4OhbffMsw==", + "requires": { + "@mrmlnc/readdir-enhanced": "^2.2.1", + "@nodelib/fs.stat": "^1.1.2", + "glob-parent": "^3.1.0", + "is-glob": "^4.0.0", + "merge2": "^1.2.3", + "micromatch": "^3.1.10" + } + }, + "fs-extra": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-7.0.1.tgz", + "integrity": "sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==", + "requires": { + "graceful-fs": "^4.1.2", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + } + }, + "glob-parent": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", + "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", + "requires": { + "is-glob": "^3.1.0", + "path-dirname": "^1.0.0" + }, + "dependencies": { + "is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", + "requires": { + "is-extglob": "^2.1.0" + } + } + } + }, + "globby": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/globby/-/globby-8.0.2.tgz", + "integrity": "sha512-yTzMmKygLp8RUpG1Ymu2VXPSJQZjNAZPD4ywgYEaG7e4tBJeUQBO8OpXrf1RCNcEs5alsoJYPAMiIHP0cmeC7w==", + "requires": { + "array-union": "^1.0.1", + "dir-glob": "2.0.0", + "fast-glob": "^2.0.2", + "glob": "^7.1.2", + "ignore": "^3.3.5", + "pify": "^3.0.0", + "slash": "^1.0.0" + } + }, + "ignore": { + "version": "3.3.10", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", + "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==" + }, + "path-type": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", + "requires": { + "pify": "^3.0.0" + } + }, + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + }, + "slash": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz", + "integrity": "sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU=" + } + } + }, + "gatsby-plugin-react-helmet": { + "version": "3.1.18", + "resolved": "https://registry.npmjs.org/gatsby-plugin-react-helmet/-/gatsby-plugin-react-helmet-3.1.18.tgz", + "integrity": "sha512-wwKH6Z5NK0Gl2hDfMnWNGE41GjWpZGq0eKczPydTJc/1SbPykBFCBrjYgrAPB2ZrRnA8AV06cRwPddJC0mY7sw==", + "requires": { + "@babel/runtime": "^7.7.6" + } + }, + "gatsby-plugin-root-import": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/gatsby-plugin-root-import/-/gatsby-plugin-root-import-2.0.5.tgz", + "integrity": "sha512-/yA6rFjfjiFb8D6nCjfFrrGqYQMkOt4J3u2o6s7VYEF/zpA5dw2C9ENJ5fDKkJSCbbwLiEIGVMMee3vMEip2zA==" + }, + "gatsby-plugin-sharp": { + "version": "2.3.10", + "resolved": 
"https://registry.npmjs.org/gatsby-plugin-sharp/-/gatsby-plugin-sharp-2.3.10.tgz", + "integrity": "sha512-uoVLdSnnrnpWxeI+ZNdQ7nuvx60LI8+nqsuUu0KfyAmEhNgcuuExTDYlM4WDn1BCIifCylTwBBTP0xZU4YvPeA==", + "requires": { + "@babel/runtime": "^7.7.6", + "async": "^2.6.3", + "bluebird": "^3.7.2", + "fs-extra": "^8.1.0", + "gatsby-core-utils": "^1.0.25", + "got": "^8.3.2", + "imagemin": "^6.1.0", + "imagemin-mozjpeg": "^8.0.0", + "imagemin-pngquant": "^6.0.1", + "imagemin-webp": "^5.1.0", + "lodash": "^4.17.15", + "mini-svg-data-uri": "^1.1.3", + "p-defer": "^3.0.0", + "potrace": "^2.1.2", + "probe-image-size": "^4.1.1", + "progress": "^2.0.3", + "semver": "^5.7.1", + "sharp": "^0.23.4", + "svgo": "1.3.2", + "uuid": "^3.3.3" + }, + "dependencies": { + "async": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz", + "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==", + "requires": { + "lodash": "^4.17.14" + } + }, + "p-defer": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-defer/-/p-defer-3.0.0.tgz", + "integrity": "sha512-ugZxsxmtTln604yeYd29EGrNhazN2lywetzpKhfmQjW/VJmhpDmWbiX+h0zL8V91R0UXkhb3KtPmyq9PZw3aYw==" + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "gatsby-plugin-styled-components": { + "version": "3.1.16", + "resolved": "https://registry.npmjs.org/gatsby-plugin-styled-components/-/gatsby-plugin-styled-components-3.1.16.tgz", + "integrity": "sha512-ixvMwMCYm70iC23+zkMBmQPNX0UmmumzK+1TtK3LSYicdaXXz7dychaBdiljKD4WMNraXYx49JuZS01yjOZiOw==", + "requires": { + "@babel/runtime": "^7.7.6" + } + }, + "gatsby-react-router-scroll": { + "version": "2.1.19", + "resolved": "https://registry.npmjs.org/gatsby-react-router-scroll/-/gatsby-react-router-scroll-2.1.19.tgz", + "integrity": "sha512-qK6kE48Efzf8zOsmRw6UiZH7VXuv4ti6taGu5je1D9IGDgMpETKXRA0jmKB1up50XQ4WXvyyLASOqWK3UdDNYw==", + "requires": { + "@babel/runtime": "^7.7.6", + "scroll-behavior": "^0.9.10", + "warning": "^3.0.0" + } + }, + "gatsby-remark-autolink-headers": { + "version": "2.1.22", + "resolved": "https://registry.npmjs.org/gatsby-remark-autolink-headers/-/gatsby-remark-autolink-headers-2.1.22.tgz", + "integrity": "sha512-YkqgbLv4CovkqDMG7Mq/RYjkZud6O79pG131TLmaLZmQkcESwZuzzpNxuaD9snIkth+UwUx1RAso207vN0yRVg==", + "requires": { + "@babel/runtime": "^7.7.6", + "github-slugger": "^1.2.1", + "lodash": "^4.17.15", + "mdast-util-to-string": "^1.0.7", + "unist-util-visit": "^1.4.1" + } + }, + "gatsby-remark-prismjs": { + "version": "3.3.29", + "resolved": "https://registry.npmjs.org/gatsby-remark-prismjs/-/gatsby-remark-prismjs-3.3.29.tgz", + "integrity": "sha512-aF7gu+hfW1075nZNDTLo/88GiELaeeLiWLRKHghGql2FBaOszP/HuTBZCV0KaxRb5rGq8Pwdo1KO+wbkL9qYtw==", + "requires": { + "@babel/runtime": "^7.7.6", + "parse-numeric-range": "^0.0.2", + "unist-util-visit": "^1.4.1" + } + }, + "gatsby-source-filesystem": { + "version": "2.1.43", + "resolved": "https://registry.npmjs.org/gatsby-source-filesystem/-/gatsby-source-filesystem-2.1.43.tgz", + "integrity": "sha512-lmq64xMgHE6cXwLgddogcx2THPsMCia/HivWRGK6DCJhCOXDbkWSiNYthpMIgZXkfsGaYFiVjxRZSSn1QVOU9g==", + "requires": { + "@babel/runtime": "^7.7.6", + "better-queue": "^3.8.10", + "bluebird": "^3.7.2", + "chokidar": "3.3.0", + "file-type": "^12.4.0", + "fs-extra": "^8.1.0", + "gatsby-core-utils": "^1.0.25", 
+ "got": "^7.1.0", + "md5-file": "^3.2.3", + "mime": "^2.4.4", + "pretty-bytes": "^5.3.0", + "progress": "^2.0.3", + "read-chunk": "^3.2.0", + "valid-url": "^1.0.9", + "xstate": "^4.7.2" + }, + "dependencies": { + "file-type": { + "version": "12.4.2", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-12.4.2.tgz", + "integrity": "sha512-UssQP5ZgIOKelfsaB5CuGAL+Y+q7EmONuiwF3N5HAH0t27rvrttgi6Ra9k/+DVaY9UF6+ybxu5pOXLUdA8N7Vg==" + }, + "get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" + }, + "got": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/got/-/got-7.1.0.tgz", + "integrity": "sha512-Y5WMo7xKKq1muPsxD+KmrR8DH5auG7fBdDVueZwETwV6VytKyU9OX/ddpq2/1hp1vIPvVb4T81dKQz3BivkNLw==", + "requires": { + "decompress-response": "^3.2.0", + "duplexer3": "^0.1.4", + "get-stream": "^3.0.0", + "is-plain-obj": "^1.1.0", + "is-retry-allowed": "^1.0.0", + "is-stream": "^1.0.0", + "isurl": "^1.0.0-alpha5", + "lowercase-keys": "^1.0.0", + "p-cancelable": "^0.3.0", + "p-timeout": "^1.1.1", + "safe-buffer": "^5.0.1", + "timed-out": "^4.0.0", + "url-parse-lax": "^1.0.0", + "url-to-options": "^1.0.1" + } + }, + "p-cancelable": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-0.3.0.tgz", + "integrity": "sha512-RVbZPLso8+jFeq1MfNvgXtCRED2raz/dKpacfTNxsx6pLEpEomM7gah6VeHSYV3+vo0OAi4MkArtQcWWXuQoyw==" + }, + "p-timeout": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-1.2.1.tgz", + "integrity": "sha1-XrOzU7f86Z8QGhA4iAuwVOu+o4Y=", + "requires": { + "p-finally": "^1.0.0" + } + }, + "prepend-http": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-1.0.4.tgz", + "integrity": "sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw=" + }, + "url-parse-lax": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-1.0.0.tgz", + "integrity": "sha1-evjzA2Rem9eaJy56FKxovAYJ2nM=", + "requires": { + "prepend-http": "^1.0.1" + } + } + } + }, + "gatsby-telemetry": { + "version": "1.1.46", + "resolved": "https://registry.npmjs.org/gatsby-telemetry/-/gatsby-telemetry-1.1.46.tgz", + "integrity": "sha512-HFrrZc/mZtzt2DsMc34HZiIEHpaXt1kKcFZC6s3Q3KqkgUweCZMwE7N52jkhGA+c3aFOZU/D+CtyP9dWcrsM/Q==", + "requires": { + "@babel/code-frame": "^7.5.5", + "@babel/runtime": "^7.7.6", + "bluebird": "^3.7.2", + "boxen": "^4.2.0", + "configstore": "^5.0.0", + "envinfo": "^7.5.0", + "fs-extra": "^8.1.0", + "gatsby-core-utils": "^1.0.25", + "git-up": "4.0.1", + "is-docker": "2.0.0", + "lodash": "^4.17.15", + "node-fetch": "2.6.0", + "resolve-cwd": "^2.0.0", + "source-map": "^0.7.3", + "stack-trace": "^0.0.10", + "stack-utils": "1.0.2", + "uuid": "3.3.3" + }, + "dependencies": { + "configstore": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/configstore/-/configstore-5.0.0.tgz", + "integrity": "sha512-eE/hvMs7qw7DlcB5JPRnthmrITuHMmACUJAp89v6PT6iOqzoLS7HRWhBtuHMlhNHo2AhUSA/3Dh1bKNJHcublQ==", + "requires": { + "dot-prop": "^5.1.0", + "graceful-fs": "^4.1.2", + "make-dir": "^3.0.0", + "unique-string": "^2.0.0", + "write-file-atomic": "^3.0.0", + "xdg-basedir": "^4.0.0" + } + }, + "crypto-random-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz", + "integrity": "sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==" + }, + 
"dot-prop": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.2.0.tgz", + "integrity": "sha512-uEUyaDKoSQ1M4Oq8l45hSE26SnTxL6snNnqvK/VWx5wJhmff5z0FUVJDKDanor/6w3kzE3i7XZOk+7wC0EXr1A==", + "requires": { + "is-obj": "^2.0.0" + } + }, + "is-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==" + }, + "make-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.0.0.tgz", + "integrity": "sha512-grNJDhb8b1Jm1qeqW5R/O63wUo4UXo2v2HMic6YT9i/HBlF93S8jkMgH7yugvY9ABDShH4VZMn8I+U8+fCNegw==", + "requires": { + "semver": "^6.0.0" + } + }, + "node-fetch": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", + "integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==" + }, + "source-map": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", + "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==" + }, + "unique-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz", + "integrity": "sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==", + "requires": { + "crypto-random-string": "^2.0.0" + } + }, + "write-file-atomic": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.1.tgz", + "integrity": "sha512-JPStrIyyVJ6oCSz/691fAjFtefZ6q+fP6tm+OS4Qw6o+TGQxNp1ziY2PgS+X/m0V8OWhZiO/m4xSj+Pr4RrZvw==", + "requires": { + "imurmurhash": "^0.1.4", + "is-typedarray": "^1.0.0", + "signal-exit": "^3.0.2", + "typedarray-to-buffer": "^3.1.5" + } + }, + "xdg-basedir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz", + "integrity": "sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==" + } + } + }, + "gatsby-transformer-remark": { + "version": "2.6.45", + "resolved": "https://registry.npmjs.org/gatsby-transformer-remark/-/gatsby-transformer-remark-2.6.45.tgz", + "integrity": "sha512-3oICZvZmQyecK+vGBpcLOuDMpJio2OsJEGnmEXlQuS0KOWALGvZCfu3sNu4ROHemEgwY8q5z7OANjqZVxIaloQ==", + "requires": { + "@babel/runtime": "^7.7.6", + "bluebird": "^3.7.2", + "gatsby-core-utils": "^1.0.25", + "gray-matter": "^4.0.2", + "hast-util-raw": "^4.0.0", + "hast-util-to-html": "^4.0.1", + "lodash": "^4.17.15", + "mdast-util-to-hast": "^3.0.4", + "mdast-util-to-string": "^1.0.7", + "mdast-util-toc": "^5.0", + "remark": "^10.0.1", + "remark-parse": "^6.0.3", + "remark-retext": "^3.1.3", + "remark-stringify": "^5.0.0", + "retext-english": "^3.0.4", + "sanitize-html": "^1.20.1", + "underscore.string": "^3.3.5", + "unified": "^6.2.0", + "unist-util-remove-position": "^1.1.4", + "unist-util-select": "^1.5.0", + "unist-util-visit": "^1.4.1" + } + }, + "gauge": { + "version": "2.7.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", + "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", + "requires": { + "aproba": "^1.0.3", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.0", + "object-assign": "^4.1.0", + "signal-exit": "^3.0.0", + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wide-align": "^1.1.0" + }, + "dependencies": { + "ansi-regex": 
{ + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" + }, + "is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "requires": { + "number-is-nan": "^1.0.0" + } + }, + "string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "requires": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "requires": { + "ansi-regex": "^2.0.0" + } + } + } + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" + }, + "get-own-enumerable-property-symbols": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz", + "integrity": "sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==" + }, + "get-port": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/get-port/-/get-port-3.2.0.tgz", + "integrity": "sha1-3Xzn3hh8Bsi/NTeWrHHgmfCYDrw=" + }, + "get-proxy": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/get-proxy/-/get-proxy-2.1.0.tgz", + "integrity": "sha512-zmZIaQTWnNQb4R4fJUEp/FC51eZsc6EkErspy3xtIYStaq8EB/hDIWipxsal+E8rz0qD7f2sL/NA9Xee4RInJw==", + "requires": { + "npm-conf": "^1.1.0" + } + }, + "get-stdin": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz", + "integrity": "sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4=" + }, + "get-stream": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", + "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", + "requires": { + "pump": "^3.0.0" + } + }, + "get-urls": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/get-urls/-/get-urls-8.0.0.tgz", + "integrity": "sha512-9c6aVD6HqnpFjqWSoRzSGNo69hNnSa8EevNFVeIRSLYqYlIJNvtHgrqiQ1sUjHwbZPBY5gO1FMlVjmElfdneqw==", + "requires": { + "normalize-url": "^3.3.0", + "url-regex": "^4.0.0" + } + }, + "get-value": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz", + "integrity": "sha1-3BXKHGcjh8p2vTesCjlbogQqLCg=" + }, + "getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "requires": { + "assert-plus": "^1.0.0" + } + }, + "git-up": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/git-up/-/git-up-4.0.1.tgz", + "integrity": "sha512-LFTZZrBlrCrGCG07/dm1aCjjpL1z9L3+5aEeI9SBhAqSc+kiA9Or1bgZhQFNppJX6h/f5McrvJt1mQXTFm6Qrw==", + "requires": { + "is-ssh": "^1.3.0", + "parse-url": "^5.0.0" + } + }, + "github-from-package": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", + "integrity": 
"sha1-l/tdlr/eiXMxPyDoKI75oWf6ZM4=" + }, + "github-slugger": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-1.2.1.tgz", + "integrity": "sha512-SsZUjg/P03KPzQBt7OxJPasGw6NRO5uOgiZ5RGXVud5iSIZ0eNZeNp5rTwCxtavrRUa/A77j8mePVc5lEvk0KQ==", + "requires": { + "emoji-regex": ">=6.0.0 <=6.1.1" + }, + "dependencies": { + "emoji-regex": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-6.1.1.tgz", + "integrity": "sha1-xs0OwbBkLio8Z6ETfvxeeW2k+I4=" + } + } + }, + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "glob-parent": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.0.tgz", + "integrity": "sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw==", + "requires": { + "is-glob": "^4.0.1" + } + }, + "glob-to-regexp": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.3.0.tgz", + "integrity": "sha1-jFoUlNIGbFcMw7/kSWF1rMTVAqs=" + }, + "global": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/global/-/global-4.4.0.tgz", + "integrity": "sha512-wv/LAoHdRE3BeTGz53FAamhGlPLhlssK45usmGFThIi4XqnBmjKQ16u+RNbP7WvigRZDxUsM0J3gcQ5yicaL0w==", + "requires": { + "min-document": "^2.19.0", + "process": "^0.11.10" + } + }, + "global-dirs": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-0.1.1.tgz", + "integrity": "sha1-sxnA3UYH81PzvpzKTHL8FIxJ9EU=", + "requires": { + "ini": "^1.3.4" + } + }, + "global-modules": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/global-modules/-/global-modules-1.0.0.tgz", + "integrity": "sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg==", + "requires": { + "global-prefix": "^1.0.1", + "is-windows": "^1.0.1", + "resolve-dir": "^1.0.0" + } + }, + "global-prefix": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/global-prefix/-/global-prefix-1.0.2.tgz", + "integrity": "sha1-2/dDxsFJklk8ZVVoy2btMsASLr4=", + "requires": { + "expand-tilde": "^2.0.2", + "homedir-polyfill": "^1.0.1", + "ini": "^1.3.4", + "is-windows": "^1.0.1", + "which": "^1.2.14" + } + }, + "globals": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-12.3.0.tgz", + "integrity": "sha512-wAfjdLgFsPZsklLJvOBUBmzYE8/CwhEqSBEMRXA3qxIiNtyqvjYurAtIfDh6chlEPUfmTY3MnZh5Hfh4q0UlIw==", + "requires": { + "type-fest": "^0.8.1" + } + }, + "globby": { + "version": "10.0.2", + "resolved": "https://registry.npmjs.org/globby/-/globby-10.0.2.tgz", + "integrity": "sha512-7dUi7RvCoT/xast/o/dLN53oqND4yk0nsHkhRgn9w65C4PofCLOoJ39iSOg+qVDdWQPIEj+eszMHQ+aLVwwQSg==", + "requires": { + "@types/glob": "^7.1.1", + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.0.3", + "glob": "^7.1.3", + "ignore": "^5.1.1", + "merge2": "^1.2.3", + "slash": "^3.0.0" + }, + "dependencies": { + "ignore": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.4.tgz", + "integrity": "sha512-MzbUSahkTW1u7JpKKjY7LCARd1fU5W2rLdxlM4kdkayuCwZImjkpluF9CM1aLewYJguPDqewLam18Y6AU69A8A==" + } + } + }, + 
"good-listener": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/good-listener/-/good-listener-1.2.2.tgz", + "integrity": "sha1-1TswzfkxPf+33JoNR3CWqm0UXFA=", + "optional": true, + "requires": { + "delegate": "^3.1.2" + } + }, + "google-fonts-plugin": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/google-fonts-plugin/-/google-fonts-plugin-2.0.2.tgz", + "integrity": "sha512-pWYFe6zoLA6uIUpSr/pkakf3DwA2fYgpStfe54AmkiKTHMCUILvtqihHaS2f4SqbTpdpEUYVTMMgvs2ur1ge8g==", + "requires": { + "axios": "^0.18.0", + "cssnano": "^4.0.5", + "mkdirp": "^0.5.1", + "neon-js": "^1.1.2", + "path": "^0.12.7" + }, + "dependencies": { + "axios": { + "version": "0.18.1", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.18.1.tgz", + "integrity": "sha512-0BfJq4NSfQXd+SkFdrvFbG7addhYSBA2mQwISr46pD6E5iqkWg02RAs8vyTT/j0RTnoYmeXauBuSv1qKwR179g==", + "requires": { + "follow-redirects": "1.5.10", + "is-buffer": "^2.0.2" + } + } + } + }, + "got": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/got/-/got-8.3.2.tgz", + "integrity": "sha512-qjUJ5U/hawxosMryILofZCkm3C84PLJS/0grRIpjAwu+Lkxxj5cxeCU25BG0/3mDSpXKTyZr8oh8wIgLaH0QCw==", + "requires": { + "@sindresorhus/is": "^0.7.0", + "cacheable-request": "^2.1.1", + "decompress-response": "^3.3.0", + "duplexer3": "^0.1.4", + "get-stream": "^3.0.0", + "into-stream": "^3.1.0", + "is-retry-allowed": "^1.1.0", + "isurl": "^1.0.0-alpha5", + "lowercase-keys": "^1.0.0", + "mimic-response": "^1.0.0", + "p-cancelable": "^0.4.0", + "p-timeout": "^2.0.1", + "pify": "^3.0.0", + "safe-buffer": "^5.1.1", + "timed-out": "^4.0.1", + "url-parse-lax": "^3.0.0", + "url-to-options": "^1.0.1" + }, + "dependencies": { + "@sindresorhus/is": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.7.0.tgz", + "integrity": "sha512-ONhaKPIufzzrlNbqtWFFd+jlnemX6lJAgq9ZeiZtS7I1PIf/la7CW4m83rTXRnVnsMbW2k56pGYu7AUFJD9Pow==" + }, + "cacheable-request": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-2.1.4.tgz", + "integrity": "sha1-DYCIAbY0KtM8kd+dC0TcCbkeXD0=", + "requires": { + "clone-response": "1.0.2", + "get-stream": "3.0.0", + "http-cache-semantics": "3.8.1", + "keyv": "3.0.0", + "lowercase-keys": "1.0.0", + "normalize-url": "2.0.1", + "responselike": "1.0.2" + }, + "dependencies": { + "lowercase-keys": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.0.tgz", + "integrity": "sha1-TjNms55/VFfjXxMkvfb4jQv8cwY=" + } + } + }, + "get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" + }, + "http-cache-semantics": { + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz", + "integrity": "sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w==" + }, + "keyv": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-3.0.0.tgz", + "integrity": "sha512-eguHnq22OE3uVoSYG0LVWNP+4ppamWr9+zWBe1bsNcovIMy6huUJFPgy4mGwCd/rnl3vOLGW1MTlu4c57CT1xA==", + "requires": { + "json-buffer": "3.0.0" + } + }, + "normalize-url": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-2.0.1.tgz", + "integrity": "sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw==", + "requires": { + "prepend-http": "^2.0.0", + 
"query-string": "^5.0.1", + "sort-keys": "^2.0.0" + } + }, + "p-cancelable": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-0.4.1.tgz", + "integrity": "sha512-HNa1A8LvB1kie7cERyy21VNeHb2CWJJYqyyC2o3klWFfMGlFmWv2Z7sFgZH8ZiaYL95ydToKTFVXgMV/Os0bBQ==" + }, + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + } + } + }, + "graceful-fs": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", + "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==" + }, + "graceful-readlink": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz", + "integrity": "sha1-TK+tdrxi8C+gObL5Tpo906ORpyU=" + }, + "graphql": { + "version": "14.5.8", + "resolved": "https://registry.npmjs.org/graphql/-/graphql-14.5.8.tgz", + "integrity": "sha512-MMwmi0zlVLQKLdGiMfWkgQD7dY/TUKt4L+zgJ/aR0Howebod3aNgP5JkgvAULiR2HPVZaP2VEElqtdidHweLkg==", + "requires": { + "iterall": "^1.2.2" + } + }, + "graphql-compose": { + "version": "6.3.8", + "resolved": "https://registry.npmjs.org/graphql-compose/-/graphql-compose-6.3.8.tgz", + "integrity": "sha512-o0/jzQEMIpSjryLKwmD1vGrCubiPxD0LxlGTgWDSu38TBepu2GhugC9gYgTEbtiCZAHPtvkZ90SzzABOWZyQLA==", + "requires": { + "graphql-type-json": "^0.2.4", + "object-path": "^0.11.4" + } + }, + "graphql-config": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/graphql-config/-/graphql-config-2.2.1.tgz", + "integrity": "sha512-U8+1IAhw9m6WkZRRcyj8ZarK96R6lQBQ0an4lp76Ps9FyhOXENC5YQOxOFGm5CxPrX2rD0g3Je4zG5xdNJjwzQ==", + "requires": { + "graphql-import": "^0.7.1", + "graphql-request": "^1.5.0", + "js-yaml": "^3.10.0", + "lodash": "^4.17.4", + "minimatch": "^3.0.4" + } + }, + "graphql-import": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/graphql-import/-/graphql-import-0.7.1.tgz", + "integrity": "sha512-YpwpaPjRUVlw2SN3OPljpWbVRWAhMAyfSba5U47qGMOSsPLi2gYeJtngGpymjm9nk57RFWEpjqwh4+dpYuFAPw==", + "requires": { + "lodash": "^4.17.4", + "resolve-from": "^4.0.0" + } + }, + "graphql-playground-html": { + "version": "1.6.12", + "resolved": "https://registry.npmjs.org/graphql-playground-html/-/graphql-playground-html-1.6.12.tgz", + "integrity": "sha512-yOYFwwSMBL0MwufeL8bkrNDgRE7eF/kTHiwrqn9FiR9KLcNIl1xw9l9a+6yIRZM56JReQOHpbQFXTZn1IuSKRg==" + }, + "graphql-playground-middleware-express": { + "version": "1.7.12", + "resolved": "https://registry.npmjs.org/graphql-playground-middleware-express/-/graphql-playground-middleware-express-1.7.12.tgz", + "integrity": "sha512-17szgonnVSxWVrgblLRHHLjWnMUONfkULIwSunaMvYx8k5oG3yL86cyGCbHuDFUFkyr2swLhdfYl4mDfDXuvOA==", + "requires": { + "graphql-playground-html": "1.6.12" + } + }, + "graphql-request": { + "version": "1.8.2", + "resolved": "https://registry.npmjs.org/graphql-request/-/graphql-request-1.8.2.tgz", + "integrity": "sha512-dDX2M+VMsxXFCmUX0Vo0TopIZIX4ggzOtiCsThgtrKR4niiaagsGTDIHj3fsOMFETpa064vzovI+4YV4QnMbcg==", + "requires": { + "cross-fetch": "2.2.2" + } + }, + "graphql-type-json": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/graphql-type-json/-/graphql-type-json-0.2.4.tgz", + "integrity": "sha512-/tq02ayMQjrG4oDFDRLLrPk0KvJXue0nVXoItBe7uAdbNXjQUu+HYCBdAmPLQoseVzUKKMzrhq2P/sfI76ON6w==" + }, + "gray-matter": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.2.tgz", + 
"integrity": "sha512-7hB/+LxrOjq/dd8APlK0r24uL/67w7SkYnfwhNFwg/VDIGWGmduTDYf3WNstLW2fbbmRwrDGCVSJ2isuf2+4Hw==", + "requires": { + "js-yaml": "^3.11.0", + "kind-of": "^6.0.2", + "section-matter": "^1.0.0", + "strip-bom-string": "^1.0.0" + } + }, + "gud": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/gud/-/gud-1.0.0.tgz", + "integrity": "sha512-zGEOVKFM5sVPPrYs7J5/hYEw2Pof8KCyOwyhG8sAF26mCAeUFAcYPu1mwB7hhpIP29zOIBaDqwuHdLp0jvZXjw==" + }, + "gzip-size": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-3.0.0.tgz", + "integrity": "sha1-VGGI6b3DN/Zzdy+BZgRks4nc5SA=", + "requires": { + "duplexer": "^0.1.1" + } + }, + "handle-thing": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.0.tgz", + "integrity": "sha512-d4sze1JNC454Wdo2fkuyzCr6aHcbL6PGGuFAz0Li/NcOm1tCHGnWDRmJP85dh9IhQErTc2svWFEX5xHIOo//kQ==" + }, + "har-schema": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" + }, + "har-validator": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz", + "integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==", + "requires": { + "ajv": "^6.5.5", + "har-schema": "^2.0.0" + } + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-ansi": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", + "requires": { + "ansi-regex": "^2.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" + } + } + }, + "has-binary2": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-binary2/-/has-binary2-1.0.3.tgz", + "integrity": "sha512-G1LWKhDSvhGeAQ8mPVQlqNcOB2sJdwATtZKl2pDKKHfpf/rYj24lkinxf69blJbnsvtqqNU+L3SL50vzZhXOnw==", + "requires": { + "isarray": "2.0.1" + }, + "dependencies": { + "isarray": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.1.tgz", + "integrity": "sha1-o32U7ZzaLVmGXJ92/llu4fM4dB4=" + } + } + }, + "has-cors": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-cors/-/has-cors-1.1.0.tgz", + "integrity": "sha1-XkdHk/fqmEPRu5nCPu9J/xJv/zk=" + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" + }, + "has-symbol-support-x": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/has-symbol-support-x/-/has-symbol-support-x-1.4.2.tgz", + "integrity": "sha512-3ToOva++HaW+eCpgqZrCfN51IPB+7bJNVT6CUATzueB5Heb8o6Nam0V3HG5dlDvZU1Gn5QLcbahiKw/XVk5JJw==" + }, + "has-symbols": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==" + }, + "has-to-string-tag-x": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/has-to-string-tag-x/-/has-to-string-tag-x-1.4.1.tgz", + "integrity": 
"sha512-vdbKfmw+3LoOYVr+mtxHaX5a96+0f3DljYd8JOqvOLsf5mw2Otda2qCDT9qRqLAhrjyQ0h7ual5nOiASpsGNFw==", + "requires": { + "has-symbol-support-x": "^1.4.1" + } + }, + "has-unicode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=" + }, + "has-value": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-value/-/has-value-1.0.0.tgz", + "integrity": "sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc=", + "requires": { + "get-value": "^2.0.6", + "has-values": "^1.0.0", + "isobject": "^3.0.0" + } + }, + "has-values": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-values/-/has-values-1.0.0.tgz", + "integrity": "sha1-lbC2P+whRmGab+V/51Yo1aOe/k8=", + "requires": { + "is-number": "^3.0.0", + "kind-of": "^4.0.0" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + }, + "kind-of": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz", + "integrity": "sha1-IIE989cSkosgc3hpGkUGb65y3Vc=", + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "has-yarn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/has-yarn/-/has-yarn-2.1.0.tgz", + "integrity": "sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw==" + }, + "hash-base": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.0.4.tgz", + "integrity": "sha1-X8hoaEfs1zSZQDMZprCj8/auSRg=", + "requires": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "hash.js": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.1.7.tgz", + "integrity": "sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==", + "requires": { + "inherits": "^2.0.3", + "minimalistic-assert": "^1.0.1" + } + }, + "hast-to-hyperscript": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-5.0.0.tgz", + "integrity": "sha512-DLl3eYTz8uwwzEubDUdCChsR5t5b2ne+yvHrA2h58Suq/JnN3+Gsb9Tc4iZoCCsykmFUc6UUpwxTmQXs0akSeg==", + "requires": { + "comma-separated-tokens": "^1.0.0", + "property-information": "^4.0.0", + "space-separated-tokens": "^1.0.0", + "style-to-object": "^0.2.1", + "unist-util-is": "^2.0.0", + "web-namespaces": "^1.1.2" + }, + "dependencies": { + "unist-util-is": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-2.1.3.tgz", + "integrity": "sha512-4WbQX2iwfr/+PfM4U3zd2VNXY+dWtZsN1fLnWEi2QQXA4qyDYAZcDMfXUX0Cu6XZUHHAO9q4nyxxLT4Awk1qUA==" + } + } + }, + "hast-util-from-parse5": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-4.0.2.tgz", + "integrity": "sha512-I6dtjsGtDqz4fmGSiFClFyiXdKhj5bPceS6intta7k/VDuiKz9P61C6hO6WMiNNmEm1b/EtBH8f+juvz4o0uwQ==", + "requires": { + "ccount": "^1.0.3", + "hastscript": "^4.0.0", + "property-information": "^4.0.0", + "web-namespaces": "^1.1.2", + "xtend": "^4.0.1" + } + }, + "hast-util-is-element": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/hast-util-is-element/-/hast-util-is-element-1.0.3.tgz", + "integrity": "sha512-C62CVn7jbjp89yOhhy7vrkSaB7Vk906Gtcw/Ihd+Iufnq+2pwOZjdPmpzpKLWJXPJBMDX3wXg4FqmdOayPcewA==" + }, + 
"hast-util-parse-selector": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.3.tgz", + "integrity": "sha512-nxbeqjQNxsvo/uYYAw9kij6td05YVUlf1qti09rVfbWSLT5H6wo3c+USIwX6nzXWk5kFZzXnEqO82856r0aM2Q==" + }, + "hast-util-raw": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-4.0.0.tgz", + "integrity": "sha512-5xYHyEJMCf8lX/NT4iA5z6N43yoFsrJqXJ5GWwAbLn815URbIz+UNNFEgid33F9paZuDlqVKvB+K3Aqu5+DdSw==", + "requires": { + "hast-util-from-parse5": "^4.0.2", + "hast-util-to-parse5": "^4.0.1", + "html-void-elements": "^1.0.1", + "parse5": "^5.0.0", + "unist-util-position": "^3.0.0", + "web-namespaces": "^1.0.0", + "xtend": "^4.0.1", + "zwitch": "^1.0.0" + }, + "dependencies": { + "parse5": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-5.1.1.tgz", + "integrity": "sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug==" + } + } + }, + "hast-util-to-html": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-4.0.1.tgz", + "integrity": "sha512-2emzwyf0xEsc4TBIPmDJmBttIw8R4SXAJiJZoiRR/s47ODYWgOqNoDbf2SJAbMbfNdFWMiCSOrI3OVnX6Qq2Mg==", + "requires": { + "ccount": "^1.0.0", + "comma-separated-tokens": "^1.0.1", + "hast-util-is-element": "^1.0.0", + "hast-util-whitespace": "^1.0.0", + "html-void-elements": "^1.0.0", + "property-information": "^4.0.0", + "space-separated-tokens": "^1.0.0", + "stringify-entities": "^1.0.1", + "unist-util-is": "^2.0.0", + "xtend": "^4.0.1" + }, + "dependencies": { + "unist-util-is": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-2.1.3.tgz", + "integrity": "sha512-4WbQX2iwfr/+PfM4U3zd2VNXY+dWtZsN1fLnWEi2QQXA4qyDYAZcDMfXUX0Cu6XZUHHAO9q4nyxxLT4Awk1qUA==" + } + } + }, + "hast-util-to-parse5": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-4.0.1.tgz", + "integrity": "sha512-U/61W+fsNfBpCyJBB5Pt3l5ypIfgXqEyW9pyrtxF7XrqDJHzcFrYpnC94d0JDYjvobLpYCzcU9srhMRPEO1YXw==", + "requires": { + "hast-to-hyperscript": "^5.0.0", + "property-information": "^4.0.0", + "web-namespaces": "^1.0.0", + "xtend": "^4.0.1", + "zwitch": "^1.0.0" + } + }, + "hast-util-whitespace": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-1.0.3.tgz", + "integrity": "sha512-AlkYiLTTwPOyxZ8axq2/bCwRUPjIPBfrHkXuCR92B38b3lSdU22R5F/Z4DL6a2kxWpekWq1w6Nj48tWat6GeRA==" + }, + "hastscript": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-4.1.0.tgz", + "integrity": "sha512-bOTn9hEfzewvHyXdbYGKqOr/LOz+2zYhKbC17U2YAjd16mnjqB1BQ0nooM/RdMy/htVyli0NAznXiBtwDi1cmQ==", + "requires": { + "comma-separated-tokens": "^1.0.0", + "hast-util-parse-selector": "^2.2.0", + "property-information": "^4.0.0", + "space-separated-tokens": "^1.0.0" + } + }, + "hex-color-regex": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/hex-color-regex/-/hex-color-regex-1.1.0.tgz", + "integrity": "sha512-l9sfDFsuqtOqKDsQdqrMRk0U85RZc0RtOR9yPI7mRVOa4FsR/BVnZ0shmQRM96Ji99kYZP/7hn1cedc1+ApsTQ==" + }, + "hmac-drbg": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz", + "integrity": "sha1-0nRXAQJabHdabFRXk+1QL8DGSaE=", + "requires": { + "hash.js": "^1.0.3", + "minimalistic-assert": "^1.0.0", + "minimalistic-crypto-utils": "^1.0.1" + } + }, + 
"hoist-non-react-statics": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.1.tgz", + "integrity": "sha512-wbg3bpgA/ZqWrZuMOeJi8+SKMhr7X9TesL/rXMjTzh0p0JUBo3II8DHboYbuIXWRlttrUFxwcu/5kygrCw8fJw==", + "requires": { + "react-is": "^16.7.0" + } + }, + "homedir-polyfill": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz", + "integrity": "sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA==", + "requires": { + "parse-passwd": "^1.0.0" + } + }, + "hosted-git-info": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.5.tgz", + "integrity": "sha512-kssjab8CvdXfcXMXVcvsXum4Hwdq9XGtRD3TteMEvEbq0LXyiNQr6AprqKqfeaDXze7SxWvRxdpwE6ku7ikLkg==" + }, + "hpack.js": { + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/hpack.js/-/hpack.js-2.1.6.tgz", + "integrity": "sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI=", + "requires": { + "inherits": "^2.0.1", + "obuf": "^1.0.0", + "readable-stream": "^2.0.1", + "wbuf": "^1.1.0" + } + }, + "hsl-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/hsl-regex/-/hsl-regex-1.0.0.tgz", + "integrity": "sha1-1JMwx4ntgZ4nakwNJy3/owsY/m4=" + }, + "hsla-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/hsla-regex/-/hsla-regex-1.0.0.tgz", + "integrity": "sha1-wc56MWjIxmFAM6S194d/OyJfnDg=" + }, + "html-comment-regex": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/html-comment-regex/-/html-comment-regex-1.1.2.tgz", + "integrity": "sha512-P+M65QY2JQ5Y0G9KKdlDpo0zK+/OHptU5AaBwUfAIDJZk1MYf32Frm84EcOytfJE0t5JvkAnKlmjsXDnWzCJmQ==" + }, + "html-entities": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/html-entities/-/html-entities-1.2.1.tgz", + "integrity": "sha1-DfKTUfByEWNRXfueVUPl9u7VFi8=" + }, + "html-void-elements": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-1.0.4.tgz", + "integrity": "sha512-yMk3naGPLrfvUV9TdDbuYXngh/TpHbA6TrOw3HL9kS8yhwx7i309BReNg7CbAJXGE+UMJ6je5OqJ7lC63o6YuQ==" + }, + "htmlparser2": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-3.10.1.tgz", + "integrity": "sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ==", + "requires": { + "domelementtype": "^1.3.1", + "domhandler": "^2.3.0", + "domutils": "^1.5.1", + "entities": "^1.1.1", + "inherits": "^2.0.1", + "readable-stream": "^3.1.1" + }, + "dependencies": { + "entities": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/entities/-/entities-1.1.2.tgz", + "integrity": "sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==" + }, + "readable-stream": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", + "integrity": "sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } + }, + "http-cache-semantics": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.0.3.tgz", + "integrity": "sha512-TcIMG3qeVLgDr1TEd2XvHaTnMPwYQUQMIBLy+5pLSDKYFc7UIqj39w8EGzZkaxoLv/l2K8HaI0t5AVA+YYgUew==" + }, + "http-deceiver": { + "version": "1.2.7", + "resolved": 
"https://registry.npmjs.org/http-deceiver/-/http-deceiver-1.2.7.tgz", + "integrity": "sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc=" + }, + "http-errors": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.2.tgz", + "integrity": "sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==", + "requires": { + "depd": "~1.1.2", + "inherits": "2.0.3", + "setprototypeof": "1.1.1", + "statuses": ">= 1.5.0 < 2", + "toidentifier": "1.0.0" + }, + "dependencies": { + "inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + } + } + }, + "http-parser-js": { + "version": "0.4.10", + "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.4.10.tgz", + "integrity": "sha1-ksnBN0w1CF912zWexWzCV8u5P6Q=" + }, + "http-proxy": { + "version": "1.18.0", + "resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.0.tgz", + "integrity": "sha512-84I2iJM/n1d4Hdgc6y2+qY5mDaz2PUVjlg9znE9byl+q0uC3DeByqBGReQu5tpLK0TAqTIXScRUV+dg7+bUPpQ==", + "requires": { + "eventemitter3": "^4.0.0", + "follow-redirects": "^1.0.0", + "requires-port": "^1.0.0" + } + }, + "http-proxy-middleware": { + "version": "0.19.1", + "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz", + "integrity": "sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q==", + "requires": { + "http-proxy": "^1.17.0", + "is-glob": "^4.0.0", + "lodash": "^4.17.11", + "micromatch": "^3.1.10" + } + }, + "http-signature": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "requires": { + "assert-plus": "^1.0.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" + } + }, + "https-browserify": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/https-browserify/-/https-browserify-1.0.0.tgz", + "integrity": "sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM=" + }, + "human-signals": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz", + "integrity": "sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw==" + }, + "iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "requires": { + "safer-buffer": ">= 2.1.2 < 3" + } + }, + "icss-replace-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz", + "integrity": "sha1-Bupvg2ead0njhs/h/oEq5dsiPe0=" + }, + "icss-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/icss-utils/-/icss-utils-2.1.0.tgz", + "integrity": "sha1-g/Cg7DeL8yRheLbCrZE28TWxyWI=", + "requires": { + "postcss": "^6.0.1" + }, + "dependencies": { + "postcss": { + "version": "6.0.23", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.23.tgz", + "integrity": "sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==", + "requires": { + "chalk": "^2.4.1", + "source-map": "^0.6.1", + "supports-color": "^5.4.0" + } + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": 
"sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } + } + }, + "idb-keyval": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/idb-keyval/-/idb-keyval-3.2.0.tgz", + "integrity": "sha512-slx8Q6oywCCSfKgPgL0sEsXtPVnSbTLWpyiDcu6msHOyKOLari1TD1qocXVCft80umnkk3/Qqh3lwoFt8T/BPQ==" + }, + "ieee754": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz", + "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==" + }, + "iferr": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/iferr/-/iferr-0.1.5.tgz", + "integrity": "sha1-xg7taebY/bazEEofy8ocGS3FtQE=" + }, + "ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==" + }, + "imagemin": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/imagemin/-/imagemin-6.1.0.tgz", + "integrity": "sha512-8ryJBL1CN5uSHpiBMX0rJw79C9F9aJqMnjGnrd/1CafegpNuA81RBAAru/jQQEOWlOJJlpRnlcVFF6wq+Ist0A==", + "requires": { + "file-type": "^10.7.0", + "globby": "^8.0.1", + "make-dir": "^1.0.0", + "p-pipe": "^1.1.0", + "pify": "^4.0.1", + "replace-ext": "^1.0.0" + }, + "dependencies": { + "@nodelib/fs.stat": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-1.1.3.tgz", + "integrity": "sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw==" + }, + "array-union": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", + "integrity": "sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk=", + "requires": { + "array-uniq": "^1.0.1" + } + }, + "arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=" + }, + "dir-glob": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-2.0.0.tgz", + "integrity": "sha512-37qirFDz8cA5fimp9feo43fSuRo2gHwaIn6dXL8Ber1dGwUosDrGZeCCXq57WnIqE4aQ+u3eQZzsk1yOzhdwag==", + "requires": { + "arrify": "^1.0.1", + "path-type": "^3.0.0" + } + }, + "fast-glob": { + "version": "2.2.7", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-2.2.7.tgz", + "integrity": "sha512-g1KuQwHOZAmOZMuBtHdxDtju+T2RT8jgCC9aANsbpdiDDTSnjgfuVsIBNKbUeJI3oKMRExcfNDtJl4OhbffMsw==", + "requires": { + "@mrmlnc/readdir-enhanced": "^2.2.1", + "@nodelib/fs.stat": "^1.1.2", + "glob-parent": "^3.1.0", + "is-glob": "^4.0.0", + "merge2": "^1.2.3", + "micromatch": "^3.1.10" + } + }, + "file-type": { + "version": "10.11.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-10.11.0.tgz", + "integrity": "sha512-uzk64HRpUZyTGZtVuvrjP0FYxzQrBf4rojot6J65YMEbwBLB0CWm0CLojVpwpmFmxcE/lkvYICgfcGozbBq6rw==" + }, + "glob-parent": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", + "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", + "requires": { + "is-glob": "^3.1.0", + "path-dirname": "^1.0.0" + }, + "dependencies": { + "is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", + "requires": { + "is-extglob": "^2.1.0" + } + } + } + }, + "globby": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/globby/-/globby-8.0.2.tgz", + 
"integrity": "sha512-yTzMmKygLp8RUpG1Ymu2VXPSJQZjNAZPD4ywgYEaG7e4tBJeUQBO8OpXrf1RCNcEs5alsoJYPAMiIHP0cmeC7w==", + "requires": { + "array-union": "^1.0.1", + "dir-glob": "2.0.0", + "fast-glob": "^2.0.2", + "glob": "^7.1.2", + "ignore": "^3.3.5", + "pify": "^3.0.0", + "slash": "^1.0.0" + }, + "dependencies": { + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + } + } + }, + "ignore": { + "version": "3.3.10", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", + "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==" + }, + "make-dir": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz", + "integrity": "sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==", + "requires": { + "pify": "^3.0.0" + }, + "dependencies": { + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + } + } + }, + "path-type": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", + "requires": { + "pify": "^3.0.0" + }, + "dependencies": { + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + } + } + }, + "slash": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz", + "integrity": "sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU=" + } + } + }, + "imagemin-mozjpeg": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/imagemin-mozjpeg/-/imagemin-mozjpeg-8.0.0.tgz", + "integrity": "sha512-+EciPiIjCb8JWjQNr1q8sYWYf7GDCNDxPYnkD11TNIjjWNzaV+oTg4DpOPQjl5ZX/KRCPMEgS79zLYAQzLitIA==", + "requires": { + "execa": "^1.0.0", + "is-jpg": "^2.0.0", + "mozjpeg": "^6.0.0" + }, + "dependencies": { + "execa": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", + "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", + "requires": { + "cross-spawn": "^6.0.0", + "get-stream": "^4.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + } + } + } + }, + "imagemin-pngquant": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/imagemin-pngquant/-/imagemin-pngquant-6.0.1.tgz", + "integrity": "sha512-Stk+fZCLxZznV8MFNA/T3AY/VRKevsiP9uZOLV0RCXoi0vUUFriySYuz/83IGp9D254EW8miGyyQ69zKouFr7w==", + "requires": { + "execa": "^0.10.0", + "is-png": "^1.0.0", + "is-stream": "^1.1.0", + "pngquant-bin": "^5.0.0" + }, + "dependencies": { + "execa": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-0.10.0.tgz", + "integrity": "sha512-7XOMnz8Ynx1gGo/3hyV9loYNPWM94jG3+3T3Y8tsfSstFmETmENCMU/A/zj8Lyaj1lkgEepKepvd6240tBRvlw==", + "requires": { + "cross-spawn": "^6.0.0", + "get-stream": "^3.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + } + }, + "get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" + } + } + 
}, + "imagemin-webp": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/imagemin-webp/-/imagemin-webp-5.1.0.tgz", + "integrity": "sha512-BsPTpobgbDPFBBsI3UflnU/cpIVa15qInEDBcYBw16qI/6XiB4vDF/dGp9l4aM3pfFDDYqR0mANMcKpBD7wbCw==", + "requires": { + "cwebp-bin": "^5.0.0", + "exec-buffer": "^3.0.0", + "is-cwebp-readable": "^2.0.1" + } + }, + "import-cwd": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/import-cwd/-/import-cwd-2.1.0.tgz", + "integrity": "sha1-qmzzbnInYShcs3HsZRn1PiQ1sKk=", + "requires": { + "import-from": "^2.1.0" + } + }, + "import-fresh": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.2.1.tgz", + "integrity": "sha512-6e1q1cnWP2RXD9/keSkxHScg508CdXqXWgWBaETNhyuBFz+kUZlKboh+ISK+bU++DmbHimVBrOz/zzPe0sZ3sQ==", + "requires": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + } + }, + "import-from": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/import-from/-/import-from-2.1.0.tgz", + "integrity": "sha1-M1238qev/VOqpHHUuAId7ja387E=", + "requires": { + "resolve-from": "^3.0.0" + }, + "dependencies": { + "resolve-from": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", + "integrity": "sha1-six699nWiBvItuZTM17rywoYh0g=" + } + } + }, + "import-lazy": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/import-lazy/-/import-lazy-2.1.0.tgz", + "integrity": "sha1-BWmOPUXIjo1+nZLLBYTnfwlvPkM=" + }, + "import-local": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-2.0.0.tgz", + "integrity": "sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ==", + "requires": { + "pkg-dir": "^3.0.0", + "resolve-cwd": "^2.0.0" + } + }, + "imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=" + }, + "indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==" + }, + "indexes-of": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/indexes-of/-/indexes-of-1.0.1.tgz", + "integrity": "sha1-8w9xbI4r00bHtn0985FVZqfAVgc=" + }, + "indexof": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz", + "integrity": "sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10=" + }, + "infer-owner": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/infer-owner/-/infer-owner-1.0.4.tgz", + "integrity": "sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==" + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "ini": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", + "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==" + }, + "ink": { + "version": "2.6.0", + "resolved": 
"https://registry.npmjs.org/ink/-/ink-2.6.0.tgz", + "integrity": "sha512-nD/wlSuB6WnFsFB0nUcOJdy28YvvDer3eo+gezjvZqojGA4Rx5sQpacvN//Aai83DRgwrRTyKBl5aciOcfP3zQ==", + "optional": true, + "requires": { + "ansi-escapes": "^4.2.1", + "arrify": "^2.0.1", + "auto-bind": "^3.0.0", + "chalk": "^3.0.0", + "cli-cursor": "^3.1.0", + "cli-truncate": "^2.0.0", + "is-ci": "^2.0.0", + "lodash.throttle": "^4.1.1", + "log-update": "^3.0.0", + "prop-types": "^15.6.2", + "react-reconciler": "^0.24.0", + "scheduler": "^0.18.0", + "signal-exit": "^3.0.2", + "slice-ansi": "^3.0.0", + "string-length": "^3.1.0", + "widest-line": "^3.1.0", + "wrap-ansi": "^6.2.0", + "yoga-layout-prebuilt": "^1.9.3" + }, + "dependencies": { + "ansi-styles": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", + "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "optional": true, + "requires": { + "@types/color-name": "^1.1.1", + "color-convert": "^2.0.1" + } + }, + "astral-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", + "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", + "optional": true + }, + "chalk": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", + "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "optional": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "optional": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "optional": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "optional": true + }, + "slice-ansi": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-3.0.0.tgz", + "integrity": "sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==", + "optional": true, + "requires": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + } + }, + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "optional": true, + "requires": { + "ansi-regex": "^5.0.0" + } + }, + "supports-color": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", + "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "optional": true, + "requires": { + "has-flag": "^4.0.0" + } + }, + "wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + 
"integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "optional": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + } + } + }, + "ink-spinner": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/ink-spinner/-/ink-spinner-3.0.1.tgz", + "integrity": "sha512-AVR4Z/NXDQ7dT5ltWcCzFS9Dd4T8eaO//E2UO8VYNiJcZpPCSJ11o5A0UVPcMlZxGbGD6ikUFDR3ZgPUQk5haQ==", + "optional": true, + "requires": { + "cli-spinners": "^1.0.0", + "prop-types": "^15.5.10" + } + }, + "inline-style-parser": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz", + "integrity": "sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==" + }, + "inquirer": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.0.2.tgz", + "integrity": "sha512-cZGvHaHwcR9E3xK9EGO5pHKELU+yaeJO7l2qGKIbqk4bCuDuAn15LCoUTS2nSkfv9JybFlnAGrOcVpCDZZOLhw==", + "requires": { + "ansi-escapes": "^4.2.1", + "chalk": "^2.4.2", + "cli-cursor": "^3.1.0", + "cli-width": "^2.0.0", + "external-editor": "^3.0.3", + "figures": "^3.0.0", + "lodash": "^4.17.15", + "mute-stream": "0.0.8", + "run-async": "^2.2.0", + "rxjs": "^6.5.3", + "string-width": "^4.1.0", + "strip-ansi": "^5.1.0", + "through": "^2.3.6" + } + }, + "internal-ip": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/internal-ip/-/internal-ip-4.3.0.tgz", + "integrity": "sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg==", + "requires": { + "default-gateway": "^4.2.0", + "ipaddr.js": "^1.9.0" + } + }, + "into-stream": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/into-stream/-/into-stream-3.1.0.tgz", + "integrity": "sha1-lvsKk2wSur1v8XUqF9BWFqvQlMY=", + "requires": { + "from2": "^2.1.1", + "p-is-promise": "^1.1.0" + }, + "dependencies": { + "p-is-promise": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/p-is-promise/-/p-is-promise-1.1.0.tgz", + "integrity": "sha1-nJRWmJ6fZYgBewQ01WCXZ1w9oF4=" + } + } + }, + "invariant": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", + "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", + "requires": { + "loose-envify": "^1.0.0" + } + }, + "invert-kv": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-2.0.0.tgz", + "integrity": "sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA==" + }, + "ip": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.5.tgz", + "integrity": "sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=" + }, + "ip-regex": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", + "integrity": "sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=" + }, + "ipaddr.js": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.0.tgz", + "integrity": "sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA==" + }, + "is-absolute-url": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-absolute-url/-/is-absolute-url-3.0.3.tgz", + "integrity": "sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q==" + }, + "is-accessor-descriptor": { + "version": "0.1.6", + 
"resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", + "integrity": "sha1-qeEss66Nh2cn7u84Q/igiXtcmNY=", + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + }, + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "is-alphabetical": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.3.tgz", + "integrity": "sha512-eEMa6MKpHFzw38eKm56iNNi6GJ7lf6aLLio7Kr23sJPAECscgRtZvOBYybejWDQ2bM949Y++61PY+udzj5QMLA==" + }, + "is-alphanumeric": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-alphanumeric/-/is-alphanumeric-1.0.0.tgz", + "integrity": "sha1-Spzvcdr0wAHB2B1j0UDPU/1oifQ=" + }, + "is-alphanumerical": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-1.0.3.tgz", + "integrity": "sha512-A1IGAPO5AW9vSh7omxIlOGwIqEvpW/TA+DksVOPM5ODuxKlZS09+TEM1E3275lJqO2oJ38vDpeAL3DCIiHE6eA==", + "requires": { + "is-alphabetical": "^1.0.0", + "is-decimal": "^1.0.0" + } + }, + "is-arguments": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.0.4.tgz", + "integrity": "sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA==" + }, + "is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=" + }, + "is-binary-path": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz", + "integrity": "sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg=", + "requires": { + "binary-extensions": "^1.0.0" + } + }, + "is-buffer": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.4.tgz", + "integrity": "sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==" + }, + "is-builtin-module": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-3.0.0.tgz", + "integrity": "sha512-/93sDihsAD652hrMEbJGbMAVBf1qc96kyThHQ0CAOONHaE3aROLpTjDe4WQ5aoC5ITHFxEq1z8XqSU7km+8amw==", + "requires": { + "builtin-modules": "^3.0.0" + } + }, + "is-callable": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", + "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==" + }, + "is-ci": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz", + "integrity": "sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==", + "requires": { + "ci-info": "^2.0.0" + } + }, + "is-color-stop": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-color-stop/-/is-color-stop-1.1.0.tgz", + "integrity": "sha1-z/9HGu5N1cnhWFmPvhKWe1za00U=", + "requires": { + "css-color-names": "^0.0.4", + "hex-color-regex": "^1.1.0", + "hsl-regex": "^1.0.0", + "hsla-regex": "^1.0.0", + "rgb-regex": "^1.0.1", + "rgba-regex": "^1.0.0" + } + }, + "is-cwebp-readable": { + "version": 
"2.0.1", + "resolved": "https://registry.npmjs.org/is-cwebp-readable/-/is-cwebp-readable-2.0.1.tgz", + "integrity": "sha1-r7k7DAq9CiUQEBauM66ort+SbSY=", + "requires": { + "file-type": "^4.3.0" + }, + "dependencies": { + "file-type": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-4.4.0.tgz", + "integrity": "sha1-G2AOX8ofvcboDApwxxyNul95BsU=" + } + } + }, + "is-data-descriptor": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", + "integrity": "sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y=", + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + }, + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "is-date-object": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", + "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==" + }, + "is-decimal": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.3.tgz", + "integrity": "sha512-bvLSwoDg2q6Gf+E2LEPiklHZxxiSi3XAh4Mav65mKqTfCO1HM3uBs24TjEH8iJX3bbDdLXKJXBTmGzuTUuAEjQ==" + }, + "is-descriptor": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz", + "integrity": "sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==", + "requires": { + "is-accessor-descriptor": "^0.1.6", + "is-data-descriptor": "^0.1.4", + "kind-of": "^5.0.0" + }, + "dependencies": { + "kind-of": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-5.1.0.tgz", + "integrity": "sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==" + } + } + }, + "is-directory": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/is-directory/-/is-directory-0.3.1.tgz", + "integrity": "sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE=" + }, + "is-docker": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.0.0.tgz", + "integrity": "sha512-pJEdRugimx4fBMra5z2/5iRdZ63OhYV0vr0Dwm5+xtW4D1FvRkB8hamMIhnWfyJeDdyr/aa7BDyNbtG38VxgoQ==" + }, + "is-extendable": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", + "integrity": "sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=" + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=" + }, + "is-finite": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-finite/-/is-finite-1.0.2.tgz", + "integrity": "sha1-zGZ3aVYCvlUO8R6LSqYwU0K20Ko=", + "requires": { + "number-is-nan": "^1.0.0" + } + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" + }, + "is-function": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/is-function/-/is-function-1.0.1.tgz", + "integrity": "sha1-Es+5i2W1fdPRk6MSH19uL0N2ArU=" + }, + "is-glob": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", + "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-hexadecimal": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-1.0.3.tgz", + "integrity": "sha512-zxQ9//Q3D/34poZf8fiy3m3XVpbQc7ren15iKqrTtLPwkPD/t3Scy9Imp63FujULGxuK0ZlCwoo5xNpktFgbOA==" + }, + "is-installed-globally": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.1.0.tgz", + "integrity": "sha1-Df2Y9akRFxbdU13aZJL2e/PSWoA=", + "requires": { + "global-dirs": "^0.1.0", + "is-path-inside": "^1.0.0" + }, + "dependencies": { + "is-path-inside": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-1.0.1.tgz", + "integrity": "sha1-jvW33lBDej/cprToZe96pVy0gDY=", + "requires": { + "path-is-inside": "^1.0.1" + } + } + } + }, + "is-invalid-path": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-invalid-path/-/is-invalid-path-0.1.0.tgz", + "integrity": "sha1-MHqFWzzxqTi0TqcNLGEQYFNxTzQ=", + "requires": { + "is-glob": "^2.0.0" + }, + "dependencies": { + "is-extglob": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz", + "integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA=" + }, + "is-glob": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz", + "integrity": "sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=", + "requires": { + "is-extglob": "^1.0.0" + } + } + } + }, + "is-jpg": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-jpg/-/is-jpg-2.0.0.tgz", + "integrity": "sha1-LhmX+m6RZuqsAkLarkQ0A+TvHZc=" + }, + "is-natural-number": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-natural-number/-/is-natural-number-4.0.1.tgz", + "integrity": "sha1-q5124dtM7VHjXeDHLr7PCfc0zeg=" + }, + "is-npm": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-npm/-/is-npm-3.0.0.tgz", + "integrity": "sha512-wsigDr1Kkschp2opC4G3yA6r9EgVA6NjRpWzIi9axXqeIaAATPRJc4uLujXe3Nd9uO8KoDyA4MD6aZSeXTADhA==" + }, + "is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=", + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + }, + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "is-obj": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", + "integrity": "sha1-PkcprB9f3gJc19g6iW2rn09n2w8=" + }, + "is-object": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-object/-/is-object-1.0.1.tgz", + "integrity": "sha1-iVJojF7C/9awPsyF52ngKQMINHA=" + }, + "is-path-cwd": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-2.2.0.tgz", + 
"integrity": "sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==" + }, + "is-path-in-cwd": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz", + "integrity": "sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ==", + "requires": { + "is-path-inside": "^2.1.0" + }, + "dependencies": { + "is-path-inside": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-2.1.0.tgz", + "integrity": "sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg==", + "requires": { + "path-is-inside": "^1.0.2" + } + } + } + }, + "is-path-inside": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.2.tgz", + "integrity": "sha512-/2UGPSgmtqwo1ktx8NDHjuPwZWmHhO+gj0f93EkhLB5RgW9RZevWYYlIkS6zePc6U2WpOdQYIwHe9YC4DWEBVg==" + }, + "is-plain-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", + "integrity": "sha1-caUMhCnfync8kqOQpKA7OfzVHT4=" + }, + "is-plain-object": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", + "requires": { + "isobject": "^3.0.1" + } + }, + "is-png": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-png/-/is-png-1.1.0.tgz", + "integrity": "sha1-1XSxK/J1wDUEVVcLDltXqwYgd84=" + }, + "is-promise": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz", + "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=" + }, + "is-regex": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", + "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "requires": { + "has": "^1.0.3" + } + }, + "is-regexp": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-regexp/-/is-regexp-1.0.0.tgz", + "integrity": "sha1-/S2INUXEa6xaYz57mgnof6LLUGk=" + }, + "is-relative": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-relative/-/is-relative-1.0.0.tgz", + "integrity": "sha512-Kw/ReK0iqwKeu0MITLFuj0jbPAmEiOsIwyIXvvbfa6QfmN9pkD1M+8pdk7Rl/dTKbH34/XBFMbgD4iMJhLQbGA==", + "requires": { + "is-unc-path": "^1.0.0" + } + }, + "is-relative-url": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-relative-url/-/is-relative-url-3.0.0.tgz", + "integrity": "sha512-U1iSYRlY2GIMGuZx7gezlB5dp1Kheaym7zKzO1PV06mOihiWTXejLwm4poEJysPyXF+HtK/BEd0DVlcCh30pEA==", + "requires": { + "is-absolute-url": "^3.0.0" + } + }, + "is-resolvable": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-resolvable/-/is-resolvable-1.1.0.tgz", + "integrity": "sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg==" + }, + "is-retry-allowed": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz", + "integrity": "sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==" + }, + "is-root": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-root/-/is-root-1.0.0.tgz", + "integrity": "sha1-B7bCM7w5TNnQK6FclmvWZg1jQtU=" + }, + "is-ssh": { + "version": "1.3.1", + "resolved": 
"https://registry.npmjs.org/is-ssh/-/is-ssh-1.3.1.tgz", + "integrity": "sha512-0eRIASHZt1E68/ixClI8bp2YK2wmBPVWEismTs6M+M099jKgrzl/3E976zIbImSIob48N2/XGe9y7ZiYdImSlg==", + "requires": { + "protocols": "^1.1.0" + } + }, + "is-stream": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", + "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=" + }, + "is-string": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", + "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==" + }, + "is-svg": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-svg/-/is-svg-3.0.0.tgz", + "integrity": "sha512-gi4iHK53LR2ujhLVVj+37Ykh9GLqYHX6JOVXbLAucaG/Cqw9xwdFOjDM2qeifLs1sF1npXXFvDu0r5HNgCMrzQ==", + "requires": { + "html-comment-regex": "^1.1.0" + } + }, + "is-symbol": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", + "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", + "requires": { + "has-symbols": "^1.0.1" + } + }, + "is-text-path": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-text-path/-/is-text-path-1.0.1.tgz", + "integrity": "sha1-Thqg+1G/vLPpJogAE5cgLBd1tm4=", + "requires": { + "text-extensions": "^1.0.0" + } + }, + "is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" + }, + "is-unc-path": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-unc-path/-/is-unc-path-1.0.0.tgz", + "integrity": "sha512-mrGpVd0fs7WWLfVsStvgF6iEJnbjDFZh9/emhRDcGWTduTfNHd9CHeUwH3gYIjdbwo4On6hunkztwOaAw0yllQ==", + "requires": { + "unc-path-regex": "^0.1.2" + } + }, + "is-utf8": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-utf8/-/is-utf8-0.2.1.tgz", + "integrity": "sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI=" + }, + "is-valid-path": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/is-valid-path/-/is-valid-path-0.1.1.tgz", + "integrity": "sha1-EQ+f90w39mPh7HkV60UfLbk6yd8=", + "requires": { + "is-invalid-path": "^0.1.0" + } + }, + "is-what": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/is-what/-/is-what-3.5.0.tgz", + "integrity": "sha512-00pwt/Jf7IaRh5m2Dp93Iw8LG2cd3OpDj3NrD1XPNUpAWVxPvBP296p4IiGmIU4Ur0f3f56IoIM+fS2pFYF+tQ==" + }, + "is-whitespace-character": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-whitespace-character/-/is-whitespace-character-1.0.3.tgz", + "integrity": "sha512-SNPgMLz9JzPccD3nPctcj8sZlX9DAMJSKH8bP7Z6bohCwuNgX8xbWr1eTAYXX9Vpi/aSn8Y1akL9WgM3t43YNQ==" + }, + "is-windows": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", + "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==" + }, + "is-word-character": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-word-character/-/is-word-character-1.0.3.tgz", + "integrity": "sha512-0wfcrFgOOOBdgRNT9H33xe6Zi6yhX/uoc4U8NBZGeQQB0ctU1dnlNTyL9JM2646bHDTpsDm1Brb3VPoCIMrd/A==" + }, + "is-wsl": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.1.1.tgz", + "integrity": "sha512-umZHcSrwlDHo2TGMXv0DZ8dIUGunZ2Iv68YZnrmCiBPkZ4aaOhtv7pXJKeki9k3qJ3RJr0cDyitcl5wEH3AYog==" + }, + "is-yarn-global": { + "version": "0.3.0", + 
"resolved": "https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.3.0.tgz", + "integrity": "sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw==" + }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=" + }, + "isobject": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", + "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=" + }, + "isomorphic-fetch": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/isomorphic-fetch/-/isomorphic-fetch-2.2.1.tgz", + "integrity": "sha1-YRrhrPFPXoH3KVB0coGf6XM1WKk=", + "requires": { + "node-fetch": "^1.0.1", + "whatwg-fetch": ">=0.10.0" + } + }, + "isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" + }, + "isurl": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isurl/-/isurl-1.0.0.tgz", + "integrity": "sha512-1P/yWsxPlDtn7QeRD+ULKQPaIaN6yF368GZ2vDfv0AL0NwpStafjWCDDdn0k8wgFMWpVAqG7oJhxHnlud42i9w==", + "requires": { + "has-to-string-tag-x": "^1.2.0", + "is-object": "^1.0.1" + } + }, + "iterall": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/iterall/-/iterall-1.3.0.tgz", + "integrity": "sha512-QZ9qOMdF+QLHxy1QIpUHUU1D5pS2CG2P69LF6L6CPjPYA/XMOmKV3PZpawHoAjHNyB0swdVTRxdYT4tbBbxqwg==" + }, + "jest-worker": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-24.9.0.tgz", + "integrity": "sha512-51PE4haMSXcHohnSMdM42anbvZANYTqMrr52tVKPqqsPJMzoP6FYYDVqahX/HrAoKEKz3uUPzSvKs9A3qR4iVw==", + "requires": { + "merge-stream": "^2.0.0", + "supports-color": "^6.1.0" + }, + "dependencies": { + "supports-color": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", + "integrity": "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", + "requires": { + "has-flag": "^3.0.0" + } + } + } + }, + "jimp": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/jimp/-/jimp-0.6.8.tgz", + "integrity": "sha512-F7emeG7Hp61IM8VFbNvWENLTuHe0ghizWPuP4JS9ujx2r5mCVYEd/zdaz6M2M42ZdN41blxPajLWl9FXo7Mr2Q==", + "requires": { + "@jimp/custom": "^0.6.8", + "@jimp/plugins": "^0.6.8", + "@jimp/types": "^0.6.8", + "core-js": "^2.5.7", + "regenerator-runtime": "^0.13.3" + } + }, + "jpeg-js": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/jpeg-js/-/jpeg-js-0.3.6.tgz", + "integrity": "sha512-MUj2XlMB8kpe+8DJUGH/3UJm4XpI8XEgZQ+CiHDeyrGoKPdW/8FJv6ku+3UiYm5Fz3CWaL+iXmD8Q4Ap6aC1Jw==" + }, + "js-levenshtein": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/js-levenshtein/-/js-levenshtein-1.1.6.tgz", + "integrity": "sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g==" + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + }, + "js-yaml": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": 
"sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, + "jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" + }, + "jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==" + }, + "json-buffer": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz", + "integrity": "sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg=" + }, + "json-loader": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/json-loader/-/json-loader-0.5.7.tgz", + "integrity": "sha512-QLPs8Dj7lnf3e3QYS1zkCo+4ZwqOiF9d/nZnYozTISxXWCfNs9yuky5rJw4/W34s7POaNlbZmQGaB5NiXCbP4w==" + }, + "json-parse-better-errors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", + "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==" + }, + "json-schema": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", + "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, + "json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=" + }, + "json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" + }, + "json3": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/json3/-/json3-3.3.3.tgz", + "integrity": "sha512-c7/8mbUsKigAbLkD5B010BK4D9LZm7A1pNItkEwiUZRpIN66exu/e7YQWysGun+TRKaJp8MhemM+VkfWv42aCA==" + }, + "json5": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.1.1.tgz", + "integrity": "sha512-l+3HXD0GEI3huGq1njuqtzYK8OYJyXMkOLtQ53pjWh89tvWS2h6l+1zMkYWqlb57+SiQodKZyvMEFb2X+KrFhQ==", + "requires": { + "minimist": "^1.2.0" + }, + "dependencies": { + "minimist": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=" + } + } + }, + "jsonfile": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", + "integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=", + "requires": { + "graceful-fs": "^4.1.6" + } + }, + "jsonify": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/jsonify/-/jsonify-0.0.0.tgz", + "integrity": "sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM=" + }, + "jsprim": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", + "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "requires": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.2.3", + "verror": "1.10.0" + } + }, + "jsx-ast-utils": { + "version": "2.2.3", + "resolved": 
"https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-2.2.3.tgz", + "integrity": "sha512-EdIHFMm+1BPynpKOpdPqiOsvnIrInRGJD7bzPZdPkjitQEqpdpUuFpq4T0npZFKTiB3RhWFdGN+oqOJIdhDhQA==", + "requires": { + "array-includes": "^3.0.3", + "object.assign": "^4.1.0" + } + }, + "keyv": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-3.1.0.tgz", + "integrity": "sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA==", + "requires": { + "json-buffer": "3.0.0" + } + }, + "killable": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/killable/-/killable-1.0.1.tgz", + "integrity": "sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg==" + }, + "kind-of": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.2.tgz", + "integrity": "sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA==" + }, + "kleur": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==" + }, + "last-call-webpack-plugin": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/last-call-webpack-plugin/-/last-call-webpack-plugin-3.0.0.tgz", + "integrity": "sha512-7KI2l2GIZa9p2spzPIVZBYyNKkN+e/SQPpnjlTiPhdbDW3F86tdKKELxKpzJ5sgU19wQWsACULZmpTPYHeWO5w==", + "requires": { + "lodash": "^4.17.5", + "webpack-sources": "^1.1.0" + } + }, + "latest-version": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/latest-version/-/latest-version-5.1.0.tgz", + "integrity": "sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA==", + "requires": { + "package-json": "^6.3.0" + } + }, + "lcid": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/lcid/-/lcid-2.0.0.tgz", + "integrity": "sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA==", + "requires": { + "invert-kv": "^2.0.0" + } + }, + "leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==" + }, + "levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", + "requires": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + } + }, + "lines-and-columns": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz", + "integrity": "sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=" + }, + "load-bmfont": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/load-bmfont/-/load-bmfont-1.4.0.tgz", + "integrity": "sha512-kT63aTAlNhZARowaNYcY29Fn/QYkc52M3l6V1ifRcPewg2lvUZDAj7R6dXjOL9D0sict76op3T5+odumDSF81g==", + "requires": { + "buffer-equal": "0.0.1", + "mime": "^1.3.4", + "parse-bmfont-ascii": "^1.0.3", + "parse-bmfont-binary": "^1.0.5", + "parse-bmfont-xml": "^1.1.4", + "phin": "^2.9.1", + "xhr": "^2.0.1", + "xtend": "^4.0.0" + }, + "dependencies": { + "mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==" + } + } + }, + "load-json-file": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", + "integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=", + "requires": { + "graceful-fs": "^4.1.2", + "parse-json": "^2.2.0", + "pify": "^2.0.0", + "strip-bom": "^3.0.0" + }, + "dependencies": { + "parse-json": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "requires": { + "error-ex": "^1.2.0" + } + }, + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=" + } + } + }, + "loader-fs-cache": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/loader-fs-cache/-/loader-fs-cache-1.0.2.tgz", + "integrity": "sha512-70IzT/0/L+M20jUlEqZhZyArTU6VKLRTYRDAYN26g4jfzpJqjipLL3/hgYpySqI9PwsVRHHFja0LfEmsx9X2Cw==", + "requires": { + "find-cache-dir": "^0.1.1", + "mkdirp": "0.5.1" + }, + "dependencies": { + "find-cache-dir": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-0.1.1.tgz", + "integrity": "sha1-yN765XyKUqinhPnjHFfHQumToLk=", + "requires": { + "commondir": "^1.0.1", + "mkdirp": "^0.5.1", + "pkg-dir": "^1.0.0" + } + }, + "find-up": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz", + "integrity": "sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8=", + "requires": { + "path-exists": "^2.0.0", + "pinkie-promise": "^2.0.0" + } + }, + "path-exists": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz", + "integrity": "sha1-D+tsZPD8UY2adU3V77YscCJ2H0s=", + "requires": { + "pinkie-promise": "^2.0.0" + } + }, + "pkg-dir": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-1.0.0.tgz", + "integrity": "sha1-ektQio1bstYp1EcFb/TpyTFM89Q=", + "requires": { + "find-up": "^1.0.0" + } + } + } + }, + "loader-runner": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-2.4.0.tgz", + "integrity": "sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw==" + }, + "loader-utils": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.2.3.tgz", + "integrity": "sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA==", + "requires": { + "big.js": "^5.2.2", + "emojis-list": "^2.0.0", + "json5": "^1.0.1" + }, + "dependencies": { + "json5": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", + "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", + "requires": { + "minimist": "^1.2.0" + } + }, + "minimist": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=" + } + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "lockfile": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/lockfile/-/lockfile-1.0.4.tgz", + "integrity": "sha512-cvbTwETRfsFh4nHsL1eGWapU1XFi5Ot9E85sWAwia7Y7EgB7vfqcZhTKZ+l7hCGxSPoushMv5GKhT5PdLv03WA==", + "requires": { + "signal-exit": "^3.0.2" 
+ } + }, + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" + }, + "lodash._reinterpolate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz", + "integrity": "sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0=" + }, + "lodash.clonedeep": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", + "integrity": "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=" + }, + "lodash.escaperegexp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.escaperegexp/-/lodash.escaperegexp-4.1.2.tgz", + "integrity": "sha1-ZHYsSGGAglGKw99Mz11YhtriA0c=" + }, + "lodash.every": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/lodash.every/-/lodash.every-4.6.0.tgz", + "integrity": "sha1-64mYS+vENkJ5uzrvu9HKGb+mxqc=" + }, + "lodash.flattendeep": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz", + "integrity": "sha1-+wMJF/hqMTTlvJvsDWngAT3f7bI=" + }, + "lodash.foreach": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.foreach/-/lodash.foreach-4.5.0.tgz", + "integrity": "sha1-Gmo16s5AEoDH8G3d7DUWWrJ+PlM=" + }, + "lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=" + }, + "lodash.isstring": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", + "integrity": "sha1-1SfftUVuynzJu5XV2ur4i6VKVFE=" + }, + "lodash.map": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/lodash.map/-/lodash.map-4.6.0.tgz", + "integrity": "sha1-dx7Hg540c9nEzeKLGTlMNWL09tM=" + }, + "lodash.maxby": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/lodash.maxby/-/lodash.maxby-4.6.0.tgz", + "integrity": "sha1-CCJABo88eiJ6oAqDgOTzjPB4bj0=" + }, + "lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4=" + }, + "lodash.mergewith": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.mergewith/-/lodash.mergewith-4.6.2.tgz", + "integrity": "sha512-GK3g5RPZWTRSeLSpgP8Xhra+pnjBC56q9FZYe1d5RN3TJ35dbkGy3YqBSMbyCrlbi+CM9Z3Jk5yTL7RCsqboyQ==" + }, + "lodash.template": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.template/-/lodash.template-4.5.0.tgz", + "integrity": "sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A==", + "requires": { + "lodash._reinterpolate": "^3.0.0", + "lodash.templatesettings": "^4.0.0" + } + }, + "lodash.templatesettings": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/lodash.templatesettings/-/lodash.templatesettings-4.2.0.tgz", + "integrity": "sha512-stgLz+i3Aa9mZgnjr/O+v9ruKZsPsndy7qPZOchbqk2cnTU1ZaldKK+v7m54WoKIyxiuMZTKT2H81F8BeAc3ZQ==", + "requires": { + "lodash._reinterpolate": "^3.0.0" + } + }, + "lodash.throttle": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.throttle/-/lodash.throttle-4.1.1.tgz", + "integrity": "sha1-wj6RtxAkKscMN/HhzaknTMOb8vQ=", + "optional": true + }, + "lodash.toarray": { + "version": "4.4.0", + "resolved": 
"https://registry.npmjs.org/lodash.toarray/-/lodash.toarray-4.4.0.tgz", + "integrity": "sha1-JMS/zWsvuji/0FlNsRedjptlZWE=" + }, + "lodash.unescape": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.unescape/-/lodash.unescape-4.0.1.tgz", + "integrity": "sha1-vyJJiGzlFM2hEvrpIYzcBlIR/Jw=" + }, + "lodash.uniq": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", + "integrity": "sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=" + }, + "log-update": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/log-update/-/log-update-3.3.0.tgz", + "integrity": "sha512-YSKm5n+YjZoGZT5lfmOqasVH1fIH9xQA9A81Y48nZ99PxAP62vdCCtua+Gcu6oTn0nqtZd/LwRV+Vflo53ZDWA==", + "optional": true, + "requires": { + "ansi-escapes": "^3.2.0", + "cli-cursor": "^2.1.0", + "wrap-ansi": "^5.0.0" + }, + "dependencies": { + "ansi-escapes": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", + "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", + "optional": true + }, + "cli-cursor": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", + "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", + "optional": true, + "requires": { + "restore-cursor": "^2.0.0" + } + }, + "mimic-fn": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", + "optional": true + }, + "onetime": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", + "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "optional": true, + "requires": { + "mimic-fn": "^1.0.0" + } + }, + "restore-cursor": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", + "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "optional": true, + "requires": { + "onetime": "^2.0.0", + "signal-exit": "^3.0.2" + } + } + } + }, + "logalot": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/logalot/-/logalot-2.1.0.tgz", + "integrity": "sha1-X46MkNME7fElMJUaVVSruMXj9VI=", + "requires": { + "figures": "^1.3.5", + "squeak": "^1.0.0" + }, + "dependencies": { + "figures": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-1.7.0.tgz", + "integrity": "sha1-y+Hjr/zxzUS4DK3+0o3Hk6lwHS4=", + "requires": { + "escape-string-regexp": "^1.0.5", + "object-assign": "^4.1.0" + } + } + } + }, + "loglevel": { + "version": "1.6.6", + "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.6.6.tgz", + "integrity": "sha512-Sgr5lbboAUBo3eXCSPL4/KoVz3ROKquOjcctxmHIt+vol2DrqTQe3SwkKKuYhEiWB5kYa13YyopJ69deJ1irzQ==" + }, + "lokijs": { + "version": "1.5.8", + "resolved": "https://registry.npmjs.org/lokijs/-/lokijs-1.5.8.tgz", + "integrity": "sha512-D8E3TBrY35o1ELnonp2MF8b3wKu2tVNl2TqRjvS+95oPMMe7OoIAxNY1qr+5BEZwnWn2V4ErAjVt000DonM+FA==" + }, + "longest": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/longest/-/longest-1.0.1.tgz", + "integrity": "sha1-MKCy2jj3N3DoKUoNIuZiXtd9AJc=" + }, + "longest-streak": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-2.0.3.tgz", + "integrity": "sha512-9lz5IVdpwsKLMzQi0MQ+oD9EA0mIGcWYP7jXMTZVXP8D42PwuAk+M/HBFYQoxt1G5OR8m7aSIgb1UymfWGBWEw==" + }, + "loose-envify": { + "version": "1.4.0", + 
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "requires": { + "js-tokens": "^3.0.0 || ^4.0.0" + } + }, + "loud-rejection": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/loud-rejection/-/loud-rejection-2.2.0.tgz", + "integrity": "sha512-S0FayMXku80toa5sZ6Ro4C+s+EtFDCsyJNG/AzFMfX3AxD5Si4dZsgzm/kKnbOxHl5Cv8jBlno8+3XYIh2pNjQ==", + "requires": { + "currently-unhandled": "^0.4.1", + "signal-exit": "^3.0.2" + } + }, + "lowercase-keys": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", + "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==" + }, + "lpad-align": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/lpad-align/-/lpad-align-1.1.2.tgz", + "integrity": "sha1-IfYArBwwlcPG5JfuZyce4ISB/p4=", + "requires": { + "get-stdin": "^4.0.1", + "indent-string": "^2.1.0", + "longest": "^1.0.0", + "meow": "^3.3.0" + }, + "dependencies": { + "indent-string": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-2.1.0.tgz", + "integrity": "sha1-ji1INIdCEhtKghi3oTfppSBJ3IA=", + "requires": { + "repeating": "^2.0.0" + } + } + } + }, + "lru-cache": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.0.0.tgz", + "integrity": "sha1-tcvwFVbBaWb+vlTO7A+03JDfbCg=", + "requires": { + "pseudomap": "^1.0.1", + "yallist": "^2.0.0" + } + }, + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "mamacro": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/mamacro/-/mamacro-0.0.3.tgz", + "integrity": "sha512-qMEwh+UujcQ+kbz3T6V+wAmO2U8veoq2w+3wY8MquqwVA3jChfwY+Tk52GZKDfACEPjuZ7r2oJLejwpt8jtwTA==" + }, + "map-age-cleaner": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz", + "integrity": "sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w==", + "requires": { + "p-defer": "^1.0.0" + } + }, + "map-cache": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz", + "integrity": "sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8=" + }, + "map-obj": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", + "integrity": "sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0=" + }, + "map-visit": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/map-visit/-/map-visit-1.0.0.tgz", + "integrity": "sha1-7Nyo8TFE5mDxtb1B8S80edmN+48=", + "requires": { + "object-visit": "^1.0.0" + } + }, + "markdown-escapes": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/markdown-escapes/-/markdown-escapes-1.0.3.tgz", + "integrity": "sha512-XUi5HJhhV5R74k8/0H2oCbCiYf/u4cO/rX8tnGkRvrqhsr5BRNU6Mg0yt/8UIx1iIS8220BNJsDb7XnILhLepw==" + }, + "markdown-table": { + "version": "1.1.3", + "resolved": 
"https://registry.npmjs.org/markdown-table/-/markdown-table-1.1.3.tgz", + "integrity": "sha512-1RUZVgQlpJSPWYbFSpmudq5nHY1doEIv89gBtF0s4gW1GF2XorxcA/70M5vq7rLv0a6mhOUccRsqkwhwLCIQ2Q==" + }, + "md5": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/md5/-/md5-2.2.1.tgz", + "integrity": "sha1-U6s41f48iJG6RlMp6iP6wFQBJvk=", + "requires": { + "charenc": "~0.0.1", + "crypt": "~0.0.1", + "is-buffer": "~1.1.1" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + } + } + }, + "md5-file": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/md5-file/-/md5-file-3.2.3.tgz", + "integrity": "sha512-3Tkp1piAHaworfcCgH0jKbTvj1jWWFgbvh2cXaNCgHwyTCBxxvD1Y04rmfpvdPm1P4oXMOpm6+2H7sr7v9v8Fw==", + "requires": { + "buffer-alloc": "^1.1.0" + } + }, + "md5.js": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz", + "integrity": "sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==", + "requires": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1", + "safe-buffer": "^5.1.2" + } + }, + "mdast-util-compact": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mdast-util-compact/-/mdast-util-compact-1.0.4.tgz", + "integrity": "sha512-3YDMQHI5vRiS2uygEFYaqckibpJtKq5Sj2c8JioeOQBU6INpKbdWzfyLqFFnDwEcEnRFIdMsguzs5pC1Jp4Isg==", + "requires": { + "unist-util-visit": "^1.1.0" + } + }, + "mdast-util-definitions": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-1.2.5.tgz", + "integrity": "sha512-CJXEdoLfiISCDc2JB6QLb79pYfI6+GcIH+W2ox9nMc7od0Pz+bovcHsiq29xAQY6ayqe/9CsK2VzkSJdg1pFYA==", + "requires": { + "unist-util-visit": "^1.0.0" + } + }, + "mdast-util-to-hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-3.0.4.tgz", + "integrity": "sha512-/eIbly2YmyVgpJNo+bFLLMCI1XgolO/Ffowhf+pHDq3X4/V6FntC9sGQCDLM147eTS+uSXv5dRzJyFn+o0tazA==", + "requires": { + "collapse-white-space": "^1.0.0", + "detab": "^2.0.0", + "mdast-util-definitions": "^1.2.0", + "mdurl": "^1.0.1", + "trim": "0.0.1", + "trim-lines": "^1.0.0", + "unist-builder": "^1.0.1", + "unist-util-generated": "^1.1.0", + "unist-util-position": "^3.0.0", + "unist-util-visit": "^1.1.0", + "xtend": "^4.0.1" + } + }, + "mdast-util-to-nlcst": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/mdast-util-to-nlcst/-/mdast-util-to-nlcst-3.2.3.tgz", + "integrity": "sha512-hPIsgEg7zCvdU6/qvjcR6lCmJeRuIEpZGY5xBV+pqzuMOvQajyyF8b6f24f8k3Rw8u40GwkI3aAxUXr3bB2xag==", + "requires": { + "nlcst-to-string": "^2.0.0", + "repeat-string": "^1.5.2", + "unist-util-position": "^3.0.0", + "vfile-location": "^2.0.0" + } + }, + "mdast-util-to-string": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-1.0.7.tgz", + "integrity": "sha512-P+gdtssCoHOX+eJUrrC30Sixqao86ZPlVjR5NEAoy0U79Pfxb1Y0Gntei0+GrnQD4T04X9xA8tcugp90cSmNow==" + }, + "mdast-util-toc": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-toc/-/mdast-util-toc-5.0.0.tgz", + "integrity": "sha512-1ExJKC+85/+Q2LfuASOjdGTB7n/ikQperjiv+7OEVCpRbabr/DGZzEXEZfsZr/k4Pd3g/Gim9DV44/rPjczMAw==", + "requires": { + "@types/mdast": "^3.0.3", + "@types/unist": "^2.0.3", + "extend": "^3.0.2", + "github-slugger": 
"^1.2.1", + "mdast-util-to-string": "^1.0.5", + "unist-util-is": "^4.0.0", + "unist-util-visit": "^2.0.0" + }, + "dependencies": { + "unist-util-is": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.0.1.tgz", + "integrity": "sha512-7NYjErP4LJtkEptPR22wO5RsCPnHZZrop7t2SoQzjvpFedCFer4WW8ujj9GI5DkUX7yVcffXLjoURf6h2QUv6Q==" + }, + "unist-util-visit": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.1.tgz", + "integrity": "sha512-bEDa5S/O8WRDeI1mLaMoKuFFi89AjF+UAoMNxO+bbVdo06q+53Vhq4iiv1PenL6Rx1ZxIpXIzqZoc5HD2I1oMA==", + "requires": { + "@types/unist": "^2.0.0", + "unist-util-is": "^4.0.0", + "unist-util-visit-parents": "^3.0.0" + } + }, + "unist-util-visit-parents": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.0.1.tgz", + "integrity": "sha512-umEOTkm6/y1gIqPrqet55mYqlvGXCia/v1FSc5AveLAI7jFmOAIbqiwcHcviLcusAkEQt1bq2hixCKO9ltMb2Q==", + "requires": { + "@types/unist": "^2.0.0", + "unist-util-is": "^4.0.0" + } + } + } + }, + "mdn-data": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.4.tgz", + "integrity": "sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA==" + }, + "mdurl": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", + "integrity": "sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4=" + }, + "meant": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/meant/-/meant-1.0.1.tgz", + "integrity": "sha512-UakVLFjKkbbUwNWJ2frVLnnAtbb7D7DsloxRd3s/gDpI8rdv8W5Hp3NaDb+POBI1fQdeussER6NB8vpcRURvlg==" + }, + "media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" + }, + "mem": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/mem/-/mem-4.3.0.tgz", + "integrity": "sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w==", + "requires": { + "map-age-cleaner": "^0.1.1", + "mimic-fn": "^2.0.0", + "p-is-promise": "^2.0.0" + } + }, + "memoize-one": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/memoize-one/-/memoize-one-5.1.1.tgz", + "integrity": "sha512-HKeeBpWvqiVJD57ZUAsJNm71eHTykffzcLZVYWiVfQeI1rJtuEaS7hQiEpWfVVk18donPwJEcFKIkCmPJNOhHA==" + }, + "memory-fs": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/memory-fs/-/memory-fs-0.4.1.tgz", + "integrity": "sha1-OpoguEYlI+RHz7x+i7gO1me/xVI=", + "requires": { + "errno": "^0.1.3", + "readable-stream": "^2.0.1" + } + }, + "meow": { + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/meow/-/meow-3.7.0.tgz", + "integrity": "sha1-cstmi0JSKCkKu/qFaJJYcwioAfs=", + "requires": { + "camelcase-keys": "^2.0.0", + "decamelize": "^1.1.2", + "loud-rejection": "^1.0.0", + "map-obj": "^1.0.1", + "minimist": "^1.1.3", + "normalize-package-data": "^2.3.4", + "object-assign": "^4.0.1", + "read-pkg-up": "^1.0.1", + "redent": "^1.0.0", + "trim-newlines": "^1.0.0" + }, + "dependencies": { + "find-up": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz", + "integrity": "sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8=", + "requires": { + "path-exists": "^2.0.0", + "pinkie-promise": "^2.0.0" + } + }, + "load-json-file": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz", + "integrity": 
"sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA=", + "requires": { + "graceful-fs": "^4.1.2", + "parse-json": "^2.2.0", + "pify": "^2.0.0", + "pinkie-promise": "^2.0.0", + "strip-bom": "^2.0.0" + } + }, + "loud-rejection": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/loud-rejection/-/loud-rejection-1.6.0.tgz", + "integrity": "sha1-W0b4AUft7leIcPCG0Eghz5mOVR8=", + "requires": { + "currently-unhandled": "^0.4.1", + "signal-exit": "^3.0.0" + } + }, + "minimist": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=" + }, + "parse-json": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "requires": { + "error-ex": "^1.2.0" + } + }, + "path-exists": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz", + "integrity": "sha1-D+tsZPD8UY2adU3V77YscCJ2H0s=", + "requires": { + "pinkie-promise": "^2.0.0" + } + }, + "path-type": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-1.1.0.tgz", + "integrity": "sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE=", + "requires": { + "graceful-fs": "^4.1.2", + "pify": "^2.0.0", + "pinkie-promise": "^2.0.0" + } + }, + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=" + }, + "read-pkg": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-1.1.0.tgz", + "integrity": "sha1-9f+qXs0pyzHAR0vKfXVra7KePyg=", + "requires": { + "load-json-file": "^1.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^1.0.0" + } + }, + "read-pkg-up": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-1.0.1.tgz", + "integrity": "sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI=", + "requires": { + "find-up": "^1.0.0", + "read-pkg": "^1.0.0" + } + }, + "strip-bom": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz", + "integrity": "sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4=", + "requires": { + "is-utf8": "^0.2.0" + } + } + } + }, + "merge-anything": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/merge-anything/-/merge-anything-2.4.4.tgz", + "integrity": "sha512-l5XlriUDJKQT12bH+rVhAHjwIuXWdAIecGwsYjv2LJo+dA1AeRTmeQS+3QBpO6lEthBMDi2IUMpLC1yyRvGlwQ==", + "requires": { + "is-what": "^3.3.1" + } + }, + "merge-descriptors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", + "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" + }, + "merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==" + }, + "merge2": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.3.0.tgz", + "integrity": "sha512-2j4DAdlBOkiSZIsaXk4mTE3sRS02yBHAtfy127xRV3bQUFqXkjHCHLW6Scv7DwNRbIWNHH8zpnz9zMaKXIdvYw==" + }, + "methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" + }, + "micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": 
"sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + } + }, + "miller-rabin": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/miller-rabin/-/miller-rabin-4.0.1.tgz", + "integrity": "sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA==", + "requires": { + "bn.js": "^4.0.0", + "brorand": "^1.0.1" + } + }, + "mime": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", + "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==" + }, + "mime-db": { + "version": "1.43.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.43.0.tgz", + "integrity": "sha512-+5dsGEEovYbT8UY9yD7eE4XTc4UwJ1jBYlgaQQF38ENsKR3wj/8q8RFZrF9WIZpB2V1ArTVFUva8sAul1NzRzQ==" + }, + "mime-types": { + "version": "2.1.26", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz", + "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==", + "requires": { + "mime-db": "1.43.0" + } + }, + "mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==" + }, + "mimic-response": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", + "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==" + }, + "min-document": { + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/min-document/-/min-document-2.19.0.tgz", + "integrity": "sha1-e9KC4/WELtKVu3SM3Z8f+iyCRoU=", + "requires": { + "dom-walk": "^0.1.0" + } + }, + "mini-css-extract-plugin": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-0.8.2.tgz", + "integrity": "sha512-a3Y4of27Wz+mqK3qrcd3VhYz6cU0iW5x3Sgvqzbj+XmlrSizmvu8QQMl5oMYJjgHOC4iyt+w7l4umP+dQeW3bw==", + "requires": { + "loader-utils": "^1.1.0", + "normalize-url": "1.9.1", + "schema-utils": "^1.0.0", + "webpack-sources": "^1.1.0" + }, + "dependencies": { + "normalize-url": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-1.9.1.tgz", + "integrity": "sha1-LMDWazHqIwNkWENuNiDYWVTGbDw=", + "requires": { + "object-assign": "^4.0.1", + "prepend-http": "^1.0.0", + "query-string": "^4.1.0", + "sort-keys": "^1.0.0" + } + }, + "prepend-http": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-1.0.4.tgz", + "integrity": "sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw=" + }, + "query-string": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/query-string/-/query-string-4.3.4.tgz", + "integrity": "sha1-u7aTucqRXCMlFbIosaArYJBD2+s=", + "requires": { + "object-assign": "^4.1.0", + "strict-uri-encode": "^1.0.0" + } + }, + "schema-utils": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", + "integrity": 
"sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==", + "requires": { + "ajv": "^6.1.0", + "ajv-errors": "^1.0.0", + "ajv-keywords": "^3.1.0" + } + }, + "sort-keys": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-1.1.2.tgz", + "integrity": "sha1-RBttTTRnmPG05J6JIK37oOVD+a0=", + "requires": { + "is-plain-obj": "^1.0.0" + } + } + } + }, + "mini-svg-data-uri": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/mini-svg-data-uri/-/mini-svg-data-uri-1.1.3.tgz", + "integrity": "sha512-EeKOmdzekjdPe53/GdxmUpNgDQFkNeSte6XkJmOBt4BfWL6FQ9G9RtLNh+JMjFS3LhdpSICMIkZdznjiecASHQ==" + }, + "minimalistic-assert": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", + "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==" + }, + "minimalistic-crypto-utils": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz", + "integrity": "sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo=" + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" + }, + "minipass": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.1.tgz", + "integrity": "sha512-UFqVihv6PQgwj8/yTGvl9kPz7xIAY+R5z6XYjRInD3Gk3qx6QGSD6zEcpeG4Dy/lQnv1J6zv8ejV90hyYIKf3w==", + "requires": { + "yallist": "^4.0.0" + }, + "dependencies": { + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } + }, + "minizlib": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.0.tgz", + "integrity": "sha512-EzTZN/fjSvifSX0SlqUERCN39o6T40AMarPbv0MrarSFtIITCBh7bi+dU8nxGFHuqs9jdIAeoYoKuQAAASsPPA==", + "requires": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "dependencies": { + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } + }, + "mississippi": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mississippi/-/mississippi-3.0.0.tgz", + "integrity": "sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA==", + "requires": { + "concat-stream": "^1.5.0", + "duplexify": "^3.4.2", + "end-of-stream": "^1.1.0", + "flush-write-stream": "^1.0.0", + "from2": "^2.1.0", + "parallel-transform": "^1.1.0", + "pump": "^3.0.0", + "pumpify": "^1.3.3", + "stream-each": "^1.1.0", + "through2": "^2.0.0" + } + }, + "mitt": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mitt/-/mitt-1.2.0.tgz", + "integrity": "sha512-r6lj77KlwqLhIUku9UWYes7KJtsczvolZkzp8hbaDPPaE24OmWl5s539Mytlj22siEQKosZ26qCBgda2PKwoJw==" + }, + "mixin-deep": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz", + 
"integrity": "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==", + "requires": { + "for-in": "^1.0.2", + "is-extendable": "^1.0.1" + }, + "dependencies": { + "is-extendable": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", + "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", + "requires": { + "is-plain-object": "^2.0.4" + } + } + } + }, + "mkdirp": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "requires": { + "minimist": "0.0.8" + } + }, + "moment": { + "version": "2.24.0", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.24.0.tgz", + "integrity": "sha512-bV7f+6l2QigeBBZSM/6yTNq4P2fNpSWj/0e7jQcy87A8e7o2nAfP/34/2ky5Vw4B9S446EtIhodAzkFCcR4dQg==" + }, + "move-concurrently": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/move-concurrently/-/move-concurrently-1.0.1.tgz", + "integrity": "sha1-viwAX9oy4LKa8fBdfEszIUxwH5I=", + "requires": { + "aproba": "^1.1.1", + "copy-concurrently": "^1.0.0", + "fs-write-stream-atomic": "^1.0.8", + "mkdirp": "^0.5.1", + "rimraf": "^2.5.4", + "run-queue": "^1.0.3" + } + }, + "mozjpeg": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/mozjpeg/-/mozjpeg-6.0.1.tgz", + "integrity": "sha512-9Z59pJMi8ni+IUvSH5xQwK5tNLw7p3dwDNCZ3o1xE+of3G5Hc/yOz6Ue/YuLiBXU3ZB5oaHPURyPdqfBX/QYJA==", + "requires": { + "bin-build": "^3.0.0", + "bin-wrapper": "^4.0.0", + "logalot": "^2.1.0" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "multicast-dns": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/multicast-dns/-/multicast-dns-6.2.3.tgz", + "integrity": "sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g==", + "requires": { + "dns-packet": "^1.3.1", + "thunky": "^1.0.2" + } + }, + "multicast-dns-service-types": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz", + "integrity": "sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE=" + }, + "mute-stream": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==" + }, + "name-all-modules-plugin": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/name-all-modules-plugin/-/name-all-modules-plugin-1.0.1.tgz", + "integrity": "sha1-Cr+2rYNXGLn7Te8GdOBmV6lUN1w=" + }, + "nan": { + "version": "2.14.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", + "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" + }, + "nanomatch": { + "version": "1.2.13", + "resolved": "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz", + "integrity": "sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==", + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "fragment-cache": "^0.2.1", + "is-windows": "^1.0.2", + "kind-of": "^6.0.2", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + 
"snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + } + }, + "napi-build-utils": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-1.0.1.tgz", + "integrity": "sha512-boQj1WFgQH3v4clhu3mTNfP+vOBxorDlE8EKiMjUlLG3C4qAESnn9AxIOkFgTR2c9LtzNjPrjS60cT27ZKBhaA==" + }, + "natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=" + }, + "negotiator": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", + "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==" + }, + "neo-async": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.1.tgz", + "integrity": "sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw==" + }, + "neon-js": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/neon-js/-/neon-js-1.1.2.tgz", + "integrity": "sha1-r4XY4ruAmc/H9v4laolqVGSwBiM=" + }, + "next-tick": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.0.0.tgz", + "integrity": "sha1-yobR/ogoFpsBICCOPchCS524NCw=" + }, + "nice-try": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==" + }, + "nlcst-to-string": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/nlcst-to-string/-/nlcst-to-string-2.0.3.tgz", + "integrity": "sha512-OY2QhGdf6jpYfHqS4vJwqF7aIBZkaMjMUkcHcskMPitvXLuYNGdQvgVWI/5yKwkmIdmhft3ounSJv+Re2yydng==" + }, + "node-abi": { + "version": "2.13.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-2.13.0.tgz", + "integrity": "sha512-9HrZGFVTR5SOu3PZAnAY2hLO36aW1wmA+FDsVkr85BTST32TLCA1H/AEcatVRAsWLyXS3bqUDYCAjq5/QGuSTA==", + "requires": { + "semver": "^5.4.1" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "node-emoji": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/node-emoji/-/node-emoji-1.10.0.tgz", + "integrity": "sha512-Yt3384If5H6BYGVHiHwTL+99OzJKHhgp82S8/dktEK73T26BazdgZ4JZh92xSVtGNJvz9UbXdNAc5hcrXV42vw==", + "requires": { + "lodash.toarray": "^4.4.0" + } + }, + "node-eta": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/node-eta/-/node-eta-0.9.0.tgz", + "integrity": "sha1-n7CwmbzSoCGUDmA8ZCVNwAPZp6g=" + }, + "node-fetch": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-1.7.3.tgz", + "integrity": "sha512-NhZ4CsKx7cYm2vSrBAr2PvFOe6sWDf0UYLRqA6svUYg7+/TSfVAu49jYC4BvQ4Sms9SZgdqGBgroqfDhJdTyKQ==", + "requires": { + "encoding": "^0.1.11", + "is-stream": "^1.0.1" + } + }, + "node-forge": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.9.0.tgz", + "integrity": "sha512-7ASaDa3pD+lJ3WvXFsxekJQelBKRpne+GOVbLbtHYdd7pFspyeuJHnWfLplGf3SwKGbfs/aYl5V/JCIaHVUKKQ==" + }, + "node-libs-browser": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/node-libs-browser/-/node-libs-browser-2.2.1.tgz", + "integrity": 
"sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q==", + "requires": { + "assert": "^1.1.1", + "browserify-zlib": "^0.2.0", + "buffer": "^4.3.0", + "console-browserify": "^1.1.0", + "constants-browserify": "^1.0.0", + "crypto-browserify": "^3.11.0", + "domain-browser": "^1.1.1", + "events": "^3.0.0", + "https-browserify": "^1.0.0", + "os-browserify": "^0.3.0", + "path-browserify": "0.0.1", + "process": "^0.11.10", + "punycode": "^1.2.4", + "querystring-es3": "^0.2.0", + "readable-stream": "^2.3.3", + "stream-browserify": "^2.0.1", + "stream-http": "^2.7.2", + "string_decoder": "^1.0.0", + "timers-browserify": "^2.0.4", + "tty-browserify": "0.0.0", + "url": "^0.11.0", + "util": "^0.11.0", + "vm-browserify": "^1.0.1" + }, + "dependencies": { + "punycode": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" + } + } + }, + "node-object-hash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/node-object-hash/-/node-object-hash-2.0.0.tgz", + "integrity": "sha512-VZR0zroAusy1ETZMZiGeLkdu50LGjG5U1KHZqTruqtTyQ2wfWhHG2Ow4nsUbfTFGlaREgNHcCWoM/OzEm6p+NQ==" + }, + "node-releases": { + "version": "1.1.44", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.44.tgz", + "integrity": "sha512-NwbdvJyR7nrcGrXvKAvzc5raj/NkoJudkarh2yIpJ4t0NH4aqjUDz/486P+ynIW5eokKOfzGNRdYoLfBlomruw==", + "requires": { + "semver": "^6.3.0" + } + }, + "noms": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/noms/-/noms-0.0.0.tgz", + "integrity": "sha1-2o69nzr51nYJGbJ9nNyAkqczKFk=", + "requires": { + "inherits": "^2.0.1", + "readable-stream": "~1.0.31" + }, + "dependencies": { + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + }, + "readable-stream": { + "version": "1.0.34", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + } + } + }, + "noop-logger": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/noop-logger/-/noop-logger-0.1.1.tgz", + "integrity": "sha1-lKKxYzxPExdVMAfYlm/Q6EG2pMI=" + }, + "normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "requires": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "normalize-path": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", + "integrity": "sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=", + "requires": { + "remove-trailing-separator": "^1.0.1" + } + }, + 
"normalize-range": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", + "integrity": "sha1-LRDAa9/TEuqXd2laTShDlFa3WUI=" + }, + "normalize-url": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-3.3.0.tgz", + "integrity": "sha512-U+JJi7duF1o+u2pynbp2zXDW2/PADgC30f0GsHZtRh+HOcXHnw137TrNlyxxRvWW5fjKd3bcLHPxofWuCjaeZg==" + }, + "npm-conf": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/npm-conf/-/npm-conf-1.1.3.tgz", + "integrity": "sha512-Yic4bZHJOt9RCFbRP3GgpqhScOY4HH3V2P8yBj6CeYq118Qr+BLXqT2JvpJ00mryLESpgOxf5XlFv4ZjXxLScw==", + "requires": { + "config-chain": "^1.1.11", + "pify": "^3.0.0" + }, + "dependencies": { + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + } + } + }, + "npm-run-path": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", + "integrity": "sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=", + "requires": { + "path-key": "^2.0.0" + } + }, + "npmlog": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", + "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", + "requires": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.7.3", + "set-blocking": "~2.0.0" + } + }, + "nth-check": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-1.0.2.tgz", + "integrity": "sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg==", + "requires": { + "boolbase": "~1.0.0" + } + }, + "null-loader": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/null-loader/-/null-loader-0.1.1.tgz", + "integrity": "sha1-F76av80/8OFRL2/Er8sfUDk3j64=" + }, + "num2fraction": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/num2fraction/-/num2fraction-1.2.2.tgz", + "integrity": "sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4=" + }, + "number-is-nan": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=" + }, + "oauth-sign": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", + "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" + }, + "object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" + }, + "object-component": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/object-component/-/object-component-0.0.3.tgz", + "integrity": "sha1-8MaapQ78lbhmwYb0AKM3acsvEpE=" + }, + "object-copy": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/object-copy/-/object-copy-0.1.0.tgz", + "integrity": "sha1-fn2Fi3gb18mRpBupde04EnVOmYw=", + "requires": { + "copy-descriptor": "^0.1.0", + "define-property": "^0.2.5", + "kind-of": "^3.0.3" + }, + "dependencies": { + "define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "requires": { + "is-descriptor": "^0.1.0" + } + }, + "is-buffer": { + "version": "1.1.6", + "resolved": 
"https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + }, + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "object-fit-images": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/object-fit-images/-/object-fit-images-3.2.4.tgz", + "integrity": "sha512-G+7LzpYfTfqUyrZlfrou/PLLLAPNC52FTy5y1CBywX+1/FkxIloOyQXBmZ3Zxa2AWO+lMF0JTuvqbr7G5e5CWg==" + }, + "object-hash": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-1.3.1.tgz", + "integrity": "sha512-OSuu/pU4ENM9kmREg0BdNrUDIl1heYa4mBZacJc+vVWz4GtAwu7jO8s4AIt2aGRUTqxykpWzI3Oqnsm13tTMDA==" + }, + "object-inspect": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==" + }, + "object-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.0.2.tgz", + "integrity": "sha512-Epah+btZd5wrrfjkJZq1AOB9O6OxUQto45hzFd7lXGrpHPGE0W1k+426yrZV+k6NJOzLNNW/nVsmZdIWsAqoOQ==" + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==" + }, + "object-path": { + "version": "0.11.4", + "resolved": "https://registry.npmjs.org/object-path/-/object-path-0.11.4.tgz", + "integrity": "sha1-NwrnUvvzfePqcKhhwju6iRVpGUk=" + }, + "object-visit": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/object-visit/-/object-visit-1.0.1.tgz", + "integrity": "sha1-95xEk68MU3e1n+OdOV5BBC3QRbs=", + "requires": { + "isobject": "^3.0.0" + } + }, + "object.assign": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", + "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", + "requires": { + "define-properties": "^1.1.2", + "function-bind": "^1.1.1", + "has-symbols": "^1.0.0", + "object-keys": "^1.0.11" + } + }, + "object.entries": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.1.tgz", + "integrity": "sha512-ilqR7BgdyZetJutmDPfXCDffGa0/Yzl2ivVNpbx/g4UeWrCdRnFDUBrKJGLhGieRHDATnyZXWBeCb29k9CJysQ==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "has": "^1.0.3" + } + }, + "object.fromentries": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.2.tgz", + "integrity": "sha512-r3ZiBH7MQppDJVLx6fhD618GKNG40CZYH9wgwdhKxBDDbQgjeWGGd4AtkZad84d291YxvWe7bJGuE65Anh0dxQ==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "has": "^1.0.3" + } + }, + "object.getownpropertydescriptors": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.0.tgz", + "integrity": "sha512-Z53Oah9A3TdLoblT7VKJaTDdXdT+lQO+cNpKVnya5JDe9uLvzu1YyY1yFDFrcxrlRgWrEFH0jJtD/IbuwjcEVg==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": 
"^1.17.0-next.1" + } + }, + "object.pick": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz", + "integrity": "sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c=", + "requires": { + "isobject": "^3.0.1" + } + }, + "object.values": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.1.tgz", + "integrity": "sha512-WTa54g2K8iu0kmS/us18jEmdv1a4Wi//BZ/DTVYEcH0XhLM5NYdpDHja3gt57VrZLcNAO2WGA+KpWsDBaHt6eA==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "has": "^1.0.3" + } + }, + "obuf": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz", + "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==" + }, + "omggif": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/omggif/-/omggif-1.0.10.tgz", + "integrity": "sha512-LMJTtvgc/nugXj0Vcrrs68Mn2D1r0zf630VNtqtpI1FEO7e+O9FP4gqs9AcnBaSEeoHIPm28u6qgPR0oyEpGSw==" + }, + "on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", + "requires": { + "ee-first": "1.1.1" + } + }, + "on-headers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==" + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "requires": { + "wrappy": "1" + } + }, + "onetime": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz", + "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==", + "requires": { + "mimic-fn": "^2.1.0" + } + }, + "open": { + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/open/-/open-6.4.0.tgz", + "integrity": "sha512-IFenVPgF70fSm1keSd2iDBIDIBZkroLeuffXq+wKTzTJlBpesFWojV9lb8mzOfaAzM1sr7HQHuO0vtV0zYekGg==", + "requires": { + "is-wsl": "^1.1.0" + }, + "dependencies": { + "is-wsl": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-1.1.0.tgz", + "integrity": "sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0=" + } + } + }, + "opentracing": { + "version": "0.14.4", + "resolved": "https://registry.npmjs.org/opentracing/-/opentracing-0.14.4.tgz", + "integrity": "sha512-nNnZDkUNExBwEpb7LZaeMeQgvrlO8l4bgY/LvGNZCR0xG/dGWqHqjKrAmR5GUoYo0FIz38kxasvA1aevxWs2CA==" + }, + "opn": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/opn/-/opn-5.1.0.tgz", + "integrity": "sha512-iPNl7SyM8L30Rm1sjGdLLheyHVw5YXVfi3SKWJzBI7efxRwHojfRFjwE/OLM6qp9xJYMgab8WicTU1cPoY+Hpg==", + "requires": { + "is-wsl": "^1.1.0" + }, + "dependencies": { + "is-wsl": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-1.1.0.tgz", + "integrity": "sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0=" + } + } + }, + "optimize-css-assets-webpack-plugin": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/optimize-css-assets-webpack-plugin/-/optimize-css-assets-webpack-plugin-5.0.3.tgz", + "integrity": "sha512-q9fbvCRS6EYtUKKSwI87qm2IxlyJK5b4dygW1rKUBT6mMDhdG5e5bZT63v6tnJR9F9FB/H5a0HTmtw+laUBxKA==", + "requires": { + "cssnano": "^4.1.10", + "last-call-webpack-plugin": "^3.0.0" + } + }, + "optionator": { + "version": "0.8.3", 
+ "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "requires": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "word-wrap": "~1.2.3" + } + }, + "original": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/original/-/original-1.0.2.tgz", + "integrity": "sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg==", + "requires": { + "url-parse": "^1.4.3" + } + }, + "os-browserify": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/os-browserify/-/os-browserify-0.3.0.tgz", + "integrity": "sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc=" + }, + "os-filter-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/os-filter-obj/-/os-filter-obj-2.0.0.tgz", + "integrity": "sha512-uksVLsqG3pVdzzPvmAHpBK0wKxYItuzZr7SziusRPoz67tGV8rL1szZ6IdeUrbqLjGDwApBtN29eEE3IqGHOjg==", + "requires": { + "arch": "^2.1.0" + } + }, + "os-locale": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-3.1.0.tgz", + "integrity": "sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q==", + "requires": { + "execa": "^1.0.0", + "lcid": "^2.0.0", + "mem": "^4.0.0" + }, + "dependencies": { + "execa": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", + "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", + "requires": { + "cross-spawn": "^6.0.0", + "get-stream": "^4.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + } + } + } + }, + "os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=" + }, + "p-cancelable": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-1.1.0.tgz", + "integrity": "sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw==" + }, + "p-defer": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-defer/-/p-defer-1.0.0.tgz", + "integrity": "sha1-n26xgvbJqozXQwBKfU+WsZaw+ww=" + }, + "p-event": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/p-event/-/p-event-2.3.1.tgz", + "integrity": "sha512-NQCqOFhbpVTMX4qMe8PF8lbGtzZ+LCiN7pcNrb/413Na7+TRoe1xkKUzuWa/YEJdGQ0FvKtj35EEbDoVPO2kbA==", + "requires": { + "p-timeout": "^2.0.1" + } + }, + "p-finally": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", + "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=" + }, + "p-is-promise": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/p-is-promise/-/p-is-promise-2.1.0.tgz", + "integrity": "sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg==" + }, + "p-limit": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.2.tgz", + "integrity": "sha512-WGR+xHecKTr7EbUEhyLSh5Dube9JtdiG78ufaeLxTgpudf/20KqyMioIUZJAezlTIi6evxuoUs9YXc11cU+yzQ==", + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": 
"sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "requires": { + "p-limit": "^2.0.0" + } + }, + "p-map": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", + "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", + "requires": { + "aggregate-error": "^3.0.0" + } + }, + "p-map-series": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-map-series/-/p-map-series-1.0.0.tgz", + "integrity": "sha1-v5j+V1cFZYqeE1G++4WuTB8Hvco=", + "requires": { + "p-reduce": "^1.0.0" + } + }, + "p-pipe": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/p-pipe/-/p-pipe-1.2.0.tgz", + "integrity": "sha1-SxoROZoRUgpneQ7loMHViB1r7+k=" + }, + "p-reduce": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-reduce/-/p-reduce-1.0.0.tgz", + "integrity": "sha1-GMKw3ZNqRpClKfgjH1ig/bakffo=" + }, + "p-retry": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-3.0.1.tgz", + "integrity": "sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w==", + "requires": { + "retry": "^0.12.0" + } + }, + "p-timeout": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-2.0.1.tgz", + "integrity": "sha512-88em58dDVB/KzPEx1X0N3LwFfYZPyDc4B6eF38M1rk9VTZMbxXXgjugz8mmwpS9Ox4BDZ+t6t3QP5+/gazweIA==", + "requires": { + "p-finally": "^1.0.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" + }, + "package-json": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/package-json/-/package-json-6.5.0.tgz", + "integrity": "sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ==", + "requires": { + "got": "^9.6.0", + "registry-auth-token": "^4.0.0", + "registry-url": "^5.0.0", + "semver": "^6.2.0" + }, + "dependencies": { + "got": { + "version": "9.6.0", + "resolved": "https://registry.npmjs.org/got/-/got-9.6.0.tgz", + "integrity": "sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==", + "requires": { + "@sindresorhus/is": "^0.14.0", + "@szmarczak/http-timer": "^1.1.2", + "cacheable-request": "^6.0.0", + "decompress-response": "^3.3.0", + "duplexer3": "^0.1.4", + "get-stream": "^4.1.0", + "lowercase-keys": "^1.0.1", + "mimic-response": "^1.0.1", + "p-cancelable": "^1.0.0", + "to-readable-stream": "^1.0.0", + "url-parse-lax": "^3.0.0" + } + } + } + }, + "pako": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.10.tgz", + "integrity": "sha512-0DTvPVU3ed8+HNXOu5Bs+o//Mbdj9VNQMUOe9oKCwh8l0GNwpTDMKCWbRjgtD291AWnkAgkqA/LOnQS8AmS1tw==" + }, + "parallel-transform": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/parallel-transform/-/parallel-transform-1.2.0.tgz", + "integrity": "sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg==", + "requires": { + "cyclist": "^1.0.1", + "inherits": "^2.0.3", + "readable-stream": "^2.1.5" + } + }, + "parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "requires": { + "callsites": 
"^3.0.0" + } + }, + "parse-asn1": { + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.5.tgz", + "integrity": "sha512-jkMYn1dcJqF6d5CpU689bq7w/b5ALS9ROVSpQDPrZsqqesUJii9qutvoT5ltGedNXMO2e16YUWIghG9KxaViTQ==", + "requires": { + "asn1.js": "^4.0.0", + "browserify-aes": "^1.0.0", + "create-hash": "^1.1.0", + "evp_bytestokey": "^1.0.0", + "pbkdf2": "^3.0.3", + "safe-buffer": "^5.1.1" + } + }, + "parse-bmfont-ascii": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/parse-bmfont-ascii/-/parse-bmfont-ascii-1.0.6.tgz", + "integrity": "sha1-Eaw8P/WPfCAgqyJ2kHkQjU36AoU=" + }, + "parse-bmfont-binary": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/parse-bmfont-binary/-/parse-bmfont-binary-1.0.6.tgz", + "integrity": "sha1-0Di0dtPp3Z2x4RoLDlOiJ5K2kAY=" + }, + "parse-bmfont-xml": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/parse-bmfont-xml/-/parse-bmfont-xml-1.1.4.tgz", + "integrity": "sha512-bjnliEOmGv3y1aMEfREMBJ9tfL3WR0i0CKPj61DnSLaoxWR3nLrsQrEbCId/8rF4NyRF0cCqisSVXyQYWM+mCQ==", + "requires": { + "xml-parse-from-string": "^1.0.0", + "xml2js": "^0.4.5" + } + }, + "parse-english": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/parse-english/-/parse-english-4.1.2.tgz", + "integrity": "sha512-+PBf+1ifxqJlOpisODiKX4A8wBEgWm4goMvDB5O9zx/cQI58vzHTZeWFbAgCF9fUXRl8/YdINv1cfmfIRR1acg==", + "requires": { + "nlcst-to-string": "^2.0.0", + "parse-latin": "^4.0.0", + "unist-util-modify-children": "^1.0.0", + "unist-util-visit-children": "^1.0.0" + } + }, + "parse-entities": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-1.2.2.tgz", + "integrity": "sha512-NzfpbxW/NPrzZ/yYSoQxyqUZMZXIdCfE0OIN4ESsnptHJECoUk3FZktxNuzQf4tjt5UEopnxpYJbvYuxIFDdsg==", + "requires": { + "character-entities": "^1.0.0", + "character-entities-legacy": "^1.0.0", + "character-reference-invalid": "^1.0.0", + "is-alphanumerical": "^1.0.0", + "is-decimal": "^1.0.0", + "is-hexadecimal": "^1.0.0" + } + }, + "parse-headers": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/parse-headers/-/parse-headers-2.0.3.tgz", + "integrity": "sha512-QhhZ+DCCit2Coi2vmAKbq5RGTRcQUOE2+REgv8vdyu7MnYx2eZztegqtTx99TZ86GTIwqiy3+4nQTWZ2tgmdCA==" + }, + "parse-json": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.0.0.tgz", + "integrity": "sha512-OOY5b7PAEFV0E2Fir1KOkxchnZNCdowAJgQ5NuxjpBKTRP3pQhwkrkxqQjeoKJ+fO7bCpmIZaogI4eZGDMEGOw==", + "requires": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1", + "lines-and-columns": "^1.1.6" + } + }, + "parse-latin": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/parse-latin/-/parse-latin-4.2.0.tgz", + "integrity": "sha512-b8PvsA1Ohh7hIQwDDy6kSjx3EbcuR3oKYm5lC1/l/zIB6mVVV5ESEoS1+Qr5+QgEGmp+aEZzc+D145FIPJUszw==", + "requires": { + "nlcst-to-string": "^2.0.0", + "unist-util-modify-children": "^1.0.0", + "unist-util-visit-children": "^1.0.0" + } + }, + "parse-numeric-range": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/parse-numeric-range/-/parse-numeric-range-0.0.2.tgz", + "integrity": "sha1-tPCdQTx6282Yf26SM8e0shDJOOQ=" + }, + "parse-passwd": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/parse-passwd/-/parse-passwd-1.0.0.tgz", + "integrity": "sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY=" + }, + "parse-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/parse-path/-/parse-path-4.0.1.tgz", + 
"integrity": "sha512-d7yhga0Oc+PwNXDvQ0Jv1BuWkLVPXcAoQ/WREgd6vNNoKYaW52KI+RdOFjI63wjkmps9yUE8VS4veP+AgpQ/hA==", + "requires": { + "is-ssh": "^1.3.0", + "protocols": "^1.4.0" + } + }, + "parse-url": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/parse-url/-/parse-url-5.0.1.tgz", + "integrity": "sha512-flNUPP27r3vJpROi0/R3/2efgKkyXqnXwyP1KQ2U0SfFRgdizOdWfvrrvJg1LuOoxs7GQhmxJlq23IpQ/BkByg==", + "requires": { + "is-ssh": "^1.3.0", + "normalize-url": "^3.3.0", + "parse-path": "^4.0.0", + "protocols": "^1.4.0" + } + }, + "parse5": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-3.0.3.tgz", + "integrity": "sha512-rgO9Zg5LLLkfJF9E6CCmXlSE4UVceloys8JrFqCcHloC3usd/kJCyPDwH2SOlzix2j3xaP9sUX3e8+kvkuleAA==", + "requires": { + "@types/node": "*" + } + }, + "parseqs": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/parseqs/-/parseqs-0.0.5.tgz", + "integrity": "sha1-1SCKNzjkZ2bikbouoXNoSSGouJ0=", + "requires": { + "better-assert": "~1.0.0" + } + }, + "parseuri": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/parseuri/-/parseuri-0.0.5.tgz", + "integrity": "sha1-gCBKUNTbt3m/3G6+J3jZDkvOMgo=", + "requires": { + "better-assert": "~1.0.0" + } + }, + "parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==" + }, + "pascalcase": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/pascalcase/-/pascalcase-0.1.1.tgz", + "integrity": "sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ=" + }, + "path": { + "version": "0.12.7", + "resolved": "https://registry.npmjs.org/path/-/path-0.12.7.tgz", + "integrity": "sha1-1NwqUGxM4hl+tIHr/NWzbAFAsQ8=", + "requires": { + "process": "^0.11.1", + "util": "^0.10.3" + }, + "dependencies": { + "inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + }, + "util": { + "version": "0.10.4", + "resolved": "https://registry.npmjs.org/util/-/util-0.10.4.tgz", + "integrity": "sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A==", + "requires": { + "inherits": "2.0.3" + } + } + } + }, + "path-browserify": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-0.0.1.tgz", + "integrity": "sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ==" + }, + "path-dirname": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/path-dirname/-/path-dirname-1.0.2.tgz", + "integrity": "sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA=" + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" + }, + "path-is-inside": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", + "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=" + }, + "path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=" + }, + "path-parse": { + "version": "1.0.6", + "resolved": 
"https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", + "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" + }, + "path-to-regexp": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", + "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" + }, + "path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==" + }, + "pbkdf2": { + "version": "3.0.17", + "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.0.17.tgz", + "integrity": "sha512-U/il5MsrZp7mGg3mSQfn742na2T+1/vHDCG5/iTI3X9MKUuYUZVLQhyRsg06mCgDBTd57TxzgZt7P+fYfjRLtA==", + "requires": { + "create-hash": "^1.1.2", + "create-hmac": "^1.1.4", + "ripemd160": "^2.0.1", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + } + }, + "pend": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", + "integrity": "sha1-elfrVQpng/kRUzH89GY9XI4AelA=" + }, + "performance-now": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" + }, + "phin": { + "version": "2.9.3", + "resolved": "https://registry.npmjs.org/phin/-/phin-2.9.3.tgz", + "integrity": "sha512-CzFr90qM24ju5f88quFC/6qohjC144rehe5n6DH900lgXmUe86+xCKc10ev56gRKC4/BkHUoG4uSiQgBiIXwDA==" + }, + "physical-cpu-count": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/physical-cpu-count/-/physical-cpu-count-2.0.0.tgz", + "integrity": "sha1-GN4vl+S/epVRrXURlCtUlverpmA=" + }, + "picomatch": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.1.tgz", + "integrity": "sha512-ISBaA8xQNmwELC7eOjqFKMESB2VIqt4PPDD0nsS95b/9dZXvVKOlz9keMSnoGGKcOHXfTvDD6WMaRoSc9UuhRA==" + }, + "pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==" + }, + "pinkie": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "integrity": "sha1-clVrgM+g1IqXToDnckjoDtT3+HA=" + }, + "pinkie-promise": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "integrity": "sha1-ITXW36ejWMBprJsXh3YogihFD/o=", + "requires": { + "pinkie": "^2.0.0" + } + }, + "pixelmatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/pixelmatch/-/pixelmatch-4.0.2.tgz", + "integrity": "sha1-j0fc7FARtHe2fbA8JDvB8wheiFQ=", + "requires": { + "pngjs": "^3.0.0" + } + }, + "pkg-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "requires": { + "find-up": "^3.0.0" + } + }, + "pngjs": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/pngjs/-/pngjs-3.4.0.tgz", + "integrity": "sha512-NCrCHhWmnQklfH4MtJMRjZ2a8c80qXeMlQMv2uVp9ISJMTt562SbGd6n2oq0PaPgKm7Z6pL9E2UlLIhC+SHL3w==" + }, + "pngquant-bin": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/pngquant-bin/-/pngquant-bin-5.0.2.tgz", + "integrity": "sha512-OLdT+4JZx5BqE1CFJkrvomYV0aSsv6x2Bba+aWaVc0PMfWlE+ZByNKYAdKeIqsM4uvW1HOSEHnf8KcOnykPNxA==", + "requires": { + "bin-build": 
"^3.0.0", + "bin-wrapper": "^4.0.1", + "execa": "^0.10.0", + "logalot": "^2.0.0" + }, + "dependencies": { + "execa": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-0.10.0.tgz", + "integrity": "sha512-7XOMnz8Ynx1gGo/3hyV9loYNPWM94jG3+3T3Y8tsfSstFmETmENCMU/A/zj8Lyaj1lkgEepKepvd6240tBRvlw==", + "requires": { + "cross-spawn": "^6.0.0", + "get-stream": "^3.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + } + }, + "get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" + } + } + }, + "pnp-webpack-plugin": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/pnp-webpack-plugin/-/pnp-webpack-plugin-1.5.0.tgz", + "integrity": "sha512-jd9olUr9D7do+RN8Wspzhpxhgp1n6Vd0NtQ4SFkmIACZoEL1nkyAdW9Ygrinjec0vgDcWjscFQQ1gDW8rsfKTg==", + "requires": { + "ts-pnp": "^1.1.2" + } + }, + "portfinder": { + "version": "1.0.25", + "resolved": "https://registry.npmjs.org/portfinder/-/portfinder-1.0.25.tgz", + "integrity": "sha512-6ElJnHBbxVA1XSLgBp7G1FiCkQdlqGzuF7DswL5tcea+E8UpuvPU7beVAjjRwCioTS9ZluNbu+ZyRvgTsmqEBg==", + "requires": { + "async": "^2.6.2", + "debug": "^3.1.1", + "mkdirp": "^0.5.1" + }, + "dependencies": { + "async": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz", + "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==", + "requires": { + "lodash": "^4.17.14" + } + }, + "debug": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "requires": { + "ms": "^2.1.1" + } + } + } + }, + "posix-character-classes": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", + "integrity": "sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=" + }, + "postcss": { + "version": "7.0.26", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.26.tgz", + "integrity": "sha512-IY4oRjpXWYshuTDFxMVkJDtWIk2LhsTlu8bZnbEJA4+bYT16Lvpo8Qv6EvDumhYRgzjZl489pmsY3qVgJQ08nA==", + "requires": { + "chalk": "^2.4.2", + "source-map": "^0.6.1", + "supports-color": "^6.1.0" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + }, + "supports-color": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", + "integrity": "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", + "requires": { + "has-flag": "^3.0.0" + } + } + } + }, + "postcss-calc": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-7.0.1.tgz", + "integrity": "sha512-oXqx0m6tb4N3JGdmeMSc/i91KppbYsFZKdH0xMOqK8V1rJlzrKlTdokz8ozUXLVejydRN6u2IddxpcijRj2FqQ==", + "requires": { + "css-unit-converter": "^1.1.1", + "postcss": "^7.0.5", + "postcss-selector-parser": "^5.0.0-rc.4", + "postcss-value-parser": "^3.3.1" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": 
"sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "postcss-colormin": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-4.0.3.tgz", + "integrity": "sha512-WyQFAdDZpExQh32j0U0feWisZ0dmOtPl44qYmJKkq9xFWY3p+4qnRzCHeNrkeRhwPHz9bQ3mo0/yVkaply0MNw==", + "requires": { + "browserslist": "^4.0.0", + "color": "^3.0.0", + "has": "^1.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "browserslist": { + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.8.3.tgz", + "integrity": "sha512-iU43cMMknxG1ClEZ2MDKeonKE1CCrFVkQK2AqO2YWFmvIrx4JWrvQ4w4hQez6EpVI8rHTtqh/ruHHDHSOKxvUg==", + "requires": { + "caniuse-lite": "^1.0.30001017", + "electron-to-chromium": "^1.3.322", + "node-releases": "^1.1.44" + } + }, + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "postcss-convert-values": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-4.0.1.tgz", + "integrity": "sha512-Kisdo1y77KUC0Jmn0OXU/COOJbzM8cImvw1ZFsBgBgMgb1iL23Zs/LXRe3r+EZqM3vGYKdQ2YJVQ5VkJI+zEJQ==", + "requires": { + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "postcss-discard-comments": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-4.0.2.tgz", + "integrity": "sha512-RJutN259iuRf3IW7GZyLM5Sw4GLTOH8FmsXBnv8Ab/Tc2k4SR4qbV4DNbyyY4+Sjo362SyDmW2DQ7lBSChrpkg==", + "requires": { + "postcss": "^7.0.0" + } + }, + "postcss-discard-duplicates": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-4.0.2.tgz", + "integrity": "sha512-ZNQfR1gPNAiXZhgENFfEglF93pciw0WxMkJeVmw8eF+JZBbMD7jp6C67GqJAXVZP2BWbOztKfbsdmMp/k8c6oQ==", + "requires": { + "postcss": "^7.0.0" + } + }, + "postcss-discard-empty": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-4.0.1.tgz", + "integrity": "sha512-B9miTzbznhDjTfjvipfHoqbWKwd0Mj+/fL5s1QOz06wufguil+Xheo4XpOnc4NqKYBCNqqEzgPv2aPBIJLox0w==", + "requires": { + "postcss": "^7.0.0" + } + }, + "postcss-discard-overridden": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-4.0.1.tgz", + "integrity": "sha512-IYY2bEDD7g1XM1IDEsUT4//iEYCxAmP5oDSFMVU/JVvT7gh+l4fmjciLqGgwjdWpQIdb0Che2VX00QObS5+cTg==", + "requires": { + "postcss": "^7.0.0" + } + }, + "postcss-flexbugs-fixes": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-flexbugs-fixes/-/postcss-flexbugs-fixes-3.3.1.tgz", + "integrity": "sha512-9y9kDDf2F9EjKX6x9ueNa5GARvsUbXw4ezH8vXItXHwKzljbu8awP7t5dCaabKYm18Vs1lo5bKQcnc0HkISt+w==", + "requires": { + "postcss": "^6.0.1" + }, + "dependencies": { + "postcss": { + "version": "6.0.23", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.23.tgz", + "integrity": 
"sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==", + "requires": { + "chalk": "^2.4.1", + "source-map": "^0.6.1", + "supports-color": "^5.4.0" + } + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } + } + }, + "postcss-load-config": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-2.1.0.tgz", + "integrity": "sha512-4pV3JJVPLd5+RueiVVB+gFOAa7GWc25XQcMp86Zexzke69mKf6Nx9LRcQywdz7yZI9n1udOxmLuAwTBypypF8Q==", + "requires": { + "cosmiconfig": "^5.0.0", + "import-cwd": "^2.0.0" + }, + "dependencies": { + "cosmiconfig": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-5.2.1.tgz", + "integrity": "sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA==", + "requires": { + "import-fresh": "^2.0.0", + "is-directory": "^0.3.1", + "js-yaml": "^3.13.1", + "parse-json": "^4.0.0" + } + }, + "import-fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-2.0.0.tgz", + "integrity": "sha1-2BNVwVYS04bGH53dOSLUMEgipUY=", + "requires": { + "caller-path": "^2.0.0", + "resolve-from": "^3.0.0" + } + }, + "parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", + "requires": { + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" + } + }, + "resolve-from": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", + "integrity": "sha1-six699nWiBvItuZTM17rywoYh0g=" + } + } + }, + "postcss-loader": { + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/postcss-loader/-/postcss-loader-2.1.6.tgz", + "integrity": "sha512-hgiWSc13xVQAq25cVw80CH0l49ZKlAnU1hKPOdRrNj89bokRr/bZF2nT+hebPPF9c9xs8c3gw3Fr2nxtmXYnNg==", + "requires": { + "loader-utils": "^1.1.0", + "postcss": "^6.0.0", + "postcss-load-config": "^2.0.0", + "schema-utils": "^0.4.0" + }, + "dependencies": { + "postcss": { + "version": "6.0.23", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.23.tgz", + "integrity": "sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==", + "requires": { + "chalk": "^2.4.1", + "source-map": "^0.6.1", + "supports-color": "^5.4.0" + } + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } + } + }, + "postcss-merge-longhand": { + "version": "4.0.11", + "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-4.0.11.tgz", + "integrity": "sha512-alx/zmoeXvJjp7L4mxEMjh8lxVlDFX1gqWHzaaQewwMZiVhLo42TEClKaeHbRf6J7j82ZOdTJ808RtN0ZOZwvw==", + "requires": { + "css-color-names": "0.0.4", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0", + "stylehacks": "^4.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "postcss-merge-rules": 
{ + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-4.0.3.tgz", + "integrity": "sha512-U7e3r1SbvYzO0Jr3UT/zKBVgYYyhAz0aitvGIYOYK5CPmkNih+WDSsS5tvPrJ8YMQYlEMvsZIiqmn7HdFUaeEQ==", + "requires": { + "browserslist": "^4.0.0", + "caniuse-api": "^3.0.0", + "cssnano-util-same-parent": "^4.0.0", + "postcss": "^7.0.0", + "postcss-selector-parser": "^3.0.0", + "vendors": "^1.0.0" + }, + "dependencies": { + "browserslist": { + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.8.3.tgz", + "integrity": "sha512-iU43cMMknxG1ClEZ2MDKeonKE1CCrFVkQK2AqO2YWFmvIrx4JWrvQ4w4hQez6EpVI8rHTtqh/ruHHDHSOKxvUg==", + "requires": { + "caniuse-lite": "^1.0.30001017", + "electron-to-chromium": "^1.3.322", + "node-releases": "^1.1.44" + } + }, + "postcss-selector-parser": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-3.1.1.tgz", + "integrity": "sha1-T4dfSvsMllc9XPTXQBGu4lCn6GU=", + "requires": { + "dot-prop": "^4.1.1", + "indexes-of": "^1.0.1", + "uniq": "^1.0.1" + } + } + } + }, + "postcss-minify-font-values": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-4.0.2.tgz", + "integrity": "sha512-j85oO6OnRU9zPf04+PZv1LYIYOprWm6IA6zkXkrJXyRveDEuQggG6tvoy8ir8ZwjLxLuGfNkCZEQG7zan+Hbtg==", + "requires": { + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "postcss-minify-gradients": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-4.0.2.tgz", + "integrity": "sha512-qKPfwlONdcf/AndP1U8SJ/uzIJtowHlMaSioKzebAXSG4iJthlWC9iSWznQcX4f66gIWX44RSA841HTHj3wK+Q==", + "requires": { + "cssnano-util-get-arguments": "^4.0.0", + "is-color-stop": "^1.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "postcss-minify-params": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-4.0.2.tgz", + "integrity": "sha512-G7eWyzEx0xL4/wiBBJxJOz48zAKV2WG3iZOqVhPet/9geefm/Px5uo1fzlHu+DOjT+m0Mmiz3jkQzVHe6wxAWg==", + "requires": { + "alphanum-sort": "^1.0.0", + "browserslist": "^4.0.0", + "cssnano-util-get-arguments": "^4.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0", + "uniqs": "^2.0.0" + }, + "dependencies": { + "browserslist": { + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.8.3.tgz", + "integrity": "sha512-iU43cMMknxG1ClEZ2MDKeonKE1CCrFVkQK2AqO2YWFmvIrx4JWrvQ4w4hQez6EpVI8rHTtqh/ruHHDHSOKxvUg==", + "requires": { + "caniuse-lite": "^1.0.30001017", + "electron-to-chromium": "^1.3.322", + "node-releases": "^1.1.44" + } + }, + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": 
"sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "postcss-minify-selectors": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-4.0.2.tgz", + "integrity": "sha512-D5S1iViljXBj9kflQo4YutWnJmwm8VvIsU1GeXJGiG9j8CIg9zs4voPMdQDUmIxetUOh60VilsNzCiAFTOqu3g==", + "requires": { + "alphanum-sort": "^1.0.0", + "has": "^1.0.0", + "postcss": "^7.0.0", + "postcss-selector-parser": "^3.0.0" + }, + "dependencies": { + "postcss-selector-parser": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-3.1.1.tgz", + "integrity": "sha1-T4dfSvsMllc9XPTXQBGu4lCn6GU=", + "requires": { + "dot-prop": "^4.1.1", + "indexes-of": "^1.0.1", + "uniq": "^1.0.1" + } + } + } + }, + "postcss-modules-extract-imports": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-1.2.1.tgz", + "integrity": "sha512-6jt9XZwUhwmRUhb/CkyJY020PYaPJsCyt3UjbaWo6XEbH/94Hmv6MP7fG2C5NDU/BcHzyGYxNtHvM+LTf9HrYw==", + "requires": { + "postcss": "^6.0.1" + }, + "dependencies": { + "postcss": { + "version": "6.0.23", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.23.tgz", + "integrity": "sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==", + "requires": { + "chalk": "^2.4.1", + "source-map": "^0.6.1", + "supports-color": "^5.4.0" + } + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } + } + }, + "postcss-modules-local-by-default": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-1.2.0.tgz", + "integrity": "sha1-99gMOYxaOT+nlkRmvRlQCn1hwGk=", + "requires": { + "css-selector-tokenizer": "^0.7.0", + "postcss": "^6.0.1" + }, + "dependencies": { + "postcss": { + "version": "6.0.23", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.23.tgz", + "integrity": "sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==", + "requires": { + "chalk": "^2.4.1", + "source-map": "^0.6.1", + "supports-color": "^5.4.0" + } + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } + } + }, + "postcss-modules-scope": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-1.1.0.tgz", + "integrity": "sha1-1upkmUx5+XtipytCb75gVqGUu5A=", + "requires": { + "css-selector-tokenizer": "^0.7.0", + "postcss": "^6.0.1" + }, + "dependencies": { + "postcss": { + "version": "6.0.23", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.23.tgz", + "integrity": "sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==", + "requires": { + "chalk": "^2.4.1", + "source-map": "^0.6.1", + "supports-color": "^5.4.0" + } + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } + } + }, 
+ "postcss-modules-values": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-1.3.0.tgz", + "integrity": "sha1-7P+p1+GSUYOJ9CrQ6D9yrsRW6iA=", + "requires": { + "icss-replace-symbols": "^1.1.0", + "postcss": "^6.0.1" + }, + "dependencies": { + "postcss": { + "version": "6.0.23", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.23.tgz", + "integrity": "sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==", + "requires": { + "chalk": "^2.4.1", + "source-map": "^0.6.1", + "supports-color": "^5.4.0" + } + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } + } + }, + "postcss-normalize-charset": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-4.0.1.tgz", + "integrity": "sha512-gMXCrrlWh6G27U0hF3vNvR3w8I1s2wOBILvA87iNXaPvSNo5uZAMYsZG7XjCUf1eVxuPfyL4TJ7++SGZLc9A3g==", + "requires": { + "postcss": "^7.0.0" + } + }, + "postcss-normalize-display-values": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-4.0.2.tgz", + "integrity": "sha512-3F2jcsaMW7+VtRMAqf/3m4cPFhPD3EFRgNs18u+k3lTJJlVe7d0YPO+bnwqo2xg8YiRpDXJI2u8A0wqJxMsQuQ==", + "requires": { + "cssnano-util-get-match": "^4.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "postcss-normalize-positions": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-4.0.2.tgz", + "integrity": "sha512-Dlf3/9AxpxE+NF1fJxYDeggi5WwV35MXGFnnoccP/9qDtFrTArZ0D0R+iKcg5WsUd8nUYMIl8yXDCtcrT8JrdA==", + "requires": { + "cssnano-util-get-arguments": "^4.0.0", + "has": "^1.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "postcss-normalize-repeat-style": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-4.0.2.tgz", + "integrity": "sha512-qvigdYYMpSuoFs3Is/f5nHdRLJN/ITA7huIoCyqqENJe9PvPmLhNLMu7QTjPdtnVf6OcYYO5SHonx4+fbJE1+Q==", + "requires": { + "cssnano-util-get-arguments": "^4.0.0", + "cssnano-util-get-match": "^4.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "postcss-normalize-string": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-4.0.2.tgz", + "integrity": 
"sha512-RrERod97Dnwqq49WNz8qo66ps0swYZDSb6rM57kN2J+aoyEAJfZ6bMx0sx/F9TIEX0xthPGCmeyiam/jXif0eA==", + "requires": { + "has": "^1.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "postcss-normalize-timing-functions": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-4.0.2.tgz", + "integrity": "sha512-acwJY95edP762e++00Ehq9L4sZCEcOPyaHwoaFOhIwWCDfik6YvqsYNxckee65JHLKzuNSSmAdxwD2Cud1Z54A==", + "requires": { + "cssnano-util-get-match": "^4.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "postcss-normalize-unicode": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-4.0.1.tgz", + "integrity": "sha512-od18Uq2wCYn+vZ/qCOeutvHjB5jm57ToxRaMeNuf0nWVHaP9Hua56QyMF6fs/4FSUnVIw0CBPsU0K4LnBPwYwg==", + "requires": { + "browserslist": "^4.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "browserslist": { + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.8.3.tgz", + "integrity": "sha512-iU43cMMknxG1ClEZ2MDKeonKE1CCrFVkQK2AqO2YWFmvIrx4JWrvQ4w4hQez6EpVI8rHTtqh/ruHHDHSOKxvUg==", + "requires": { + "caniuse-lite": "^1.0.30001017", + "electron-to-chromium": "^1.3.322", + "node-releases": "^1.1.44" + } + }, + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "postcss-normalize-url": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-4.0.1.tgz", + "integrity": "sha512-p5oVaF4+IHwu7VpMan/SSpmpYxcJMtkGppYf0VbdH5B6hN8YNmVyJLuY9FmLQTzY3fag5ESUUHDqM+heid0UVA==", + "requires": { + "is-absolute-url": "^2.0.0", + "normalize-url": "^3.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "is-absolute-url": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-absolute-url/-/is-absolute-url-2.1.0.tgz", + "integrity": "sha1-UFMN+4T8yap9vnhS6Do3uTufKqY=" + }, + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "postcss-normalize-whitespace": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-4.0.2.tgz", + "integrity": "sha512-tO8QIgrsI3p95r8fyqKV+ufKlSHh9hMJqACqbv2XknufqEDhDvbguXGBBqxw9nsQoXWf0qOqppziKJKHMD4GtA==", + "requires": { + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + 
"version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "postcss-ordered-values": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-4.1.2.tgz", + "integrity": "sha512-2fCObh5UanxvSxeXrtLtlwVThBvHn6MQcu4ksNT2tsaV2Fg76R2CV98W7wNSlX+5/pFwEyaDwKLLoEV7uRybAw==", + "requires": { + "cssnano-util-get-arguments": "^4.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "postcss-reduce-initial": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-4.0.3.tgz", + "integrity": "sha512-gKWmR5aUulSjbzOfD9AlJiHCGH6AEVLaM0AV+aSioxUDd16qXP1PCh8d1/BGVvpdWn8k/HiK7n6TjeoXN1F7DA==", + "requires": { + "browserslist": "^4.0.0", + "caniuse-api": "^3.0.0", + "has": "^1.0.0", + "postcss": "^7.0.0" + }, + "dependencies": { + "browserslist": { + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.8.3.tgz", + "integrity": "sha512-iU43cMMknxG1ClEZ2MDKeonKE1CCrFVkQK2AqO2YWFmvIrx4JWrvQ4w4hQez6EpVI8rHTtqh/ruHHDHSOKxvUg==", + "requires": { + "caniuse-lite": "^1.0.30001017", + "electron-to-chromium": "^1.3.322", + "node-releases": "^1.1.44" + } + } + } + }, + "postcss-reduce-transforms": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-4.0.2.tgz", + "integrity": "sha512-EEVig1Q2QJ4ELpJXMZR8Vt5DQx8/mo+dGWSR7vWXqcob2gQLyQGsionYcGKATXvQzMPn6DSN1vTN7yFximdIAg==", + "requires": { + "cssnano-util-get-match": "^4.0.0", + "has": "^1.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "postcss-selector-parser": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-5.0.0.tgz", + "integrity": "sha512-w+zLE5Jhg6Liz8+rQOWEAwtwkyqpfnmsinXjXg6cY7YIONZZtgvE0v2O0uhQBs0peNomOJwWRKt6JBfTdTd3OQ==", + "requires": { + "cssesc": "^2.0.0", + "indexes-of": "^1.0.1", + "uniq": "^1.0.1" + }, + "dependencies": { + "cssesc": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-2.0.0.tgz", + "integrity": "sha512-MsCAG1z9lPdoO/IUMLSBWBSVxVtJ1395VGIQ+Fc2gNdkQ1hNDnQdw3YhA71WJCBW1vdwA0cAnk/DnW6bqoEUYg==" + } + } + }, + "postcss-svgo": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-4.0.2.tgz", + "integrity": "sha512-C6wyjo3VwFm0QgBy+Fu7gCYOkCmgmClghO+pjcxvrcBKtiKt0uCF+hvbMO1fyv5BMImRK90SMb+dwUnfbGd+jw==", + "requires": { + "is-svg": "^3.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0", + "svgo": "^1.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + 
"integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" + } + } + }, + "postcss-unique-selectors": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-4.0.1.tgz", + "integrity": "sha512-+JanVaryLo9QwZjKrmJgkI4Fn8SBgRO6WXQBJi7KiAVPlmxikB5Jzc4EvXMT2H0/m0RjrVVm9rGNhZddm/8Spg==", + "requires": { + "alphanum-sort": "^1.0.0", + "postcss": "^7.0.0", + "uniqs": "^2.0.0" + } + }, + "postcss-value-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.0.2.tgz", + "integrity": "sha512-LmeoohTpp/K4UiyQCwuGWlONxXamGzCMtFxLq4W1nZVGIQLYvMCJx3yAF9qyyuFpflABI9yVdtJAqbihOsCsJQ==" + }, + "potrace": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/potrace/-/potrace-2.1.2.tgz", + "integrity": "sha512-dNcUBapRgPkiv3j+70+rSlf0whtJJqEszC04g9a/Ll3p6kA7QVRV1Vsi3jg22voJr2jA9x9fjPbz5MdD+ngbUg==", + "requires": { + "jimp": "^0.6.4" + } + }, + "prebuild-install": { + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-5.3.3.tgz", + "integrity": "sha512-GV+nsUXuPW2p8Zy7SarF/2W/oiK8bFQgJcncoJ0d7kRpekEA0ftChjfEaF9/Y+QJEc/wFR7RAEa8lYByuUIe2g==", + "requires": { + "detect-libc": "^1.0.3", + "expand-template": "^2.0.3", + "github-from-package": "0.0.0", + "minimist": "^1.2.0", + "mkdirp": "^0.5.1", + "napi-build-utils": "^1.0.1", + "node-abi": "^2.7.0", + "noop-logger": "^0.1.1", + "npmlog": "^4.0.1", + "pump": "^3.0.0", + "rc": "^1.2.7", + "simple-get": "^3.0.3", + "tar-fs": "^2.0.0", + "tunnel-agent": "^0.6.0", + "which-pm-runs": "^1.0.0" + }, + "dependencies": { + "minimist": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=" + } + } + }, + "prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=" + }, + "prepend-http": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", + "integrity": "sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=" + }, + "pretty-bytes": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.3.0.tgz", + "integrity": "sha512-hjGrh+P926p4R4WbaB6OckyRtO0F0/lQBiT+0gnxjV+5kjPBrfVBFCsCLbMqVQeydvIoouYTCmmEURiH3R1Bdg==" + }, + "pretty-error": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/pretty-error/-/pretty-error-2.1.1.tgz", + "integrity": "sha1-X0+HyPkeWuPzuoerTPXgOxoX8aM=", + "requires": { + "renderkid": "^2.0.1", + "utila": "~0.4" + } + }, + "prismjs": { + "version": "1.18.0", + "resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.18.0.tgz", + "integrity": "sha512-N0r3i/Cto516V8+GKKamhsPVZSFcO0TMUBtIDW6uq6BVqoC3FNtZVZ+cmH16N2XtGQlgRN+sFUTjOdCsEP51qw==", + "requires": { + "clipboard": "^2.0.0" + } + }, + "private": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/private/-/private-0.1.8.tgz", + "integrity": "sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg==" + }, + "probe-image-size": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/probe-image-size/-/probe-image-size-4.1.1.tgz", + "integrity": "sha512-42LqKZqTLxH/UvAZ2/cKhAsR4G/Y6B7i7fI2qtQu9hRBK4YjS6gqO+QRtwTjvojUx4+/+JuOMzLoFyRecT9qRw==", + "requires": { + "any-promise": "^1.3.0", + "deepmerge": "^4.0.0", + "inherits": 
"^2.0.3", + "next-tick": "^1.0.0", + "request": "^2.83.0", + "stream-parser": "~0.3.1" + } + }, + "process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=" + }, + "process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + }, + "progress": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", + "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==" + }, + "promise": { + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz", + "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==", + "requires": { + "asap": "~2.0.3" + } + }, + "promise-inflight": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/promise-inflight/-/promise-inflight-1.0.1.tgz", + "integrity": "sha1-mEcocL8igTL8vdhoEputEsPAKeM=" + }, + "prompts": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.3.0.tgz", + "integrity": "sha512-NfbbPPg/74fT7wk2XYQ7hAIp9zJyZp5Fu19iRbORqqy1BhtrkZ0fPafBU+7bmn8ie69DpT0R6QpJIN2oisYjJg==", + "requires": { + "kleur": "^3.0.3", + "sisteransi": "^1.0.3" + } + }, + "prop-types": { + "version": "15.7.2", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", + "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", + "requires": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.8.1" + } + }, + "property-information": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-4.2.0.tgz", + "integrity": "sha512-TlgDPagHh+eBKOnH2VYvk8qbwsCG/TAJdmTL7f1PROUcSO8qt/KSmShEQ/OKvock8X9tFjtqjCScyOkkkvIKVQ==", + "requires": { + "xtend": "^4.0.1" + } + }, + "proto-list": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz", + "integrity": "sha1-IS1b/hMYMGpCD2QCuOJv85ZHqEk=" + }, + "protocols": { + "version": "1.4.7", + "resolved": "https://registry.npmjs.org/protocols/-/protocols-1.4.7.tgz", + "integrity": "sha512-Fx65lf9/YDn3hUX08XUc0J8rSux36rEsyiv21ZGUC1mOyeM3lTRpZLcrm8aAolzS4itwVfm7TAPyxC2E5zd6xg==" + }, + "proxy-addr": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.5.tgz", + "integrity": "sha512-t/7RxHXPH6cJtP0pRG6smSr9QJidhB+3kXu0KgXnbGYMgzEnUxRQ4/LDdfOwZEMyIh3/xHb8PX3t+lfL9z+YVQ==", + "requires": { + "forwarded": "~0.1.2", + "ipaddr.js": "1.9.0" + } + }, + "prr": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz", + "integrity": "sha1-0/wRS6BplaRexok/SEzrHXj19HY=" + }, + "pseudomap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", + "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=" + }, + "psl": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.7.0.tgz", + "integrity": "sha512-5NsSEDv8zY70ScRnOTn7bK7eanl2MvFrOrS/R6x+dBt5g1ghnj9Zv90kO8GwT8gxcu2ANyFprnFYB85IogIJOQ==" + }, + "public-encrypt": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/public-encrypt/-/public-encrypt-4.0.3.tgz", + 
"integrity": "sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q==", + "requires": { + "bn.js": "^4.1.0", + "browserify-rsa": "^4.0.0", + "create-hash": "^1.1.0", + "parse-asn1": "^5.0.0", + "randombytes": "^2.0.1", + "safe-buffer": "^5.1.2" + } + }, + "pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "requires": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "pumpify": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/pumpify/-/pumpify-1.5.1.tgz", + "integrity": "sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ==", + "requires": { + "duplexify": "^3.6.0", + "inherits": "^2.0.3", + "pump": "^2.0.0" + }, + "dependencies": { + "pump": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pump/-/pump-2.0.1.tgz", + "integrity": "sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA==", + "requires": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + } + } + }, + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" + }, + "q": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/q/-/q-1.5.1.tgz", + "integrity": "sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc=" + }, + "qs": { + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz", + "integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==" + }, + "query-string": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/query-string/-/query-string-5.1.1.tgz", + "integrity": "sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw==", + "requires": { + "decode-uri-component": "^0.2.0", + "object-assign": "^4.1.0", + "strict-uri-encode": "^1.0.0" + } + }, + "querystring": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=" + }, + "querystring-es3": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/querystring-es3/-/querystring-es3-0.2.1.tgz", + "integrity": "sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM=" + }, + "querystringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.1.1.tgz", + "integrity": "sha512-w7fLxIRCRT7U8Qu53jQnJyPkYZIaR4n5151KMfcJlO/A9397Wxb1amJvROTK6TOnp7PfoAmg/qXiNHI+08jRfA==" + }, + "randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "requires": { + "safe-buffer": "^5.1.0" + } + }, + "randomfill": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/randomfill/-/randomfill-1.0.4.tgz", + "integrity": "sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw==", + "requires": { + "randombytes": "^2.0.5", + "safe-buffer": "^5.1.0" + } + }, + "range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": 
"sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" + }, + "raw-body": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz", + "integrity": "sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==", + "requires": { + "bytes": "3.1.0", + "http-errors": "1.7.2", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "dependencies": { + "bytes": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", + "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==" + } + } + }, + "raw-loader": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/raw-loader/-/raw-loader-0.5.1.tgz", + "integrity": "sha1-DD0L6u2KAclm2Xh793goElKpeao=" + }, + "rc": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "requires": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "dependencies": { + "minimist": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=" + }, + "strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=" + } + } + }, + "react": { + "version": "16.12.0", + "resolved": "https://registry.npmjs.org/react/-/react-16.12.0.tgz", + "integrity": "sha512-fglqy3k5E+81pA8s+7K0/T3DBCF0ZDOher1elBFzF7O6arXJgzyu/FW+COxFvAWXJoJN9KIZbT2LXlukwphYTA==", + "requires": { + "loose-envify": "^1.1.0", + "object-assign": "^4.1.1", + "prop-types": "^15.6.2" + } + }, + "react-dev-utils": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/react-dev-utils/-/react-dev-utils-4.2.3.tgz", + "integrity": "sha512-uvmkwl5uMexCmC0GUv1XGQP0YjfYePJufGg4YYiukhqk2vN1tQxwWJIBERqhOmSi80cppZg8mZnPP/kOMf1sUQ==", + "requires": { + "address": "1.0.3", + "babel-code-frame": "6.26.0", + "chalk": "1.1.3", + "cross-spawn": "5.1.0", + "detect-port-alt": "1.1.3", + "escape-string-regexp": "1.0.5", + "filesize": "3.5.11", + "global-modules": "1.0.0", + "gzip-size": "3.0.0", + "inquirer": "3.3.0", + "is-root": "1.0.0", + "opn": "5.1.0", + "react-error-overlay": "^3.0.0", + "recursive-readdir": "2.2.1", + "shell-quote": "1.6.1", + "sockjs-client": "1.1.4", + "strip-ansi": "3.0.1", + "text-table": "0.2.0" + }, + "dependencies": { + "address": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/address/-/address-1.0.3.tgz", + "integrity": "sha512-z55ocwKBRLryBs394Sm3ushTtBeg6VAeuku7utSoSnsJKvKcnXFIyC6vh27n3rXyxSgkJBBCAvyOn7gSUcTYjg==" + }, + "ansi-escapes": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", + "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==" + }, + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" + }, + "ansi-styles": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", + "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=" + }, + "chalk": { + "version": "1.1.3", + 
"resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", + "requires": { + "ansi-styles": "^2.2.1", + "escape-string-regexp": "^1.0.2", + "has-ansi": "^2.0.0", + "strip-ansi": "^3.0.0", + "supports-color": "^2.0.0" + } + }, + "chardet": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.4.2.tgz", + "integrity": "sha1-tUc7M9yXxCTl2Y3IfVXU2KKci/I=" + }, + "cli-cursor": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", + "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", + "requires": { + "restore-cursor": "^2.0.0" + } + }, + "cross-spawn": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", + "integrity": "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=", + "requires": { + "lru-cache": "^4.0.1", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "detect-port-alt": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/detect-port-alt/-/detect-port-alt-1.1.3.tgz", + "integrity": "sha1-pNLwYddXoDTs83xRQmCph1DysTE=", + "requires": { + "address": "^1.0.1", + "debug": "^2.6.0" + } + }, + "external-editor": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-2.2.0.tgz", + "integrity": "sha512-bSn6gvGxKt+b7+6TKEv1ZycHleA7aHhRHyAqJyp5pbUFuYYNIzpZnQDk7AsYckyWdEnTeAnay0aCy2aV6iTk9A==", + "requires": { + "chardet": "^0.4.0", + "iconv-lite": "^0.4.17", + "tmp": "^0.0.33" + } + }, + "figures": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", + "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", + "requires": { + "escape-string-regexp": "^1.0.5" + } + }, + "inquirer": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-3.3.0.tgz", + "integrity": "sha512-h+xtnyk4EwKvFWHrUYsWErEVR+igKtLdchu+o0Z1RL7VU/jVMFbYir2bp6bAj8efFNxWqHX0dIss6fJQ+/+qeQ==", + "requires": { + "ansi-escapes": "^3.0.0", + "chalk": "^2.0.0", + "cli-cursor": "^2.1.0", + "cli-width": "^2.0.0", + "external-editor": "^2.0.4", + "figures": "^2.0.0", + "lodash": "^4.3.0", + "mute-stream": "0.0.7", + "run-async": "^2.2.0", + "rx-lite": "^4.0.8", + "rx-lite-aggregates": "^4.0.8", + "string-width": "^2.1.0", + "strip-ansi": "^4.0.0", + "through": "^2.3.6" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "requires": { + "color-convert": "^1.9.0" + } + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "requires": { + "ansi-regex": "^3.0.0" + } + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "requires": { + "has-flag": "^3.0.0" + } + } + } + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + }, + "lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "requires": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "mimic-fn": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==" + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "mute-stream": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", + "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=" + }, + "onetime": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", + "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "requires": { + "mimic-fn": "^1.0.0" + } + }, + "restore-cursor": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", + "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "requires": { + "onetime": "^2.0.0", + "signal-exit": "^3.0.2" + } + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "requires": { + "ansi-regex": "^3.0.0" + } + } + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "requires": { + "ansi-regex": "^2.0.0" + } + }, + "supports-color": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", + "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=" + } + } + }, + "react-dom": { + "version": "16.12.0", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-16.12.0.tgz", + "integrity": "sha512-LMxFfAGrcS3kETtQaCkTKjMiifahaMySFDn71fZUNpPHZQEzmk/GiAeIT8JSOrHB23fnuCOMruL2a8NYlw+8Gw==", + "requires": { + "loose-envify": "^1.1.0", + "object-assign": "^4.1.1", + "prop-types": "^15.6.2", + "scheduler": "^0.18.0" + } + }, + "react-error-overlay": { 
+ "version": "3.0.0", + "resolved": "https://registry.npmjs.org/react-error-overlay/-/react-error-overlay-3.0.0.tgz", + "integrity": "sha512-XzgvowFrwDo6TWcpJ/WTiarb9UI6lhA4PMzS7n1joK3sHfBBBOQHUc0U4u57D6DWO9vHv6lVSWx2Q/Ymfyv4hw==" + }, + "react-fast-compare": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-2.0.4.tgz", + "integrity": "sha512-suNP+J1VU1MWFKcyt7RtjiSWUjvidmQSlqu+eHslq+342xCbGTYmC0mEhPCOHxlW0CywylOC1u2DFAT+bv4dBw==" + }, + "react-helmet": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/react-helmet/-/react-helmet-5.2.1.tgz", + "integrity": "sha512-CnwD822LU8NDBnjCpZ4ySh8L6HYyngViTZLfBBb3NjtrpN8m49clH8hidHouq20I51Y6TpCTISCBbqiY5GamwA==", + "requires": { + "object-assign": "^4.1.1", + "prop-types": "^15.5.4", + "react-fast-compare": "^2.0.2", + "react-side-effect": "^1.1.0" + } + }, + "react-hot-loader": { + "version": "4.12.18", + "resolved": "https://registry.npmjs.org/react-hot-loader/-/react-hot-loader-4.12.18.tgz", + "integrity": "sha512-qYD0Qi9lIbg9jLyfmodfqvAQqCBsoPKxAhca8Nxvy2/2pO5Q9r2kM28jN0bbbSnhwK8dJ7FjsfVtXKOxMW+bqw==", + "requires": { + "fast-levenshtein": "^2.0.6", + "global": "^4.3.0", + "hoist-non-react-statics": "^3.3.0", + "loader-utils": "^1.1.0", + "prop-types": "^15.6.1", + "react-lifecycles-compat": "^3.0.4", + "shallowequal": "^1.1.0", + "source-map": "^0.7.3" + }, + "dependencies": { + "source-map": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", + "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==" + } + } + }, + "react-is": { + "version": "16.12.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.12.0.tgz", + "integrity": "sha512-rPCkf/mWBtKc97aLL9/txD8DZdemK0vkA3JMLShjlJB3Pj3s+lpf1KaBzMfQrAmhMQB0n1cU/SUGgKKBCe837Q==" + }, + "react-lifecycles-compat": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz", + "integrity": "sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA==" + }, + "react-reconciler": { + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/react-reconciler/-/react-reconciler-0.24.0.tgz", + "integrity": "sha512-gAGnwWkf+NOTig9oOowqid9O0HjTDC+XVGBCAmJYYJ2A2cN/O4gDdIuuUQjv8A4v6GDwVfJkagpBBLW5OW9HSw==", + "optional": true, + "requires": { + "loose-envify": "^1.1.0", + "object-assign": "^4.1.1", + "prop-types": "^15.6.2", + "scheduler": "^0.18.0" + } + }, + "react-side-effect": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/react-side-effect/-/react-side-effect-1.2.0.tgz", + "integrity": "sha512-v1ht1aHg5k/thv56DRcjw+WtojuuDHFUgGfc+bFHOWsF4ZK6C2V57DO0Or0GPsg6+LSTE0M6Ry/gfzhzSwbc5w==", + "requires": { + "shallowequal": "^1.0.1" + } + }, + "read": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", + "integrity": "sha1-s9oZvQUkMal2cdRKQmNK33ELQMQ=", + "requires": { + "mute-stream": "~0.0.4" + } + }, + "read-chunk": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/read-chunk/-/read-chunk-3.2.0.tgz", + "integrity": "sha512-CEjy9LCzhmD7nUpJ1oVOE6s/hBkejlcJEgLQHVnQznOSilOPb+kpKktlLfFDK3/WP43+F80xkUTM2VOkYoSYvQ==", + "requires": { + "pify": "^4.0.1", + "with-open-file": "^0.1.6" + } + }, + "read-pkg": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", + "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=", + 
"requires": { + "load-json-file": "^2.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^2.0.0" + }, + "dependencies": { + "path-type": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", + "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", + "requires": { + "pify": "^2.0.0" + } + }, + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=" + } + } + }, + "read-pkg-up": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", + "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", + "requires": { + "find-up": "^2.0.0", + "read-pkg": "^2.0.0" + }, + "dependencies": { + "find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "requires": { + "locate-path": "^2.0.0" + } + }, + "locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "requires": { + "p-locate": "^2.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "requires": { + "p-try": "^1.0.0" + } + }, + "p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "requires": { + "p-limit": "^1.1.0" + } + }, + "p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=" + } + } + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "readdirp": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz", + "integrity": "sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==", + "requires": { + "graceful-fs": "^4.1.11", + "micromatch": "^3.1.10", + "readable-stream": "^2.0.2" + } + }, + "rebass": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/rebass/-/rebass-4.0.7.tgz", + "integrity": "sha512-GJot6j6Qcr7jk1QIgf9qBoud75CGRpN8pGcEo98TSp4KNSWV01ZLvGwFKGI35oEBuNs+lpEd3+pnwkQUTSFytg==", + "requires": { + "reflexbox": "^4.0.6" + } + }, + "recursive-readdir": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.1.tgz", + "integrity": "sha1-kO8jHQd4xc4JPJpI105cVCLROpk=", + "requires": { + "minimatch": "3.0.3" + }, + "dependencies": { + "minimatch": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.3.tgz", + "integrity": "sha1-Kk5AkLlrLbBqnX3wEFWmKnfJt3Q=", + "requires": { + "brace-expansion": "^1.0.0" + } + } + } + }, + "redent": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-1.0.0.tgz", + "integrity": 
"sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94=", + "requires": { + "indent-string": "^2.1.0", + "strip-indent": "^1.0.1" + }, + "dependencies": { + "indent-string": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-2.1.0.tgz", + "integrity": "sha1-ji1INIdCEhtKghi3oTfppSBJ3IA=", + "requires": { + "repeating": "^2.0.0" + } + } + } + }, + "redux": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/redux/-/redux-4.0.5.tgz", + "integrity": "sha512-VSz1uMAH24DM6MF72vcojpYPtrTUu3ByVWfPL1nPfVRb5mZVTve5GnNCUV53QM/BZ66xfWrm0CTWoM+Xlz8V1w==", + "requires": { + "loose-envify": "^1.4.0", + "symbol-observable": "^1.2.0" + } + }, + "redux-thunk": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/redux-thunk/-/redux-thunk-2.3.0.tgz", + "integrity": "sha512-km6dclyFnmcvxhAcrQV2AkZmPQjzPDjgVlQtR0EQjxZPyJ0BnMf3in1ryuR8A2qU0HldVRfxYXbFSKlI3N7Slw==" + }, + "reflexbox": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/reflexbox/-/reflexbox-4.0.6.tgz", + "integrity": "sha512-UNUL4YoJEXAPjRKHuty1tuOk+LV1nDJ2KYViDcH7lYm5yU3AQ+EKNXxPU3E14bQNK/pE09b1hYl+ZKdA94tWLQ==", + "requires": { + "@emotion/core": "^10.0.0", + "@emotion/styled": "^10.0.0", + "@styled-system/css": "^5.0.0", + "@styled-system/should-forward-prop": "^5.0.0", + "styled-system": "^5.0.0" + } + }, + "regenerate": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.0.tgz", + "integrity": "sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg==" + }, + "regenerate-unicode-properties": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz", + "integrity": "sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA==", + "requires": { + "regenerate": "^1.4.0" + } + }, + "regenerator-runtime": { + "version": "0.13.3", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz", + "integrity": "sha512-naKIZz2GQ8JWh///G7L3X6LaQUAMp2lvb1rvwwsURe/VXwD6VMfr+/1NuNw3ag8v2kY1aQ/go5SNn79O9JU7yw==" + }, + "regenerator-transform": { + "version": "0.14.1", + "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.14.1.tgz", + "integrity": "sha512-flVuee02C3FKRISbxhXl9mGzdbWUVHubl1SMaknjxkFB1/iqpJhArQUvRxOOPEc/9tAiX0BaQ28FJH10E4isSQ==", + "requires": { + "private": "^0.1.6" + } + }, + "regex-not": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/regex-not/-/regex-not-1.0.2.tgz", + "integrity": "sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==", + "requires": { + "extend-shallow": "^3.0.2", + "safe-regex": "^1.1.0" + } + }, + "regexp.prototype.flags": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.3.0.tgz", + "integrity": "sha512-2+Q0C5g951OlYlJz6yu5/M33IcsESLlLfsyIaLJaG4FA2r4yP8MvVMJUUP/fVBkSpbbbZlS5gynbEWLipiiXiQ==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1" + } + }, + "regexpp": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", + "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==" + }, + "regexpu-core": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-4.6.0.tgz", + "integrity": 
"sha512-YlVaefl8P5BnFYOITTNzDvan1ulLOiXJzCNZxduTIosN17b87h3bvG9yHMoHaRuo88H4mQ06Aodj5VtYGGGiTg==", + "requires": { + "regenerate": "^1.4.0", + "regenerate-unicode-properties": "^8.1.0", + "regjsgen": "^0.5.0", + "regjsparser": "^0.6.0", + "unicode-match-property-ecmascript": "^1.0.4", + "unicode-match-property-value-ecmascript": "^1.1.0" + } + }, + "registry-auth-token": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.0.0.tgz", + "integrity": "sha512-lpQkHxd9UL6tb3k/aHAVfnVtn+Bcs9ob5InuFLLEDqSqeq+AljB8GZW9xY0x7F+xYwEcjKe07nyoxzEYz6yvkw==", + "requires": { + "rc": "^1.2.8", + "safe-buffer": "^5.0.1" + } + }, + "registry-url": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-5.1.0.tgz", + "integrity": "sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw==", + "requires": { + "rc": "^1.2.8" + } + }, + "regjsgen": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.5.1.tgz", + "integrity": "sha512-5qxzGZjDs9w4tzT3TPhCJqWdCc3RLYwy9J2NB0nm5Lz+S273lvWcpjaTGHsT1dc6Hhfq41uSEOw8wBmxrKOuyg==" + }, + "regjsparser": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.6.2.tgz", + "integrity": "sha512-E9ghzUtoLwDekPT0DYCp+c4h+bvuUpe6rRHCTYn6eGoqj1LgKXxT6I0Il4WbjhQkOghzi/V+y03bPKvbllL93Q==", + "requires": { + "jsesc": "~0.5.0" + }, + "dependencies": { + "jsesc": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", + "integrity": "sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=" + } + } + }, + "remark": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/remark/-/remark-10.0.1.tgz", + "integrity": "sha512-E6lMuoLIy2TyiokHprMjcWNJ5UxfGQjaMSMhV+f4idM625UjjK4j798+gPs5mfjzDE6vL0oFKVeZM6gZVSVrzQ==", + "requires": { + "remark-parse": "^6.0.0", + "remark-stringify": "^6.0.0", + "unified": "^7.0.0" + }, + "dependencies": { + "remark-stringify": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-6.0.4.tgz", + "integrity": "sha512-eRWGdEPMVudijE/psbIDNcnJLRVx3xhfuEsTDGgH4GsFF91dVhw5nhmnBppafJ7+NWINW6C7ZwWbi30ImJzqWg==", + "requires": { + "ccount": "^1.0.0", + "is-alphanumeric": "^1.0.0", + "is-decimal": "^1.0.0", + "is-whitespace-character": "^1.0.0", + "longest-streak": "^2.0.1", + "markdown-escapes": "^1.0.0", + "markdown-table": "^1.1.0", + "mdast-util-compact": "^1.0.0", + "parse-entities": "^1.0.2", + "repeat-string": "^1.5.4", + "state-toggle": "^1.0.0", + "stringify-entities": "^1.0.1", + "unherit": "^1.0.4", + "xtend": "^4.0.1" + } + }, + "unified": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/unified/-/unified-7.1.0.tgz", + "integrity": "sha512-lbk82UOIGuCEsZhPj8rNAkXSDXd6p0QLzIuSsCdxrqnqU56St4eyOB+AlXsVgVeRmetPTYydIuvFfpDIed8mqw==", + "requires": { + "@types/unist": "^2.0.0", + "@types/vfile": "^3.0.0", + "bail": "^1.0.0", + "extend": "^3.0.0", + "is-plain-obj": "^1.1.0", + "trough": "^1.0.0", + "vfile": "^3.0.0", + "x-is-string": "^0.1.0" + } + } + } + }, + "remark-parse": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-6.0.3.tgz", + "integrity": "sha512-QbDXWN4HfKTUC0hHa4teU463KclLAnwpn/FBn87j9cKYJWWawbiLgMfP2Q4XwhxxuuuOxHlw+pSN0OKuJwyVvg==", + "requires": { + "collapse-white-space": "^1.0.2", + "is-alphabetical": "^1.0.0", + "is-decimal": "^1.0.0", + "is-whitespace-character": "^1.0.0", + "is-word-character": "^1.0.0", + 
"markdown-escapes": "^1.0.0", + "parse-entities": "^1.1.0", + "repeat-string": "^1.5.4", + "state-toggle": "^1.0.0", + "trim": "0.0.1", + "trim-trailing-lines": "^1.0.0", + "unherit": "^1.0.4", + "unist-util-remove-position": "^1.0.0", + "vfile-location": "^2.0.0", + "xtend": "^4.0.1" + } + }, + "remark-retext": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/remark-retext/-/remark-retext-3.1.3.tgz", + "integrity": "sha512-UujXAm28u4lnUvtOZQFYfRIhxX+auKI9PuA2QpQVTT7gYk1OgX6o0OUrSo1KOa6GNrFX+OODOtS5PWIHPxM7qw==", + "requires": { + "mdast-util-to-nlcst": "^3.2.0" + } + }, + "remark-stringify": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-5.0.0.tgz", + "integrity": "sha512-Ws5MdA69ftqQ/yhRF9XhVV29mhxbfGhbz0Rx5bQH+oJcNhhSM6nCu1EpLod+DjrFGrU0BMPs+czVmJZU7xiS7w==", + "requires": { + "ccount": "^1.0.0", + "is-alphanumeric": "^1.0.0", + "is-decimal": "^1.0.0", + "is-whitespace-character": "^1.0.0", + "longest-streak": "^2.0.1", + "markdown-escapes": "^1.0.0", + "markdown-table": "^1.1.0", + "mdast-util-compact": "^1.0.0", + "parse-entities": "^1.0.2", + "repeat-string": "^1.5.4", + "state-toggle": "^1.0.0", + "stringify-entities": "^1.0.1", + "unherit": "^1.0.4", + "xtend": "^4.0.1" + } + }, + "remove-trailing-separator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", + "integrity": "sha1-wkvOKig62tW8P1jg1IJJuSN52O8=" + }, + "renderkid": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/renderkid/-/renderkid-2.0.3.tgz", + "integrity": "sha512-z8CLQp7EZBPCwCnncgf9C4XAi3WR0dv+uWu/PjIyhhAb5d6IJ/QZqlHFprHeKT+59//V6BNUsLbvN8+2LarxGA==", + "requires": { + "css-select": "^1.1.0", + "dom-converter": "^0.2", + "htmlparser2": "^3.3.0", + "strip-ansi": "^3.0.0", + "utila": "^0.4.0" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "requires": { + "ansi-regex": "^2.0.0" + } + } + } + }, + "repeat-element": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.3.tgz", + "integrity": "sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g==" + }, + "repeat-string": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", + "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=" + }, + "repeating": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/repeating/-/repeating-2.0.1.tgz", + "integrity": "sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo=", + "requires": { + "is-finite": "^1.0.0" + } + }, + "replace-ext": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/replace-ext/-/replace-ext-1.0.0.tgz", + "integrity": "sha1-3mMSg3P8v3w8z6TeWkgMRaZ5WOs=" + }, + "request": { + "version": "2.88.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", + "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", + "requires": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + 
"har-validator": "~5.1.0", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.4.3", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" + }, + "dependencies": { + "qs": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", + "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" + } + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=" + }, + "require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==" + }, + "requires-port": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", + "integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8=" + }, + "resolve": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.14.1.tgz", + "integrity": "sha512-fn5Wobh4cxbLzuHaE+nphztHy43/b++4M6SsGFC2gB8uYwf0C8LcarfCz1un7UTW8OFQg9iNjZ4xpcFVGebDPg==", + "requires": { + "path-parse": "^1.0.6" + } + }, + "resolve-cwd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-2.0.0.tgz", + "integrity": "sha1-AKn3OHVW4nA46uIyyqNypqWbZlo=", + "requires": { + "resolve-from": "^3.0.0" + }, + "dependencies": { + "resolve-from": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", + "integrity": "sha1-six699nWiBvItuZTM17rywoYh0g=" + } + } + }, + "resolve-dir": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/resolve-dir/-/resolve-dir-1.0.1.tgz", + "integrity": "sha1-eaQGRMNivoLybv/nOcm7U4IEb0M=", + "requires": { + "expand-tilde": "^2.0.0", + "global-modules": "^1.0.0" + } + }, + "resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==" + }, + "resolve-url": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz", + "integrity": "sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=" + }, + "responselike": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz", + "integrity": "sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec=", + "requires": { + "lowercase-keys": "^1.0.0" + } + }, + "restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "requires": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + } + }, + "ret": { + "version": "0.1.15", + "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", + "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==" + }, + "retext-english": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/retext-english/-/retext-english-3.0.4.tgz", + "integrity": 
"sha512-yr1PgaBDde+25aJXrnt3p1jvT8FVLVat2Bx8XeAWX13KXo8OT+3nWGU3HWxM4YFJvmfqvJYJZG2d7xxaO774gw==", + "requires": { + "parse-english": "^4.0.0", + "unherit": "^1.0.4" + } + }, + "retry": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", + "integrity": "sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs=" + }, + "reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==" + }, + "rgb-regex": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/rgb-regex/-/rgb-regex-1.0.1.tgz", + "integrity": "sha1-wODWiC3w4jviVKR16O3UGRX+rrE=" + }, + "rgba-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/rgba-regex/-/rgba-regex-1.0.0.tgz", + "integrity": "sha1-QzdOLiyglosO8VI0YLfXMP8i7rM=" + }, + "rimraf": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", + "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "requires": { + "glob": "^7.1.3" + } + }, + "ripemd160": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.2.tgz", + "integrity": "sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==", + "requires": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1" + } + }, + "run-async": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz", + "integrity": "sha1-A3GrSuC91yDUFm19/aZP96RFpsA=", + "requires": { + "is-promise": "^2.1.0" + } + }, + "run-parallel": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.1.9.tgz", + "integrity": "sha512-DEqnSRTDw/Tc3FXf49zedI638Z9onwUotBMiUFKmrO2sdFKIbXamXGQ3Axd4qgphxKB4kw/qP1w5kTxnfU1B9Q==" + }, + "run-queue": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/run-queue/-/run-queue-1.0.3.tgz", + "integrity": "sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec=", + "requires": { + "aproba": "^1.1.1" + } + }, + "rx-lite": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/rx-lite/-/rx-lite-4.0.8.tgz", + "integrity": "sha1-Cx4Rr4vESDbwSmQH6S2kJGe3lEQ=" + }, + "rx-lite-aggregates": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/rx-lite-aggregates/-/rx-lite-aggregates-4.0.8.tgz", + "integrity": "sha1-dTuHqJoRyVRnxKwWJsTvxOBcZ74=", + "requires": { + "rx-lite": "*" + } + }, + "rxjs": { + "version": "6.5.4", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.5.4.tgz", + "integrity": "sha512-naMQXcgEo3csAEGvw/NydRA0fuS2nDZJiw1YUWFKU7aPPAPGZEsD4Iimit96qwCieH6y614MCLYwdkrWx7z/7Q==", + "requires": { + "tslib": "^1.9.0" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "safe-regex": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", + "integrity": "sha1-QKNmnzsHfR6UPURinhV91IAjvy4=", + "requires": { + "ret": "~0.1.10" + } + }, + "safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "sanitize-html": { + "version": "1.20.1", + 
"resolved": "https://registry.npmjs.org/sanitize-html/-/sanitize-html-1.20.1.tgz", + "integrity": "sha512-txnH8TQjaQvg2Q0HY06G6CDJLVYCpbnxrdO0WN8gjCKaU5J0KbyGYhZxx5QJg3WLZ1lB7XU9kDkfrCXUozqptA==", + "requires": { + "chalk": "^2.4.1", + "htmlparser2": "^3.10.0", + "lodash.clonedeep": "^4.5.0", + "lodash.escaperegexp": "^4.1.2", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.mergewith": "^4.6.1", + "postcss": "^7.0.5", + "srcset": "^1.0.0", + "xtend": "^4.0.1" + } + }, + "sax": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", + "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" + }, + "scheduler": { + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.18.0.tgz", + "integrity": "sha512-agTSHR1Nbfi6ulI0kYNK0203joW2Y5W4po4l+v03tOoiJKpTBbxpNhWDvqc/4IcOw+KLmSiQLTasZ4cab2/UWQ==", + "requires": { + "loose-envify": "^1.1.0", + "object-assign": "^4.1.1" + } + }, + "schema-utils": { + "version": "0.4.7", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-0.4.7.tgz", + "integrity": "sha512-v/iwU6wvwGK8HbU9yi3/nhGzP0yGSuhQMzL6ySiec1FSrZZDkhm4noOSWzrNFo/jEc+SJY6jRTwuwbSXJPDUnQ==", + "requires": { + "ajv": "^6.1.0", + "ajv-keywords": "^3.1.0" + } + }, + "scroll-behavior": { + "version": "0.9.11", + "resolved": "https://registry.npmjs.org/scroll-behavior/-/scroll-behavior-0.9.11.tgz", + "integrity": "sha512-Eo32cg2uFiQwEiJXHHoTfXLybTlyA1O3ZOIiTz8EqRWie+ExL+7l8PcejhKT+5QmRtykXSlsTRVDC3BVB3S/bg==", + "requires": { + "dom-helpers": "^3.4.0", + "invariant": "^2.2.4" + } + }, + "section-matter": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz", + "integrity": "sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==", + "requires": { + "extend-shallow": "^2.0.1", + "kind-of": "^6.0.0" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, + "seek-bzip": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/seek-bzip/-/seek-bzip-1.0.5.tgz", + "integrity": "sha1-z+kXyz0nS8/6x5J1ivUxc+sfq9w=", + "requires": { + "commander": "~2.8.1" + }, + "dependencies": { + "commander": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.8.1.tgz", + "integrity": "sha1-Br42f+v9oMMwqh4qBy09yXYkJdQ=", + "requires": { + "graceful-readlink": ">= 1.0.0" + } + } + } + }, + "select": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/select/-/select-1.1.2.tgz", + "integrity": "sha1-DnNQrN7ICxEIUoeG7B1EGNEbOW0=", + "optional": true + }, + "select-hose": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz", + "integrity": "sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo=" + }, + "selfsigned": { + "version": "1.10.7", + "resolved": "https://registry.npmjs.org/selfsigned/-/selfsigned-1.10.7.tgz", + "integrity": "sha512-8M3wBCzeWIJnQfl43IKwOmC4H/RAp50S8DF60znzjW5GVqTcSe2vWclt7hmYVPkKPlHWOu5EaWOMZ2Y6W8ZXTA==", + "requires": { + "node-forge": "0.9.0" + } + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": 
"sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + }, + "semver-diff": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-2.1.0.tgz", + "integrity": "sha1-S7uEN8jTfksM8aaP1ybsbWRdbTY=", + "requires": { + "semver": "^5.0.3" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "semver-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/semver-regex/-/semver-regex-2.0.0.tgz", + "integrity": "sha512-mUdIBBvdn0PLOeP3TEkMH7HHeUP3GjsXCwKarjv/kGmUFOYg1VqEemKhoQpWMu6X2I8kHeuVdGibLGkVK+/5Qw==" + }, + "semver-truncate": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/semver-truncate/-/semver-truncate-1.1.2.tgz", + "integrity": "sha1-V/Qd5pcHpicJp+AQS6IRcQnqR+g=", + "requires": { + "semver": "^5.3.0" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "send": { + "version": "0.17.1", + "resolved": "https://registry.npmjs.org/send/-/send-0.17.1.tgz", + "integrity": "sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==", + "requires": { + "debug": "2.6.9", + "depd": "~1.1.2", + "destroy": "~1.0.4", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "~1.7.2", + "mime": "1.6.0", + "ms": "2.1.1", + "on-finished": "~2.3.0", + "range-parser": "~1.2.1", + "statuses": "~1.5.0" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + }, + "dependencies": { + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==" + }, + "ms": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", + "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==" + } + } + }, + "serialize-javascript": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-2.1.2.tgz", + "integrity": "sha512-rs9OggEUF0V4jUSecXazOYsLfu7OGK2qIn3c7IPBiffz32XniEp/TX9Xmc9LQfK2nQ2QKHvZ2oygKUGU0lG4jQ==" + }, + "serve-index": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/serve-index/-/serve-index-1.9.1.tgz", + "integrity": "sha1-03aNabHn2C5c4FD/9bRTvqEqkjk=", + "requires": { + "accepts": "~1.3.4", + "batch": "0.6.1", + "debug": "2.6.9", + "escape-html": "~1.0.3", + "http-errors": "~1.6.2", + "mime-types": "~2.1.17", + "parseurl": "~1.3.2" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": 
"sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "http-errors": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", + "integrity": "sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0=", + "requires": { + "depd": "~1.1.2", + "inherits": "2.0.3", + "setprototypeof": "1.1.0", + "statuses": ">= 1.4.0 < 2" + } + }, + "inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "setprototypeof": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", + "integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==" + } + } + }, + "serve-static": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.14.1.tgz", + "integrity": "sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==", + "requires": { + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.17.1" + } + }, + "set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" + }, + "set-value": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz", + "integrity": "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==", + "requires": { + "extend-shallow": "^2.0.1", + "is-extendable": "^0.1.1", + "is-plain-object": "^2.0.3", + "split-string": "^3.0.1" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, + "setimmediate": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU=" + }, + "setprototypeof": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", + "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" + }, + "sha.js": { + "version": "2.4.11", + "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", + "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", + "requires": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "shallow-compare": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/shallow-compare/-/shallow-compare-1.2.2.tgz", + "integrity": "sha512-LUMFi+RppPlrHzbqmFnINTrazo0lPNwhcgzuAXVVcfy/mqPDrQmHAyz5bvV0gDAuRFrk804V0HpQ6u9sZ0tBeg==" + }, + "shallowequal": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz", + "integrity": "sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==" + }, + "sharp": { + "version": "0.23.4", + "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.23.4.tgz", + "integrity": 
"sha512-fJMagt6cT0UDy9XCsgyLi0eiwWWhQRxbwGmqQT6sY8Av4s0SVsT/deg8fobBQCTDU5iXRgz0rAeXoE2LBZ8g+Q==", + "requires": { + "color": "^3.1.2", + "detect-libc": "^1.0.3", + "nan": "^2.14.0", + "npmlog": "^4.1.2", + "prebuild-install": "^5.3.3", + "semver": "^6.3.0", + "simple-get": "^3.1.0", + "tar": "^5.0.5", + "tunnel-agent": "^0.6.0" + } + }, + "shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "requires": { + "shebang-regex": "^1.0.0" + } + }, + "shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=" + }, + "shell-quote": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.6.1.tgz", + "integrity": "sha1-9HgZSczkAmlxJ0MOo7PFR29IF2c=", + "requires": { + "array-filter": "~0.0.0", + "array-map": "~0.0.0", + "array-reduce": "~0.0.0", + "jsonify": "~0.0.0" + } + }, + "sift": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/sift/-/sift-5.1.0.tgz", + "integrity": "sha1-G78t+w63HlbEzH+1Z/vRNRtlAV4=" + }, + "signal-exit": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", + "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=" + }, + "simple-concat": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.0.tgz", + "integrity": "sha1-c0TLuLbib7J9ZrL8hvn21Zl1IcY=" + }, + "simple-get": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-3.1.0.tgz", + "integrity": "sha512-bCR6cP+aTdScaQCnQKbPKtJOKDp/hj9EDLJo3Nw4y1QksqaovlW/bnptB6/c1e+qmNIDHRK+oXFDdEqBT8WzUA==", + "requires": { + "decompress-response": "^4.2.0", + "once": "^1.3.1", + "simple-concat": "^1.0.0" + }, + "dependencies": { + "decompress-response": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-4.2.1.tgz", + "integrity": "sha512-jOSne2qbyE+/r8G1VU+G/82LBs2Fs4LAsTiLSHOCOMZQl2OKZ6i8i4IyHemTe+/yIXOtTcRQMzPcgyhoFlqPkw==", + "requires": { + "mimic-response": "^2.0.0" + } + }, + "mimic-response": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-2.0.0.tgz", + "integrity": "sha512-8ilDoEapqA4uQ3TwS0jakGONKXVJqpy+RpM+3b7pLdOjghCrEiGp9SRkFbUHAmZW9vdnrENWHjaweIoTIJExSQ==" + } + } + }, + "simple-swizzle": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", + "integrity": "sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo=", + "requires": { + "is-arrayish": "^0.3.1" + }, + "dependencies": { + "is-arrayish": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" + } + } + }, + "sisteransi": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.4.tgz", + "integrity": "sha512-/ekMoM4NJ59ivGSfKapeG+FWtrmWvA1p6FBZwXrqojw90vJu8lBmrTxCMuBCydKtkaUe2zt4PlxeTKpjwMbyig==" + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==" + }, + "slice-ansi": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz", + "integrity": "sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==", + "requires": { + "ansi-styles": "^3.2.0", + "astral-regex": "^1.0.0", + "is-fullwidth-code-point": "^2.0.0" + }, + "dependencies": { + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + } + } + }, + "slugify": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.3.6.tgz", + "integrity": "sha512-wA9XS475ZmGNlEnYYLPReSfuz/c3VQsEMoU43mi6OnKMCdbnFXd4/Yg7J0lBv8jkPolacMpOrWEaoYxuE1+hoQ==" + }, + "snapdragon": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", + "integrity": "sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==", + "requires": { + "base": "^0.11.1", + "debug": "^2.2.0", + "define-property": "^0.2.5", + "extend-shallow": "^2.0.1", + "map-cache": "^0.2.2", + "source-map": "^0.5.6", + "source-map-resolve": "^0.5.0", + "use": "^3.1.0" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "requires": { + "is-descriptor": "^0.1.0" + } + }, + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "requires": { + "is-extendable": "^0.1.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "snapdragon-node": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/snapdragon-node/-/snapdragon-node-2.1.1.tgz", + "integrity": "sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==", + "requires": { + "define-property": "^1.0.0", + "isobject": "^3.0.0", + "snapdragon-util": "^3.0.1" + }, + "dependencies": { + "define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", + "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", + "requires": { + "is-descriptor": "^1.0.0" + } + }, + "is-accessor-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", + "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-data-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", + "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-descriptor": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", + "integrity": 
"sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", + "requires": { + "is-accessor-descriptor": "^1.0.0", + "is-data-descriptor": "^1.0.0", + "kind-of": "^6.0.2" + } + } + } + }, + "snapdragon-util": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/snapdragon-util/-/snapdragon-util-3.0.1.tgz", + "integrity": "sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==", + "requires": { + "kind-of": "^3.2.0" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + }, + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "socket.io": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-2.3.0.tgz", + "integrity": "sha512-2A892lrj0GcgR/9Qk81EaY2gYhCBxurV0PfmmESO6p27QPrUK1J3zdns+5QPqvUYK2q657nSj0guoIil9+7eFg==", + "requires": { + "debug": "~4.1.0", + "engine.io": "~3.4.0", + "has-binary2": "~1.0.2", + "socket.io-adapter": "~1.1.0", + "socket.io-client": "2.3.0", + "socket.io-parser": "~3.4.0" + } + }, + "socket.io-adapter": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-1.1.2.tgz", + "integrity": "sha512-WzZRUj1kUjrTIrUKpZLEzFZ1OLj5FwLlAFQs9kuZJzJi5DKdU7FsWc36SNmA8iDOtwBQyT8FkrriRM8vXLYz8g==" + }, + "socket.io-client": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-2.3.0.tgz", + "integrity": "sha512-cEQQf24gET3rfhxZ2jJ5xzAOo/xhZwK+mOqtGRg5IowZsMgwvHwnf/mCRapAAkadhM26y+iydgwsXGObBB5ZdA==", + "requires": { + "backo2": "1.0.2", + "base64-arraybuffer": "0.1.5", + "component-bind": "1.0.0", + "component-emitter": "1.2.1", + "debug": "~4.1.0", + "engine.io-client": "~3.4.0", + "has-binary2": "~1.0.2", + "has-cors": "1.1.0", + "indexof": "0.0.1", + "object-component": "0.0.3", + "parseqs": "0.0.5", + "parseuri": "0.0.5", + "socket.io-parser": "~3.3.0", + "to-array": "0.1.4" + }, + "dependencies": { + "component-emitter": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.2.1.tgz", + "integrity": "sha1-E3kY1teCg/ffemt8WmPhQOaUJeY=" + }, + "isarray": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.1.tgz", + "integrity": "sha1-o32U7ZzaLVmGXJ92/llu4fM4dB4=" + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "socket.io-parser": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-3.3.0.tgz", + "integrity": "sha512-hczmV6bDgdaEbVqhAeVMM/jfUfzuEZHsQg6eOmLgJht6G3mPKMxYm75w2+qhAQZ+4X+1+ATZ+QFKeOZD5riHng==", + "requires": { + "component-emitter": "1.2.1", + "debug": "~3.1.0", + "isarray": "2.0.1" + }, + "dependencies": { + "debug": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "requires": { + "ms": "2.0.0" + } + } + } + } + } + }, + "socket.io-parser": { + "version": "3.4.0", + "resolved": 
"https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-3.4.0.tgz", + "integrity": "sha512-/G/VOI+3DBp0+DJKW4KesGnQkQPFmUCbA/oO2QGT6CWxU7hLGWqU3tyuzeSK/dqcyeHsQg1vTe9jiZI8GU9SCQ==", + "requires": { + "component-emitter": "1.2.1", + "debug": "~4.1.0", + "isarray": "2.0.1" + }, + "dependencies": { + "component-emitter": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.2.1.tgz", + "integrity": "sha1-E3kY1teCg/ffemt8WmPhQOaUJeY=" + }, + "isarray": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.1.tgz", + "integrity": "sha1-o32U7ZzaLVmGXJ92/llu4fM4dB4=" + } + } + }, + "sockjs": { + "version": "0.3.19", + "resolved": "https://registry.npmjs.org/sockjs/-/sockjs-0.3.19.tgz", + "integrity": "sha512-V48klKZl8T6MzatbLlzzRNhMepEys9Y4oGFpypBFFn1gLI/QQ9HtLLyWJNbPlwGLelOVOEijUbTTJeLLI59jLw==", + "requires": { + "faye-websocket": "^0.10.0", + "uuid": "^3.0.1" + }, + "dependencies": { + "faye-websocket": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.10.0.tgz", + "integrity": "sha1-TkkvjQTftviQA1B/btvy1QHnxvQ=", + "requires": { + "websocket-driver": ">=0.5.1" + } + } + } + }, + "sockjs-client": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/sockjs-client/-/sockjs-client-1.1.4.tgz", + "integrity": "sha1-W6vjhrd15M8U51IJEUUmVAFsixI=", + "requires": { + "debug": "^2.6.6", + "eventsource": "0.1.6", + "faye-websocket": "~0.11.0", + "inherits": "^2.0.1", + "json3": "^3.3.2", + "url-parse": "^1.1.8" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "sort-keys": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-2.0.0.tgz", + "integrity": "sha1-ZYU1WEhh7JfXMNbPQYIuH1ZoQSg=", + "requires": { + "is-plain-obj": "^1.0.0" + } + }, + "sort-keys-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/sort-keys-length/-/sort-keys-length-1.0.1.tgz", + "integrity": "sha1-nLb09OnkgVWmqgZx7dM2/xR5oYg=", + "requires": { + "sort-keys": "^1.0.0" + }, + "dependencies": { + "sort-keys": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-1.1.2.tgz", + "integrity": "sha1-RBttTTRnmPG05J6JIK37oOVD+a0=", + "requires": { + "is-plain-obj": "^1.0.0" + } + } + } + }, + "source-list-map": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/source-list-map/-/source-list-map-2.0.1.tgz", + "integrity": "sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw==" + }, + "source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=" + }, + "source-map-resolve": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.3.tgz", + "integrity": "sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==", + "requires": { + "atob": "^2.1.2", + "decode-uri-component": "^0.2.0", + "resolve-url": "^0.2.1", + "source-map-url": "^0.4.0", + "urix": "^0.1.0" + } + }, 
+ "source-map-support": { + "version": "0.5.16", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.16.tgz", + "integrity": "sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ==", + "requires": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } + } + }, + "source-map-url": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.0.tgz", + "integrity": "sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM=" + }, + "space-separated-tokens": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-1.1.4.tgz", + "integrity": "sha512-UyhMSmeIqZrQn2UdjYpxEkwY9JUrn8pP+7L4f91zRzOQuI8MF1FGLfYU9DKCYeLdo7LXMxwrX5zKFy7eeeVHuA==" + }, + "spdx-correct": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", + "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==", + "requires": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-exceptions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz", + "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==" + }, + "spdx-expression-parse": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", + "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", + "requires": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-license-ids": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz", + "integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==" + }, + "spdy": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/spdy/-/spdy-4.0.1.tgz", + "integrity": "sha512-HeZS3PBdMA+sZSu0qwpCxl3DeALD5ASx8pAX0jZdKXSpPWbQ6SYGnlg3BBmYLx5LtiZrmkAZfErCm2oECBcioA==", + "requires": { + "debug": "^4.1.0", + "handle-thing": "^2.0.0", + "http-deceiver": "^1.2.7", + "select-hose": "^2.0.0", + "spdy-transport": "^3.0.0" + } + }, + "spdy-transport": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/spdy-transport/-/spdy-transport-3.0.0.tgz", + "integrity": "sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==", + "requires": { + "debug": "^4.1.0", + "detect-node": "^2.0.4", + "hpack.js": "^2.1.6", + "obuf": "^1.1.2", + "readable-stream": "^3.0.6", + "wbuf": "^1.7.3" + }, + "dependencies": { + "readable-stream": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", + "integrity": "sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } + }, + "split-string": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz", + "integrity": 
"sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==", + "requires": { + "extend-shallow": "^3.0.0" + } + }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" + }, + "squeak": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/squeak/-/squeak-1.3.0.tgz", + "integrity": "sha1-MwRQN7ZDiLVnZ0uEMiplIQc5FsM=", + "requires": { + "chalk": "^1.0.0", + "console-stream": "^0.1.1", + "lpad-align": "^1.0.1" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" + }, + "ansi-styles": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", + "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=" + }, + "chalk": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", + "requires": { + "ansi-styles": "^2.2.1", + "escape-string-regexp": "^1.0.2", + "has-ansi": "^2.0.0", + "strip-ansi": "^3.0.0", + "supports-color": "^2.0.0" + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "requires": { + "ansi-regex": "^2.0.0" + } + }, + "supports-color": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", + "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=" + } + } + }, + "srcset": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/srcset/-/srcset-1.0.0.tgz", + "integrity": "sha1-pWad4StC87HV6D7QPHEEb8SPQe8=", + "requires": { + "array-uniq": "^1.0.2", + "number-is-nan": "^1.0.0" + } + }, + "sshpk": { + "version": "1.16.1", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", + "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", + "requires": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", + "tweetnacl": "~0.14.0" + } + }, + "ssri": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ssri/-/ssri-6.0.1.tgz", + "integrity": "sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA==", + "requires": { + "figgy-pudding": "^3.5.1" + } + }, + "stable": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/stable/-/stable-0.1.8.tgz", + "integrity": "sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==" + }, + "stack-trace": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", + "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=" + }, + "stack-utils": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-1.0.2.tgz", + "integrity": "sha512-MTX+MeG5U994cazkjd/9KNAapsHnibjMLnfXodlkXw76JEea0UiNzrqidzo1emMwk7w5Qhc9jd4Bn9TBb1MFwA==" + }, + "stackframe": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/stackframe/-/stackframe-1.1.1.tgz", + "integrity": "sha512-0PlYhdKh6AfFxRyK/v+6/k+/mMfyiEBbTM5L94D0ZytQnJ166wuwoTYLHFWGbs2dpA8Rgq763KGWmN1EQEYHRQ==" + }, + "state-toggle": { 
+ "version": "1.0.2", + "resolved": "https://registry.npmjs.org/state-toggle/-/state-toggle-1.0.2.tgz", + "integrity": "sha512-8LpelPGR0qQM4PnfLiplOQNJcIN1/r2Gy0xKB2zKnIW2YzPMt2sR4I/+gtPjhN7Svh9kw+zqEg2SFwpBO9iNiw==" + }, + "static-extend": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/static-extend/-/static-extend-0.1.2.tgz", + "integrity": "sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY=", + "requires": { + "define-property": "^0.2.5", + "object-copy": "^0.1.0" + }, + "dependencies": { + "define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "requires": { + "is-descriptor": "^0.1.0" + } + } + } + }, + "statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=" + }, + "stream-browserify": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-2.0.2.tgz", + "integrity": "sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg==", + "requires": { + "inherits": "~2.0.1", + "readable-stream": "^2.0.2" + } + }, + "stream-each": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/stream-each/-/stream-each-1.2.3.tgz", + "integrity": "sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw==", + "requires": { + "end-of-stream": "^1.1.0", + "stream-shift": "^1.0.0" + } + }, + "stream-http": { + "version": "2.8.3", + "resolved": "https://registry.npmjs.org/stream-http/-/stream-http-2.8.3.tgz", + "integrity": "sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw==", + "requires": { + "builtin-status-codes": "^3.0.0", + "inherits": "^2.0.1", + "readable-stream": "^2.3.6", + "to-arraybuffer": "^1.0.0", + "xtend": "^4.0.0" + } + }, + "stream-parser": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/stream-parser/-/stream-parser-0.3.1.tgz", + "integrity": "sha1-FhhUhpRCACGhGC/wrxkRwSl2F3M=", + "requires": { + "debug": "2" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "stream-shift": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz", + "integrity": "sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==" + }, + "strict-uri-encode": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz", + "integrity": "sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM=" + }, + "string-length": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-length/-/string-length-3.1.0.tgz", + "integrity": "sha512-Ttp5YvkGm5v9Ijagtaz1BnN+k9ObpvS0eIBblPMp2YWL8FBmi9qblQ9fexc2k/CXFgrTIteU3jAw3payCnwSTA==", + "optional": true, + "requires": { + "astral-regex": "^1.0.0", + "strip-ansi": "^5.2.0" + } + }, + "string-similarity": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/string-similarity/-/string-similarity-1.2.2.tgz", + "integrity": 
"sha512-IoHUjcw3Srl8nsPlW04U3qwWPk3oG2ffLM0tN853d/E/JlIvcmZmDY2Kz5HzKp4lEi2T7QD7Zuvjq/1rDw+XcQ==", + "requires": { + "lodash.every": "^4.6.0", + "lodash.flattendeep": "^4.4.0", + "lodash.foreach": "^4.5.0", + "lodash.map": "^4.6.0", + "lodash.maxby": "^4.6.0" + } + }, + "string-width": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", + "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "dependencies": { + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "requires": { + "ansi-regex": "^5.0.0" + } + } + } + }, + "string.prototype.trimleft": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.1.tgz", + "integrity": "sha512-iu2AGd3PuP5Rp7x2kEZCrB2Nf41ehzh+goo8TV7z8/XDBbsvc6HQIlUl9RjkZ4oyrW1XM5UwlGl1oVEaDjg6Ag==", + "requires": { + "define-properties": "^1.1.3", + "function-bind": "^1.1.1" + } + }, + "string.prototype.trimright": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.1.tgz", + "integrity": "sha512-qFvWL3/+QIgZXVmJBfpHmxLB7xsUXz6HsUmP8+5dRaC3Q7oKUv9Vo6aMCRZC1smrtyECFsIT30PqBJ1gTjAs+g==", + "requires": { + "define-properties": "^1.1.3", + "function-bind": "^1.1.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + }, + "stringify-entities": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-1.3.2.tgz", + "integrity": "sha512-nrBAQClJAPN2p+uGCVJRPIPakKeKWZ9GtBCmormE7pWOSlHat7+x5A8gx85M7HM5Dt0BP3pP5RhVW77WdbJJ3A==", + "requires": { + "character-entities-html4": "^1.0.0", + "character-entities-legacy": "^1.0.0", + "is-alphanumerical": "^1.0.0", + "is-hexadecimal": "^1.0.0" + } + }, + "stringify-object": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/stringify-object/-/stringify-object-3.3.0.tgz", + "integrity": "sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw==", + "requires": { + "get-own-enumerable-property-symbols": "^3.0.0", + "is-obj": "^1.0.1", + "is-regexp": "^1.0.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "requires": { + "ansi-regex": "^4.1.0" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==" + } + } + }, + "strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=" + }, + "strip-bom-string": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz", + "integrity": "sha1-5SEekiQ2n7uB1jOi8ABE3IztrZI=" + }, + "strip-comments": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/strip-comments/-/strip-comments-1.0.2.tgz", + "integrity": "sha512-kL97alc47hoyIQSV165tTt9rG5dn4w1dNnBhOQ3bOU1Nc1hel09jnXANaHJ7vzHLd4Ju8kseDGzlev96pghLFw==", + "requires": { + "babel-extract-comments": "^1.0.0", + "babel-plugin-transform-object-rest-spread": "^6.26.0" + } + }, + "strip-dirs": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/strip-dirs/-/strip-dirs-2.1.0.tgz", + "integrity": "sha512-JOCxOeKLm2CAS73y/U4ZeZPTkE+gNVCzKt7Eox84Iej1LT/2pTWYpZKJuxwQpvX1LiZb1xokNR7RLfuBAa7T3g==", + "requires": { + "is-natural-number": "^4.0.1" + } + }, + "strip-eof": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", + "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=" + }, + "strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==" + }, + "strip-indent": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-1.0.1.tgz", + "integrity": "sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI=", + "requires": { + "get-stdin": "^4.0.1" + } + }, + "strip-json-comments": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.0.1.tgz", + "integrity": "sha512-VTyMAUfdm047mwKl+u79WIdrZxtFtn+nBxHeb844XBQ9uMNTuTHdx2hc5RiAJYqwTj3wc/xe5HLSdJSkJ+WfZw==" + }, + "strip-outer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/strip-outer/-/strip-outer-1.0.1.tgz", + "integrity": "sha512-k55yxKHwaXnpYGsOzg4Vl8+tDrWylxDEpknGjhTiZB8dFRU5rTo9CAzeycivxV3s+zlTKwrs6WxMxR95n26kwg==", + "requires": { + "escape-string-regexp": "^1.0.2" + } + }, + "style-loader": { + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/style-loader/-/style-loader-0.23.1.tgz", + "integrity": "sha512-XK+uv9kWwhZMZ1y7mysB+zoihsEj4wneFWAS5qoiLwzW0WzSqMrrsIy+a3zkQJq0ipFtBpX5W3MqyRIBF/WFGg==", + "requires": { + "loader-utils": "^1.1.0", + "schema-utils": "^1.0.0" + }, + "dependencies": { + "schema-utils": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", + "integrity": "sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==", + "requires": { + "ajv": "^6.1.0", + "ajv-errors": "^1.0.0", + "ajv-keywords": "^3.1.0" + } + } + } + }, + "style-to-object": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.2.3.tgz", + "integrity": "sha512-1d/k4EY2N7jVLOqf2j04dTc37TPOv/hHxZmvpg8Pdh8UYydxeu/C1W1U4vD8alzf5V2Gt7rLsmkr4dxAlDm9ng==", + "requires": { + "inline-style-parser": "0.1.1" + } + }, + "styled-components": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/styled-components/-/styled-components-4.4.1.tgz", + "integrity": "sha512-RNqj14kYzw++6Sr38n7197xG33ipEOktGElty4I70IKzQF1jzaD1U4xQ+Ny/i03UUhHlC5NWEO+d8olRCDji6g==", + "requires": { + "@babel/helper-module-imports": "^7.0.0", + "@babel/traverse": "^7.0.0", + "@emotion/is-prop-valid": "^0.8.1", + "@emotion/unitless": "^0.7.0", + "babel-plugin-styled-components": ">= 1", + "css-to-react-native": "^2.2.2", + "memoize-one": "^5.0.0", + "merge-anything": "^2.2.4", + "prop-types": 
"^15.5.4", + "react-is": "^16.6.0", + "stylis": "^3.5.0", + "stylis-rule-sheet": "^0.0.10", + "supports-color": "^5.5.0" + } + }, + "styled-system": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/styled-system/-/styled-system-5.1.4.tgz", + "integrity": "sha512-b1EdfZ41NDcR6vnvZauylhpFvSjsFl1yyQEUA+v3rLjcKdM//EIFY195Nh3YLwgj+hWIWsG0Tk1Kl0tq1xLw8Q==", + "requires": { + "@styled-system/background": "^5.1.2", + "@styled-system/border": "^5.1.2", + "@styled-system/color": "^5.1.2", + "@styled-system/core": "^5.1.2", + "@styled-system/flexbox": "^5.1.2", + "@styled-system/grid": "^5.1.2", + "@styled-system/layout": "^5.1.2", + "@styled-system/position": "^5.1.2", + "@styled-system/shadow": "^5.1.2", + "@styled-system/space": "^5.1.2", + "@styled-system/typography": "^5.1.2", + "@styled-system/variant": "^5.1.4", + "object-assign": "^4.1.1" + } + }, + "stylehacks": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/stylehacks/-/stylehacks-4.0.3.tgz", + "integrity": "sha512-7GlLk9JwlElY4Y6a/rmbH2MhVlTyVmiJd1PfTCqFaIBEGMYNsrO/v3SeGTdhBThLg4Z+NbOk/qFMwCa+J+3p/g==", + "requires": { + "browserslist": "^4.0.0", + "postcss": "^7.0.0", + "postcss-selector-parser": "^3.0.0" + }, + "dependencies": { + "browserslist": { + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.8.3.tgz", + "integrity": "sha512-iU43cMMknxG1ClEZ2MDKeonKE1CCrFVkQK2AqO2YWFmvIrx4JWrvQ4w4hQez6EpVI8rHTtqh/ruHHDHSOKxvUg==", + "requires": { + "caniuse-lite": "^1.0.30001017", + "electron-to-chromium": "^1.3.322", + "node-releases": "^1.1.44" + } + }, + "postcss-selector-parser": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-3.1.1.tgz", + "integrity": "sha1-T4dfSvsMllc9XPTXQBGu4lCn6GU=", + "requires": { + "dot-prop": "^4.1.1", + "indexes-of": "^1.0.1", + "uniq": "^1.0.1" + } + } + } + }, + "stylis": { + "version": "3.5.4", + "resolved": "https://registry.npmjs.org/stylis/-/stylis-3.5.4.tgz", + "integrity": "sha512-8/3pSmthWM7lsPBKv7NXkzn2Uc9W7NotcwGNpJaa3k7WMM1XDCA4MgT5k/8BIexd5ydZdboXtU90XH9Ec4Bv/Q==" + }, + "stylis-rule-sheet": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/stylis-rule-sheet/-/stylis-rule-sheet-0.0.10.tgz", + "integrity": "sha512-nTbZoaqoBnmK+ptANthb10ZRZOGC+EmTLLUxeYIuHNkEKcmKgXX1XWKkUBT2Ac4es3NybooPe0SmvKdhKJZAuw==" + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "requires": { + "has-flag": "^3.0.0" + } + }, + "svgo": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/svgo/-/svgo-1.3.2.tgz", + "integrity": "sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw==", + "requires": { + "chalk": "^2.4.1", + "coa": "^2.0.2", + "css-select": "^2.0.0", + "css-select-base-adapter": "^0.1.1", + "css-tree": "1.0.0-alpha.37", + "csso": "^4.0.2", + "js-yaml": "^3.13.1", + "mkdirp": "~0.5.1", + "object.values": "^1.1.0", + "sax": "~1.2.4", + "stable": "^0.1.8", + "unquote": "~1.1.1", + "util.promisify": "~1.0.0" + }, + "dependencies": { + "css-select": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-2.1.0.tgz", + "integrity": "sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ==", + "requires": { + "boolbase": "^1.0.0", + "css-what": 
"^3.2.1", + "domutils": "^1.7.0", + "nth-check": "^1.0.2" + } + }, + "css-what": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-3.2.1.tgz", + "integrity": "sha512-WwOrosiQTvyms+Ti5ZC5vGEK0Vod3FTt1ca+payZqvKuGJF+dq7bG63DstxtN0dpm6FxY27a/zS3Wten+gEtGw==" + }, + "domutils": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-1.7.0.tgz", + "integrity": "sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg==", + "requires": { + "dom-serializer": "0", + "domelementtype": "1" + } + } + } + }, + "symbol-observable": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.2.0.tgz", + "integrity": "sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ==" + }, + "table": { + "version": "5.4.6", + "resolved": "https://registry.npmjs.org/table/-/table-5.4.6.tgz", + "integrity": "sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug==", + "requires": { + "ajv": "^6.10.2", + "lodash": "^4.17.14", + "slice-ansi": "^2.1.0", + "string-width": "^3.0.0" + }, + "dependencies": { + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + } + } + }, + "tapable": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-1.1.3.tgz", + "integrity": "sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA==" + }, + "tar": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/tar/-/tar-5.0.5.tgz", + "integrity": "sha512-MNIgJddrV2TkuwChwcSNds/5E9VijOiw7kAc1y5hTNJoLDSuIyid2QtLYiCYNnICebpuvjhPQZsXwUL0O3l7OQ==", + "requires": { + "chownr": "^1.1.3", + "fs-minipass": "^2.0.0", + "minipass": "^3.0.0", + "minizlib": "^2.1.0", + "mkdirp": "^0.5.0", + "yallist": "^4.0.0" + }, + "dependencies": { + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } + }, + "tar-fs": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.0.0.tgz", + "integrity": "sha512-vaY0obB6Om/fso8a8vakQBzwholQ7v5+uy+tF3Ozvxv1KNezmVQAiWtcNmMHFSFPqL3dJA8ha6gdtFbfX9mcxA==", + "requires": { + "chownr": "^1.1.1", + "mkdirp": "^0.5.1", + "pump": "^3.0.0", + "tar-stream": "^2.0.0" + } + }, + "tar-stream": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.1.0.tgz", + "integrity": "sha512-+DAn4Nb4+gz6WZigRzKEZl1QuJVOLtAwwF+WUxy1fJ6X63CaGaUAxJRD2KEn1OMfcbCjySTYpNC6WmfQoIEOdw==", + "requires": { + "bl": "^3.0.0", + "end-of-stream": "^1.4.1", + "fs-constants": "^1.0.0", + 
"inherits": "^2.0.3", + "readable-stream": "^3.1.1" + }, + "dependencies": { + "readable-stream": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", + "integrity": "sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } + }, + "temp-dir": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/temp-dir/-/temp-dir-1.0.0.tgz", + "integrity": "sha1-CnwOom06Oa+n4OvqnB/AvE2qAR0=" + }, + "tempfile": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tempfile/-/tempfile-2.0.0.tgz", + "integrity": "sha1-awRGhWqbERTRhW/8vlCczLCXcmU=", + "requires": { + "temp-dir": "^1.0.0", + "uuid": "^3.0.1" + } + }, + "term-size": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/term-size/-/term-size-2.1.1.tgz", + "integrity": "sha512-UqvQSch04R+69g4RDhrslmGvGL3ucDRX/U+snYW0Mab4uCAyKSndUksaoqlJ81QKSpRnIsuOYQCbC2ZWx2896A==" + }, + "terser": { + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/terser/-/terser-4.6.1.tgz", + "integrity": "sha512-w0f2OWFD7ka3zwetgVAhNMeyzEbj39ht2Tb0qKflw9PmW9Qbo5tjTh01QJLkhO9t9RDDQYvk+WXqpECI2C6i2A==", + "requires": { + "commander": "^2.20.0", + "source-map": "~0.6.1", + "source-map-support": "~0.5.12" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } + } + }, + "terser-webpack-plugin": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-1.4.3.tgz", + "integrity": "sha512-QMxecFz/gHQwteWwSo5nTc6UaICqN1bMedC5sMtUc7y3Ha3Q8y6ZO0iCR8pq4RJC8Hjf0FEPEHZqcMB/+DFCrA==", + "requires": { + "cacache": "^12.0.2", + "find-cache-dir": "^2.1.0", + "is-wsl": "^1.1.0", + "schema-utils": "^1.0.0", + "serialize-javascript": "^2.1.2", + "source-map": "^0.6.1", + "terser": "^4.1.2", + "webpack-sources": "^1.4.0", + "worker-farm": "^1.7.0" + }, + "dependencies": { + "is-wsl": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-1.1.0.tgz", + "integrity": "sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0=" + }, + "schema-utils": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", + "integrity": "sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==", + "requires": { + "ajv": "^6.1.0", + "ajv-errors": "^1.0.0", + "ajv-keywords": "^3.1.0" + } + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } + } + }, + "text-extensions": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/text-extensions/-/text-extensions-1.9.0.tgz", + "integrity": "sha512-wiBrwC1EhBelW12Zy26JeOUkQ5mRu+5o8rpsJk5+2t+Y5vE7e842qtZDQ2g1NpX/29HdyFeJ4nSIhI47ENSxlQ==" + }, + "text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=" + }, + "through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" + }, + 
"through2": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", + "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "requires": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + } + }, + "thunky": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", + "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==" + }, + "timed-out": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz", + "integrity": "sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8=" + }, + "timers-browserify": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/timers-browserify/-/timers-browserify-2.0.11.tgz", + "integrity": "sha512-60aV6sgJ5YEbzUdn9c8kYGIqOubPoUdqQCul3SBAsRCZ40s6Y5cMcrW4dt3/k/EsbLVJNl9n6Vz3fTc+k2GeKQ==", + "requires": { + "setimmediate": "^1.0.4" + } + }, + "timm": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/timm/-/timm-1.6.2.tgz", + "integrity": "sha512-IH3DYDL1wMUwmIlVmMrmesw5lZD6N+ZOAFWEyLrtpoL9Bcrs9u7M/vyOnHzDD2SMs4irLkVjqxZbHrXStS/Nmw==" + }, + "timsort": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/timsort/-/timsort-0.3.0.tgz", + "integrity": "sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q=" + }, + "tiny-emitter": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tiny-emitter/-/tiny-emitter-2.1.0.tgz", + "integrity": "sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q==", + "optional": true + }, + "tinycolor2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/tinycolor2/-/tinycolor2-1.4.1.tgz", + "integrity": "sha1-9PrTM0R7wLB9TcjpIJ2POaisd+g=" + }, + "tlds": { + "version": "1.207.0", + "resolved": "https://registry.npmjs.org/tlds/-/tlds-1.207.0.tgz", + "integrity": "sha512-k7d7Q1LqjtAvhtEOs3yN14EabsNO8ZCoY6RESSJDB9lst3bTx3as/m1UuAeCKzYxiyhR1qq72ZPhpSf+qlqiwg==" + }, + "tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "requires": { + "os-tmpdir": "~1.0.2" + } + }, + "to-array": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/to-array/-/to-array-0.1.4.tgz", + "integrity": "sha1-F+bBH3PdTz10zaek/zI46a2b+JA=" + }, + "to-arraybuffer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz", + "integrity": "sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M=" + }, + "to-buffer": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/to-buffer/-/to-buffer-1.1.1.tgz", + "integrity": "sha512-lx9B5iv7msuFYE3dytT+KE5tap+rNYw+K4jVkb9R/asAb+pbBSM17jtunHplhBe6RRJdZx3Pn2Jph24O32mOVg==" + }, + "to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=" + }, + "to-object-path": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", + "integrity": "sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68=", + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": 
"sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + }, + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "to-readable-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/to-readable-stream/-/to-readable-stream-1.0.0.tgz", + "integrity": "sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q==" + }, + "to-regex": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/to-regex/-/to-regex-3.0.2.tgz", + "integrity": "sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==", + "requires": { + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "regex-not": "^1.0.2", + "safe-regex": "^1.1.0" + } + }, + "to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "requires": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + } + }, + "toidentifier": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", + "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" + }, + "tough-cookie": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", + "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", + "requires": { + "psl": "^1.1.24", + "punycode": "^1.4.1" + }, + "dependencies": { + "punycode": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" + } + } + }, + "trim": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/trim/-/trim-0.0.1.tgz", + "integrity": "sha1-WFhUf2spB1fulczMZm+1AITEYN0=" + }, + "trim-lines": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-1.1.2.tgz", + "integrity": "sha512-3GOuyNeTqk3FAqc3jOJtw7FTjYl94XBR5aD9QnDbK/T4CA9sW/J0l9RoaRPE9wyPP7NF331qnHnvJFBJ+IDkmQ==" + }, + "trim-newlines": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-1.0.0.tgz", + "integrity": "sha1-WIeWa7WCpFA6QetST301ARgVphM=" + }, + "trim-repeated": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/trim-repeated/-/trim-repeated-1.0.0.tgz", + "integrity": "sha1-42RqLqTokTEr9+rObPsFOAvAHCE=", + "requires": { + "escape-string-regexp": "^1.0.2" + } + }, + "trim-trailing-lines": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/trim-trailing-lines/-/trim-trailing-lines-1.1.2.tgz", + "integrity": "sha512-MUjYItdrqqj2zpcHFTkMa9WAv4JHTI6gnRQGPFLrt5L9a6tRMiDnIqYl8JBvu2d2Tc3lWJKQwlGCp0K8AvCM+Q==" + }, + "trough": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/trough/-/trough-1.0.4.tgz", + "integrity": "sha512-tdzBRDGWcI1OpPVmChbdSKhvSVurznZ8X36AYURAcl+0o2ldlCY2XPzyXNNxwJwwyIU+rIglTCG4kxtNKBQH7Q==" + }, + "true-case-path": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/true-case-path/-/true-case-path-2.2.1.tgz", + "integrity": "sha512-0z3j8R7MCjy10kc/g+qg7Ln3alJTodw9aDuVWZa3uiWqfuBMKeAeP2ocWcxoyM3D73yz3Jt/Pu4qPr4wHSdB/Q==" + }, + "ts-pnp": { + "version": "1.1.5", + "resolved": 
"https://registry.npmjs.org/ts-pnp/-/ts-pnp-1.1.5.tgz", + "integrity": "sha512-ti7OGMOUOzo66wLF3liskw6YQIaSsBgc4GOAlWRnIEj8htCxJUxskanMUoJOD6MDCRAXo36goXJZch+nOS0VMA==" + }, + "tslib": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.10.0.tgz", + "integrity": "sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ==" + }, + "tsutils": { + "version": "3.17.1", + "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.17.1.tgz", + "integrity": "sha512-kzeQ5B8H3w60nFY2g8cJIuH7JDpsALXySGtwGJ0p2LSjLgay3NdIpqq5SoOBe46bKDW2iq25irHCr8wjomUS2g==", + "requires": { + "tslib": "^1.8.1" + } + }, + "tty-browserify": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.0.tgz", + "integrity": "sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY=" + }, + "tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "requires": { + "safe-buffer": "^5.0.1" + } + }, + "tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" + }, + "type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", + "requires": { + "prelude-ls": "~1.1.2" + } + }, + "type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==" + }, + "type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "requires": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + } + }, + "type-of": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/type-of/-/type-of-2.0.1.tgz", + "integrity": "sha1-5yoXQYllaOn2KDeNgW1pEvfyOXI=" + }, + "typedarray": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=" + }, + "typedarray-to-buffer": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", + "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", + "requires": { + "is-typedarray": "^1.0.0" + } + }, + "ua-parser-js": { + "version": "0.7.21", + "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.21.tgz", + "integrity": "sha512-+O8/qh/Qj8CgC6eYBVBykMrNtp5Gebn4dlGD/kKXVkJNDwyrAwSIqwz8CDf+tsAIWVycKcku6gIXJ0qwx/ZXaQ==" + }, + "unbzip2-stream": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/unbzip2-stream/-/unbzip2-stream-1.3.3.tgz", + "integrity": "sha512-fUlAF7U9Ah1Q6EieQ4x4zLNejrRvDWUYmxXUpN3uziFYCHapjWFaCAnreY9bGgxzaMCFAPPpYNng57CypwJVhg==", + "requires": { + "buffer": "^5.2.1", + "through": "^2.3.8" + }, + "dependencies": { + "buffer": { + "version": "5.4.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.4.3.tgz", + "integrity": "sha512-zvj65TkFeIt3i6aj5bIvJDzjjQQGs4o/sNoezg1F1kYap9Nu2jcUdpwzRSJTHMMzG0H7bZkn4rNQpImhuxWX2A==", + "requires": { + "base64-js": "^1.0.2", + "ieee754": "^1.1.4" + } + } + } + }, + "unc-path-regex": 
{ + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/unc-path-regex/-/unc-path-regex-0.1.2.tgz", + "integrity": "sha1-5z3T17DXxe2G+6xrCufYxqadUPo=" + }, + "underscore.string": { + "version": "3.3.5", + "resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-3.3.5.tgz", + "integrity": "sha512-g+dpmgn+XBneLmXXo+sGlW5xQEt4ErkS3mgeN2GFbremYeMBSJKr9Wf2KJplQVaiPY/f7FN6atosWYNm9ovrYg==", + "requires": { + "sprintf-js": "^1.0.3", + "util-deprecate": "^1.0.2" + } + }, + "unherit": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/unherit/-/unherit-1.1.2.tgz", + "integrity": "sha512-W3tMnpaMG7ZY6xe/moK04U9fBhi6wEiCYHUW5Mop/wQHf12+79EQGwxYejNdhEz2mkqkBlGwm7pxmgBKMVUj0w==", + "requires": { + "inherits": "^2.0.1", + "xtend": "^4.0.1" + } + }, + "unicode-canonical-property-names-ecmascript": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz", + "integrity": "sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ==" + }, + "unicode-match-property-ecmascript": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz", + "integrity": "sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg==", + "requires": { + "unicode-canonical-property-names-ecmascript": "^1.0.4", + "unicode-property-aliases-ecmascript": "^1.0.4" + } + }, + "unicode-match-property-value-ecmascript": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.1.0.tgz", + "integrity": "sha512-hDTHvaBk3RmFzvSl0UVrUmC3PuW9wKVnpoUDYH0JDkSIovzw+J5viQmeYHxVSBptubnr7PbH2e0fnpDRQnQl5g==" + }, + "unicode-property-aliases-ecmascript": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.0.5.tgz", + "integrity": "sha512-L5RAqCfXqAwR3RriF8pM0lU0w4Ryf/GgzONwi6KnL1taJQa7x1TCxdJnILX59WIGOwR57IVxn7Nej0fz1Ny6fw==" + }, + "unified": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/unified/-/unified-6.2.0.tgz", + "integrity": "sha512-1k+KPhlVtqmG99RaTbAv/usu85fcSRu3wY8X+vnsEhIxNP5VbVIDiXnLqyKIG+UMdyTg0ZX9EI6k2AfjJkHPtA==", + "requires": { + "bail": "^1.0.0", + "extend": "^3.0.0", + "is-plain-obj": "^1.1.0", + "trough": "^1.0.0", + "vfile": "^2.0.0", + "x-is-string": "^0.1.0" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + }, + "unist-util-stringify-position": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-1.1.2.tgz", + "integrity": "sha512-pNCVrk64LZv1kElr0N1wPiHEUoXNVFERp+mlTg/s9R5Lwg87f9bM/3sQB99w+N9D/qnM9ar3+AKDBwo/gm/iQQ==" + }, + "vfile": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-2.3.0.tgz", + "integrity": "sha512-ASt4mBUHcTpMKD/l5Q+WJXNtshlWxOogYyGYYrg4lt/vuRjC1EFQtlAofL5VmtVNIZJzWYFJjzGWZ0Gw8pzW1w==", + "requires": { + "is-buffer": "^1.1.4", + "replace-ext": "1.0.0", + "unist-util-stringify-position": "^1.0.0", + "vfile-message": "^1.0.0" + } + }, + "vfile-message": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/vfile-message/-/vfile-message-1.1.1.tgz", + "integrity": "sha512-1WmsopSGhWt5laNir+633LszXvZ+Z/lxveBf6yhGsqnQIhlhzooZae7zV6YVM1Sdkw68dtAW3ow0pOdPANugvA==", + "requires": { + "unist-util-stringify-position": "^1.1.1" + } + } + } + }, + "union-value": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz", + "integrity": "sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==", + "requires": { + "arr-union": "^3.1.0", + "get-value": "^2.0.6", + "is-extendable": "^0.1.1", + "set-value": "^2.0.1" + } + }, + "uniq": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/uniq/-/uniq-1.0.1.tgz", + "integrity": "sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8=" + }, + "uniqs": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/uniqs/-/uniqs-2.0.0.tgz", + "integrity": "sha1-/+3ks2slKQaW5uFl1KWe25mOawI=" + }, + "unique-filename": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-1.1.1.tgz", + "integrity": "sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==", + "requires": { + "unique-slug": "^2.0.0" + } + }, + "unique-slug": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-2.0.2.tgz", + "integrity": "sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w==", + "requires": { + "imurmurhash": "^0.1.4" + } + }, + "unique-string": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-1.0.0.tgz", + "integrity": "sha1-nhBXzKhRq7kzmPizOuGHuZyuwRo=", + "requires": { + "crypto-random-string": "^1.0.0" + } + }, + "unist-builder": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/unist-builder/-/unist-builder-1.0.4.tgz", + "integrity": "sha512-v6xbUPP7ILrT15fHGrNyHc1Xda8H3xVhP7/HAIotHOhVPjH5dCXA097C3Rry1Q2O+HbOLCao4hfPB+EYEjHgVg==", + "requires": { + "object-assign": "^4.1.0" + } + }, + "unist-util-generated": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-1.1.5.tgz", + "integrity": "sha512-1TC+NxQa4N9pNdayCYA1EGUOCAO0Le3fVp7Jzns6lnua/mYgwHo0tz5WUAfrdpNch1RZLHc61VZ1SDgrtNXLSw==" + }, + "unist-util-is": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-3.0.0.tgz", + "integrity": "sha512-sVZZX3+kspVNmLWBPAB6r+7D9ZgAFPNWm66f7YNb420RlQSbn+n8rG8dGZSkrER7ZIXGQYNm5pqC3v3HopH24A==" + }, + "unist-util-modify-children": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/unist-util-modify-children/-/unist-util-modify-children-1.1.5.tgz", + "integrity": "sha512-XeL5qqyoS3TEueCKEzHusWXE9JBDJPE4rl6LmcLOwlzv0RIZrcMNqKx02GSK3Ms4v45ldu+ltPxG42FBMVdPZw==", + "requires": { + "array-iterate": "^1.0.0" + } + }, + "unist-util-position": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-3.0.4.tgz", + "integrity": "sha512-tWvIbV8goayTjobxDIr4zVTyG+Q7ragMSMeKC3xnPl9xzIc0+she8mxXLM3JVNDDsfARPbCd3XdzkyLdo7fF3g==" + }, + "unist-util-remove-position": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-1.1.4.tgz", + "integrity": "sha512-tLqd653ArxJIPnKII6LMZwH+mb5q+n/GtXQZo6S6csPRs5zB0u79Yw8ouR3wTw8wxvdJFhpP6Y7jorWdCgLO0A==", + "requires": { + "unist-util-visit": "^1.1.0" + } + }, + "unist-util-select": { + "version": "1.5.0", + 
"resolved": "https://registry.npmjs.org/unist-util-select/-/unist-util-select-1.5.0.tgz", + "integrity": "sha1-qTwr6MD2U4J4A7gTMa3sKqJM2TM=", + "requires": { + "css-selector-parser": "^1.1.0", + "debug": "^2.2.0", + "nth-check": "^1.0.1" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "unist-util-stringify-position": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-2.0.2.tgz", + "integrity": "sha512-nK5n8OGhZ7ZgUwoUbL8uiVRwAbZyzBsB/Ddrlbu6jwwubFza4oe15KlyEaLNMXQW1svOQq4xesUeqA85YrIUQA==", + "requires": { + "@types/unist": "^2.0.2" + } + }, + "unist-util-visit": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-1.4.1.tgz", + "integrity": "sha512-AvGNk7Bb//EmJZyhtRUnNMEpId/AZ5Ph/KUpTI09WHQuDZHKovQ1oEv3mfmKpWKtoMzyMC4GLBm1Zy5k12fjIw==", + "requires": { + "unist-util-visit-parents": "^2.0.0" + } + }, + "unist-util-visit-children": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/unist-util-visit-children/-/unist-util-visit-children-1.1.3.tgz", + "integrity": "sha512-/GQ8KNRrG+qD30H76FZNc6Ok+8XTu8lxJByN5LnQ4eQfqxda2gP0CPsCX63BRB26ZRMNf6i1c+jlvNlqysEoFg==" + }, + "unist-util-visit-parents": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-2.1.2.tgz", + "integrity": "sha512-DyN5vD4NE3aSeB+PXYNKxzGsfocxp6asDc2XXE3b0ekO2BaRUpBicbbUygfSvYfUz1IkmjFR1YF7dPklraMZ2g==", + "requires": { + "unist-util-is": "^3.0.0" + } + }, + "universalify": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==" + }, + "unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" + }, + "unquote": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/unquote/-/unquote-1.1.1.tgz", + "integrity": "sha1-j97XMk7G6IoP+LkF58CYzcCG1UQ=" + }, + "unset-value": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unset-value/-/unset-value-1.0.0.tgz", + "integrity": "sha1-g3aHP30jNRef+x5vw6jtDfyKtVk=", + "requires": { + "has-value": "^0.3.1", + "isobject": "^3.0.0" + }, + "dependencies": { + "has-value": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/has-value/-/has-value-0.3.1.tgz", + "integrity": "sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8=", + "requires": { + "get-value": "^2.0.3", + "has-values": "^0.1.4", + "isobject": "^2.0.0" + }, + "dependencies": { + "isobject": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz", + "integrity": "sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=", + "requires": { + "isarray": "1.0.0" + } + } + } + }, + "has-values": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/has-values/-/has-values-0.1.4.tgz", + "integrity": "sha1-bWHeldkd/Km5oCCJrThL/49it3E=" + } + } + }, + "upath": { + "version": "1.2.0", + "resolved": 
"https://registry.npmjs.org/upath/-/upath-1.2.0.tgz", + "integrity": "sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==" + }, + "update-notifier": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/update-notifier/-/update-notifier-3.0.1.tgz", + "integrity": "sha512-grrmrB6Zb8DUiyDIaeRTBCkgISYUgETNe7NglEbVsrLWXeESnlCSP50WfRSj/GmzMPl6Uchj24S/p80nP/ZQrQ==", + "requires": { + "boxen": "^3.0.0", + "chalk": "^2.0.1", + "configstore": "^4.0.0", + "has-yarn": "^2.1.0", + "import-lazy": "^2.1.0", + "is-ci": "^2.0.0", + "is-installed-globally": "^0.1.0", + "is-npm": "^3.0.0", + "is-yarn-global": "^0.3.0", + "latest-version": "^5.0.0", + "semver-diff": "^2.0.0", + "xdg-basedir": "^3.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" + }, + "boxen": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/boxen/-/boxen-3.2.0.tgz", + "integrity": "sha512-cU4J/+NodM3IHdSL2yN8bqYqnmlBTidDR4RC7nJs61ZmtGz8VZzM3HLQX0zY5mrSmPtR3xWwsq2jOUQqFZN8+A==", + "requires": { + "ansi-align": "^3.0.0", + "camelcase": "^5.3.1", + "chalk": "^2.4.2", + "cli-boxes": "^2.2.0", + "string-width": "^3.0.0", + "term-size": "^1.2.0", + "type-fest": "^0.3.0", + "widest-line": "^2.0.0" + } + }, + "configstore": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/configstore/-/configstore-4.0.0.tgz", + "integrity": "sha512-CmquAXFBocrzaSM8mtGPMM/HiWmyIpr4CcJl/rgY2uCObZ/S7cKU0silxslqJejl+t/T9HS8E0PUNQD81JGUEQ==", + "requires": { + "dot-prop": "^4.1.0", + "graceful-fs": "^4.1.2", + "make-dir": "^1.0.0", + "unique-string": "^1.0.0", + "write-file-atomic": "^2.0.0", + "xdg-basedir": "^3.0.0" + } + }, + "cross-spawn": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", + "integrity": "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=", + "requires": { + "lru-cache": "^4.0.1", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + }, + "execa": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz", + "integrity": "sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c=", + "requires": { + "cross-spawn": "^5.0.1", + "get-stream": "^3.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + } + }, + "get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + }, + "lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "requires": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "make-dir": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz", + "integrity": 
"sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==", + "requires": { + "pify": "^3.0.0" + } + }, + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "term-size": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/term-size/-/term-size-1.2.0.tgz", + "integrity": "sha1-RYuDiH8oj8Vtb/+/rSYuJmOO+mk=", + "requires": { + "execa": "^0.7.0" + } + }, + "type-fest": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.3.1.tgz", + "integrity": "sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ==" + }, + "widest-line": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-2.0.1.tgz", + "integrity": "sha512-Ba5m9/Fa4Xt9eb2ELXt77JxVDV8w7qQrH0zS/TWSJdLyAwQjWoOzpzj5lwVftDz6n/EOu3tNACS84v509qwnJA==", + "requires": { + "string-width": "^2.1.1" + }, + "dependencies": { + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "requires": { + "ansi-regex": "^3.0.0" + } + } + } + } + } + }, + "uri-js": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", + "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", + "requires": { + "punycode": "^2.1.0" + } + }, + "urix": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/urix/-/urix-0.1.0.tgz", + "integrity": "sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=" + }, + "url": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/url/-/url-0.11.0.tgz", + "integrity": "sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE=", + "requires": { + "punycode": "1.3.2", + "querystring": "0.2.0" + }, + "dependencies": { + "punycode": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=" + } + } + }, + "url-loader": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/url-loader/-/url-loader-1.1.2.tgz", + "integrity": "sha512-dXHkKmw8FhPqu8asTc1puBfe3TehOCo2+RmOOev5suNCIYBcT626kxiWg1NBVkwc4rO8BGa7gP70W7VXuqHrjg==", + "requires": { + "loader-utils": "^1.1.0", + "mime": "^2.0.3", + "schema-utils": "^1.0.0" + }, + "dependencies": { + "schema-utils": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", + "integrity": "sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==", + "requires": { + "ajv": "^6.1.0", + "ajv-errors": "^1.0.0", + "ajv-keywords": "^3.1.0" + } + } + } + }, + "url-parse": { + "version": "1.4.7", + "resolved": 
"https://registry.npmjs.org/url-parse/-/url-parse-1.4.7.tgz", + "integrity": "sha512-d3uaVyzDB9tQoSXFvuSUNFibTd9zxd2bkVrDRvF5TmvWWQwqE4lgYJ5m+x1DbecWkw+LK4RNl2CU1hHuOKPVlg==", + "requires": { + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" + } + }, + "url-parse-lax": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", + "integrity": "sha1-FrXK/Afb42dsGxmZF3gj1lA6yww=", + "requires": { + "prepend-http": "^2.0.0" + } + }, + "url-regex": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/url-regex/-/url-regex-4.1.1.tgz", + "integrity": "sha512-ViSDgDPNKkrQHI81GLCjdDN+Rsk3tAW/uLXlBOJxtcHzWZjta58Z0APXhfXzS89YszsheMnEvXeDXsWUB53wwA==", + "requires": { + "ip-regex": "^1.0.1", + "tlds": "^1.187.0" + }, + "dependencies": { + "ip-regex": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-1.0.3.tgz", + "integrity": "sha1-3FiQdvZZ9BnCIgOaMzFvHHOH7/0=" + } + } + }, + "url-to-options": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/url-to-options/-/url-to-options-1.0.1.tgz", + "integrity": "sha1-FQWgOiiaSMvXpDTvuu7FBV9WM6k=" + }, + "use": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/use/-/use-3.1.1.tgz", + "integrity": "sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==" + }, + "utif": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/utif/-/utif-2.0.1.tgz", + "integrity": "sha512-Z/S1fNKCicQTf375lIP9G8Sa1H/phcysstNrrSdZKj1f9g58J4NMgb5IgiEZN9/nLMPDwF0W7hdOe9Qq2IYoLg==", + "requires": { + "pako": "^1.0.5" + } + }, + "util": { + "version": "0.11.1", + "resolved": "https://registry.npmjs.org/util/-/util-0.11.1.tgz", + "integrity": "sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ==", + "requires": { + "inherits": "2.0.3" + }, + "dependencies": { + "inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + } + } + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + }, + "util.promisify": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/util.promisify/-/util.promisify-1.0.0.tgz", + "integrity": "sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA==", + "requires": { + "define-properties": "^1.1.2", + "object.getownpropertydescriptors": "^2.0.3" + } + }, + "utila": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/utila/-/utila-0.4.0.tgz", + "integrity": "sha1-ihagXURWV6Oupe7MWxKk+lN5dyw=" + }, + "utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" + }, + "uuid": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.3.tgz", + "integrity": "sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ==" + }, + "v8-compile-cache": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz", + "integrity": "sha512-usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g==" + }, + "valid-url": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/valid-url/-/valid-url-1.0.9.tgz", + 
"integrity": "sha1-HBRHm0DxOXp1eC8RXkCGRHQzogA=" + }, + "validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "requires": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" + }, + "vendors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/vendors/-/vendors-1.0.3.tgz", + "integrity": "sha512-fOi47nsJP5Wqefa43kyWSg80qF+Q3XA6MUkgi7Hp1HQaKDQW4cQrK2D0P7mmbFtsV1N89am55Yru/nyEwRubcw==" + }, + "verror": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "requires": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + } + }, + "vfile": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-3.0.1.tgz", + "integrity": "sha512-y7Y3gH9BsUSdD4KzHsuMaCzRjglXN0W2EcMf0gpvu6+SbsGhMje7xDc8AEoeXy6mIwCKMI6BkjMsRjzQbhMEjQ==", + "requires": { + "is-buffer": "^2.0.0", + "replace-ext": "1.0.0", + "unist-util-stringify-position": "^1.0.0", + "vfile-message": "^1.0.0" + }, + "dependencies": { + "unist-util-stringify-position": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-1.1.2.tgz", + "integrity": "sha512-pNCVrk64LZv1kElr0N1wPiHEUoXNVFERp+mlTg/s9R5Lwg87f9bM/3sQB99w+N9D/qnM9ar3+AKDBwo/gm/iQQ==" + }, + "vfile-message": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-1.1.1.tgz", + "integrity": "sha512-1WmsopSGhWt5laNir+633LszXvZ+Z/lxveBf6yhGsqnQIhlhzooZae7zV6YVM1Sdkw68dtAW3ow0pOdPANugvA==", + "requires": { + "unist-util-stringify-position": "^1.1.1" + } + } + } + }, + "vfile-location": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-2.0.6.tgz", + "integrity": "sha512-sSFdyCP3G6Ka0CEmN83A2YCMKIieHx0EDaj5IDP4g1pa5ZJ4FJDvpO0WODLxo4LUX4oe52gmSCK7Jw4SBghqxA==" + }, + "vfile-message": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-2.0.2.tgz", + "integrity": "sha512-gNV2Y2fDvDOOqq8bEe7cF3DXU6QgV4uA9zMR2P8tix11l1r7zju3zry3wZ8sx+BEfuO6WQ7z2QzfWTvqHQiwsA==", + "requires": { + "@types/unist": "^2.0.0", + "unist-util-stringify-position": "^2.0.0" + } + }, + "vm-browserify": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vm-browserify/-/vm-browserify-1.1.2.tgz", + "integrity": "sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==" + }, + "warning": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/warning/-/warning-3.0.0.tgz", + "integrity": "sha1-MuU3fLVy3kqwR1O9+IIcAe1gW3w=", + "requires": { + "loose-envify": "^1.0.0" + } + }, + "watchpack": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-1.6.0.tgz", + "integrity": "sha512-i6dHe3EyLjMmDlU1/bGQpEw25XSjkJULPuAVKCbNRefQVq48yXKUpwg538F7AZTf9kyr57zj++pQFltUa5H7yA==", + "requires": { + "chokidar": "^2.0.2", + "graceful-fs": "^4.1.2", + "neo-async": "^2.5.0" + }, + "dependencies": { + "chokidar": { + "version": "2.1.8", + "resolved": 
"https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", + "integrity": "sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==", + "requires": { + "anymatch": "^2.0.0", + "async-each": "^1.0.1", + "braces": "^2.3.2", + "fsevents": "^1.2.7", + "glob-parent": "^3.1.0", + "inherits": "^2.0.3", + "is-binary-path": "^1.0.0", + "is-glob": "^4.0.0", + "normalize-path": "^3.0.0", + "path-is-absolute": "^1.0.0", + "readdirp": "^2.2.1", + "upath": "^1.1.1" + } + }, + "glob-parent": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", + "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", + "requires": { + "is-glob": "^3.1.0", + "path-dirname": "^1.0.0" + }, + "dependencies": { + "is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", + "requires": { + "is-extglob": "^2.1.0" + } + } + } + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==" + } + } + }, + "wbuf": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/wbuf/-/wbuf-1.7.3.tgz", + "integrity": "sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==", + "requires": { + "minimalistic-assert": "^1.0.0" + } + }, + "web-namespaces": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-1.1.3.tgz", + "integrity": "sha512-r8sAtNmgR0WKOKOxzuSgk09JsHlpKlB+uHi937qypOu3PZ17UxPrierFKDye/uNHjNTTEshu5PId8rojIPj/tA==" + }, + "webpack": { + "version": "4.41.5", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-4.41.5.tgz", + "integrity": "sha512-wp0Co4vpyumnp3KlkmpM5LWuzvZYayDwM2n17EHFr4qxBBbRokC7DJawPJC7TfSFZ9HZ6GsdH40EBj4UV0nmpw==", + "requires": { + "@webassemblyjs/ast": "1.8.5", + "@webassemblyjs/helper-module-context": "1.8.5", + "@webassemblyjs/wasm-edit": "1.8.5", + "@webassemblyjs/wasm-parser": "1.8.5", + "acorn": "^6.2.1", + "ajv": "^6.10.2", + "ajv-keywords": "^3.4.1", + "chrome-trace-event": "^1.0.2", + "enhanced-resolve": "^4.1.0", + "eslint-scope": "^4.0.3", + "json-parse-better-errors": "^1.0.2", + "loader-runner": "^2.4.0", + "loader-utils": "^1.2.3", + "memory-fs": "^0.4.1", + "micromatch": "^3.1.10", + "mkdirp": "^0.5.1", + "neo-async": "^2.6.1", + "node-libs-browser": "^2.2.1", + "schema-utils": "^1.0.0", + "tapable": "^1.1.3", + "terser-webpack-plugin": "^1.4.3", + "watchpack": "^1.6.0", + "webpack-sources": "^1.4.1" + }, + "dependencies": { + "acorn": { + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.0.tgz", + "integrity": "sha512-gac8OEcQ2Li1dxIEWGZzsp2BitJxwkwcOm0zHAJLcPJaVvm58FRnk6RkuLRpU1EujipU2ZFODv2P9DLMfnV8mw==" + }, + "eslint-scope": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", + "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "schema-utils": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", + "integrity": "sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==", + "requires": { + "ajv": "^6.1.0", + "ajv-errors": 
"^1.0.0", + "ajv-keywords": "^3.1.0" + } + } + } + }, + "webpack-dev-middleware": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-3.7.2.tgz", + "integrity": "sha512-1xC42LxbYoqLNAhV6YzTYacicgMZQTqRd27Sim9wn5hJrX3I5nxYy1SxSd4+gjUFsz1dQFj+yEe6zEVmSkeJjw==", + "requires": { + "memory-fs": "^0.4.1", + "mime": "^2.4.4", + "mkdirp": "^0.5.1", + "range-parser": "^1.2.1", + "webpack-log": "^2.0.0" + } + }, + "webpack-dev-server": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-3.10.1.tgz", + "integrity": "sha512-AGG4+XrrXn4rbZUueyNrQgO4KGnol+0wm3MPdqGLmmA+NofZl3blZQKxZ9BND6RDNuvAK9OMYClhjOSnxpWRoA==", + "requires": { + "ansi-html": "0.0.7", + "bonjour": "^3.5.0", + "chokidar": "^2.1.8", + "compression": "^1.7.4", + "connect-history-api-fallback": "^1.6.0", + "debug": "^4.1.1", + "del": "^4.1.1", + "express": "^4.17.1", + "html-entities": "^1.2.1", + "http-proxy-middleware": "0.19.1", + "import-local": "^2.0.0", + "internal-ip": "^4.3.0", + "ip": "^1.1.5", + "is-absolute-url": "^3.0.3", + "killable": "^1.0.1", + "loglevel": "^1.6.6", + "opn": "^5.5.0", + "p-retry": "^3.0.1", + "portfinder": "^1.0.25", + "schema-utils": "^1.0.0", + "selfsigned": "^1.10.7", + "semver": "^6.3.0", + "serve-index": "^1.9.1", + "sockjs": "0.3.19", + "sockjs-client": "1.4.0", + "spdy": "^4.0.1", + "strip-ansi": "^3.0.1", + "supports-color": "^6.1.0", + "url": "^0.11.0", + "webpack-dev-middleware": "^3.7.2", + "webpack-log": "^2.0.0", + "ws": "^6.2.1", + "yargs": "12.0.5" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" + }, + "array-union": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", + "integrity": "sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk=", + "requires": { + "array-uniq": "^1.0.1" + } + }, + "chokidar": { + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", + "integrity": "sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==", + "requires": { + "anymatch": "^2.0.0", + "async-each": "^1.0.1", + "braces": "^2.3.2", + "fsevents": "^1.2.7", + "glob-parent": "^3.1.0", + "inherits": "^2.0.3", + "is-binary-path": "^1.0.0", + "is-glob": "^4.0.0", + "normalize-path": "^3.0.0", + "path-is-absolute": "^1.0.0", + "readdirp": "^2.2.1", + "upath": "^1.1.1" + } + }, + "cliui": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-4.1.0.tgz", + "integrity": "sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ==", + "requires": { + "string-width": "^2.1.1", + "strip-ansi": "^4.0.0", + "wrap-ansi": "^2.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "requires": { + "ansi-regex": "^3.0.0" + } + } + } + }, + "del": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/del/-/del-4.1.1.tgz", + "integrity": "sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ==", + "requires": { + "@types/glob": 
"^7.1.1", + "globby": "^6.1.0", + "is-path-cwd": "^2.0.0", + "is-path-in-cwd": "^2.0.0", + "p-map": "^2.0.0", + "pify": "^4.0.1", + "rimraf": "^2.6.3" + } + }, + "eventsource": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-1.0.7.tgz", + "integrity": "sha512-4Ln17+vVT0k8aWq+t/bF5arcS3EpT9gYtW66EPacdj/mAFevznsnyoHLPy2BA8gbIQeIHoPsvwmfBftfcG//BQ==", + "requires": { + "original": "^1.0.0" + } + }, + "get-caller-file": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", + "integrity": "sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==" + }, + "glob-parent": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", + "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", + "requires": { + "is-glob": "^3.1.0", + "path-dirname": "^1.0.0" + }, + "dependencies": { + "is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", + "requires": { + "is-extglob": "^2.1.0" + } + } + } + }, + "globby": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-6.1.0.tgz", + "integrity": "sha1-9abXDoOV4hyFj7BInWTfAkJNUGw=", + "requires": { + "array-union": "^1.0.1", + "glob": "^7.0.3", + "object-assign": "^4.0.1", + "pify": "^2.0.0", + "pinkie-promise": "^2.0.0" + }, + "dependencies": { + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=" + } + } + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + }, + "is-wsl": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-1.1.0.tgz", + "integrity": "sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0=" + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==" + }, + "opn": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/opn/-/opn-5.5.0.tgz", + "integrity": "sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA==", + "requires": { + "is-wsl": "^1.1.0" + } + }, + "p-map": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz", + "integrity": "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==" + }, + "require-main-filename": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz", + "integrity": "sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE=" + }, + "schema-utils": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", + "integrity": "sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==", + "requires": { + "ajv": "^6.1.0", + "ajv-errors": "^1.0.0", + "ajv-keywords": "^3.1.0" + } + }, + "sockjs-client": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/sockjs-client/-/sockjs-client-1.4.0.tgz", + "integrity": "sha512-5zaLyO8/nri5cua0VtOrFXBPK1jbL4+1cebT/mmKA1E1ZXOvJrII75bPu0l0k843G/+iAbhEqzyKr0w/eCCj7g==", + 
"requires": { + "debug": "^3.2.5", + "eventsource": "^1.0.7", + "faye-websocket": "~0.11.1", + "inherits": "^2.0.3", + "json3": "^3.3.2", + "url-parse": "^1.4.3" + }, + "dependencies": { + "debug": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "requires": { + "ms": "^2.1.1" + } + } + } + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "requires": { + "ansi-regex": "^3.0.0" + } + } + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "requires": { + "ansi-regex": "^2.0.0" + } + }, + "supports-color": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", + "integrity": "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", + "requires": { + "has-flag": "^3.0.0" + } + }, + "wrap-ansi": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", + "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", + "requires": { + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1" + }, + "dependencies": { + "is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "requires": { + "number-is-nan": "^1.0.0" + } + }, + "string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "requires": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + } + } + } + }, + "ws": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.1.tgz", + "integrity": "sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA==", + "requires": { + "async-limiter": "~1.0.0" + } + }, + "yargs": { + "version": "12.0.5", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-12.0.5.tgz", + "integrity": "sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw==", + "requires": { + "cliui": "^4.0.0", + "decamelize": "^1.2.0", + "find-up": "^3.0.0", + "get-caller-file": "^1.0.1", + "os-locale": "^3.0.0", + "require-directory": "^2.1.1", + "require-main-filename": "^1.0.1", + "set-blocking": "^2.0.0", + "string-width": "^2.0.0", + "which-module": "^2.0.0", + "y18n": "^3.2.1 || ^4.0.0", + "yargs-parser": "^11.1.1" + } + }, + "yargs-parser": { + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-11.1.1.tgz", + "integrity": 
"sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ==", + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } + } + } + }, + "webpack-hot-middleware": { + "version": "2.25.0", + "resolved": "https://registry.npmjs.org/webpack-hot-middleware/-/webpack-hot-middleware-2.25.0.tgz", + "integrity": "sha512-xs5dPOrGPCzuRXNi8F6rwhawWvQQkeli5Ro48PRuQh8pYPCPmNnltP9itiUPT4xI8oW+y0m59lyyeQk54s5VgA==", + "requires": { + "ansi-html": "0.0.7", + "html-entities": "^1.2.0", + "querystring": "^0.2.0", + "strip-ansi": "^3.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "requires": { + "ansi-regex": "^2.0.0" + } + } + } + }, + "webpack-log": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/webpack-log/-/webpack-log-2.0.0.tgz", + "integrity": "sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg==", + "requires": { + "ansi-colors": "^3.0.0", + "uuid": "^3.3.2" + } + }, + "webpack-merge": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-4.2.2.tgz", + "integrity": "sha512-TUE1UGoTX2Cd42j3krGYqObZbOD+xF7u28WB7tfUordytSjbWTIjK/8V0amkBfTYN4/pB/GIDlJZZ657BGG19g==", + "requires": { + "lodash": "^4.17.15" + } + }, + "webpack-sources": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-1.4.3.tgz", + "integrity": "sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ==", + "requires": { + "source-list-map": "^2.0.0", + "source-map": "~0.6.1" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + } + } + }, + "webpack-stats-plugin": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/webpack-stats-plugin/-/webpack-stats-plugin-0.3.1.tgz", + "integrity": "sha512-pxqzFE055NlNTlNyfDG3xlB2QwT1EWdm/CF5dCJI/e+rRHVxrWhWg1rf1lfsWhI1/EePv8gi/A36YxO/+u0FgQ==" + }, + "websocket-driver": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.3.tgz", + "integrity": "sha512-bpxWlvbbB459Mlipc5GBzzZwhoZgGEZLuqPaR0INBGnPAY1vdBX6hPnoFXiw+3yWxDuHyQjO2oXTMyS8A5haFg==", + "requires": { + "http-parser-js": ">=0.4.0 <0.4.11", + "safe-buffer": ">=5.1.0", + "websocket-extensions": ">=0.1.1" + } + }, + "websocket-extensions": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.3.tgz", + "integrity": "sha512-nqHUnMXmBzT0w570r2JpJxfiSD1IzoI+HGVdd3aZ0yNi3ngvQ4jv1dtHt5VGxfI2yj5yqImPhOK4vmIh2xMbGg==" + }, + "whatwg-fetch": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-3.0.0.tgz", + "integrity": "sha512-9GSJUgz1D4MfyKU7KRqwOjXCXTqWdFNvEr7eUBYchQiVc744mqK/MzXPNR2WsPkmkOa4ywfg8C2n8h+13Bey1Q==" + }, + "which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + 
"requires": { + "isexe": "^2.0.0" + } + }, + "which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=" + }, + "which-pm-runs": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/which-pm-runs/-/which-pm-runs-1.0.0.tgz", + "integrity": "sha1-Zws6+8VS4LVd9rd4DKdGFfI60cs=" + }, + "wide-align": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", + "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", + "requires": { + "string-width": "^1.0.2 || 2" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "requires": { + "ansi-regex": "^3.0.0" + } + } + } + }, + "widest-line": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz", + "integrity": "sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==", + "requires": { + "string-width": "^4.0.0" + } + }, + "with-open-file": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/with-open-file/-/with-open-file-0.1.7.tgz", + "integrity": "sha512-ecJS2/oHtESJ1t3ZfMI3B7KIDKyfN0O16miWxdn30zdh66Yd3LsRFebXZXq6GU4xfxLf6nVxp9kIqElb5fqczA==", + "requires": { + "p-finally": "^1.0.0", + "p-try": "^2.1.0", + "pify": "^4.0.1" + } + }, + "word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==" + }, + "workbox-background-sync": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-background-sync/-/workbox-background-sync-4.3.1.tgz", + "integrity": "sha512-1uFkvU8JXi7L7fCHVBEEnc3asPpiAL33kO495UMcD5+arew9IbKW2rV5lpzhoWcm/qhGB89YfO4PmB/0hQwPRg==", + "requires": { + "workbox-core": "^4.3.1" + } + }, + "workbox-broadcast-update": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-broadcast-update/-/workbox-broadcast-update-4.3.1.tgz", + "integrity": "sha512-MTSfgzIljpKLTBPROo4IpKjESD86pPFlZwlvVG32Kb70hW+aob4Jxpblud8EhNb1/L5m43DUM4q7C+W6eQMMbA==", + "requires": { + "workbox-core": "^4.3.1" + } + }, + "workbox-build": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-build/-/workbox-build-4.3.1.tgz", + "integrity": "sha512-UHdwrN3FrDvicM3AqJS/J07X0KXj67R8Cg0waq1MKEOqzo89ap6zh6LmaLnRAjpB+bDIz+7OlPye9iii9KBnxw==", + "requires": { + "@babel/runtime": "^7.3.4", + "@hapi/joi": "^15.0.0", + "common-tags": "^1.8.0", + "fs-extra": "^4.0.2", + "glob": "^7.1.3", + 
"lodash.template": "^4.4.0", + "pretty-bytes": "^5.1.0", + "stringify-object": "^3.3.0", + "strip-comments": "^1.0.2", + "workbox-background-sync": "^4.3.1", + "workbox-broadcast-update": "^4.3.1", + "workbox-cacheable-response": "^4.3.1", + "workbox-core": "^4.3.1", + "workbox-expiration": "^4.3.1", + "workbox-google-analytics": "^4.3.1", + "workbox-navigation-preload": "^4.3.1", + "workbox-precaching": "^4.3.1", + "workbox-range-requests": "^4.3.1", + "workbox-routing": "^4.3.1", + "workbox-strategies": "^4.3.1", + "workbox-streams": "^4.3.1", + "workbox-sw": "^4.3.1", + "workbox-window": "^4.3.1" + }, + "dependencies": { + "fs-extra": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-4.0.3.tgz", + "integrity": "sha512-q6rbdDd1o2mAnQreO7YADIxf/Whx4AHBiRf6d+/cVT8h44ss+lHgxf1FemcqDnQt9X3ct4McHr+JMGlYSsK7Cg==", + "requires": { + "graceful-fs": "^4.1.2", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + } + } + } + }, + "workbox-cacheable-response": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-cacheable-response/-/workbox-cacheable-response-4.3.1.tgz", + "integrity": "sha512-Rp5qlzm6z8IOvnQNkCdO9qrDgDpoPNguovs0H8C+wswLuPgSzSp9p2afb5maUt9R1uTIwOXrVQMmPfPypv+npw==", + "requires": { + "workbox-core": "^4.3.1" + } + }, + "workbox-core": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-core/-/workbox-core-4.3.1.tgz", + "integrity": "sha512-I3C9jlLmMKPxAC1t0ExCq+QoAMd0vAAHULEgRZ7kieCdUd919n53WC0AfvokHNwqRhGn+tIIj7vcb5duCjs2Kg==" + }, + "workbox-expiration": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-expiration/-/workbox-expiration-4.3.1.tgz", + "integrity": "sha512-vsJLhgQsQouv9m0rpbXubT5jw0jMQdjpkum0uT+d9tTwhXcEZks7qLfQ9dGSaufTD2eimxbUOJfWLbNQpIDMPw==", + "requires": { + "workbox-core": "^4.3.1" + } + }, + "workbox-google-analytics": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-google-analytics/-/workbox-google-analytics-4.3.1.tgz", + "integrity": "sha512-xzCjAoKuOb55CBSwQrbyWBKqp35yg1vw9ohIlU2wTy06ZrYfJ8rKochb1MSGlnoBfXGWss3UPzxR5QL5guIFdg==", + "requires": { + "workbox-background-sync": "^4.3.1", + "workbox-core": "^4.3.1", + "workbox-routing": "^4.3.1", + "workbox-strategies": "^4.3.1" + } + }, + "workbox-navigation-preload": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-navigation-preload/-/workbox-navigation-preload-4.3.1.tgz", + "integrity": "sha512-K076n3oFHYp16/C+F8CwrRqD25GitA6Rkd6+qAmLmMv1QHPI2jfDwYqrytOfKfYq42bYtW8Pr21ejZX7GvALOw==", + "requires": { + "workbox-core": "^4.3.1" + } + }, + "workbox-precaching": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-precaching/-/workbox-precaching-4.3.1.tgz", + "integrity": "sha512-piSg/2csPoIi/vPpp48t1q5JLYjMkmg5gsXBQkh/QYapCdVwwmKlU9mHdmy52KsDGIjVaqEUMFvEzn2LRaigqQ==", + "requires": { + "workbox-core": "^4.3.1" + } + }, + "workbox-range-requests": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-range-requests/-/workbox-range-requests-4.3.1.tgz", + "integrity": "sha512-S+HhL9+iTFypJZ/yQSl/x2Bf5pWnbXdd3j57xnb0V60FW1LVn9LRZkPtneODklzYuFZv7qK6riZ5BNyc0R0jZA==", + "requires": { + "workbox-core": "^4.3.1" + } + }, + "workbox-routing": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-routing/-/workbox-routing-4.3.1.tgz", + "integrity": "sha512-FkbtrODA4Imsi0p7TW9u9MXuQ5P4pVs1sWHK4dJMMChVROsbEltuE79fBoIk/BCztvOJ7yUpErMKa4z3uQLX+g==", + "requires": { + "workbox-core": "^4.3.1" + } + }, + 
"workbox-strategies": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-strategies/-/workbox-strategies-4.3.1.tgz", + "integrity": "sha512-F/+E57BmVG8dX6dCCopBlkDvvhg/zj6VDs0PigYwSN23L8hseSRwljrceU2WzTvk/+BSYICsWmRq5qHS2UYzhw==", + "requires": { + "workbox-core": "^4.3.1" + } + }, + "workbox-streams": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-streams/-/workbox-streams-4.3.1.tgz", + "integrity": "sha512-4Kisis1f/y0ihf4l3u/+ndMkJkIT4/6UOacU3A4BwZSAC9pQ9vSvJpIi/WFGQRH/uPXvuVjF5c2RfIPQFSS2uA==", + "requires": { + "workbox-core": "^4.3.1" + } + }, + "workbox-sw": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-sw/-/workbox-sw-4.3.1.tgz", + "integrity": "sha512-0jXdusCL2uC5gM3yYFT6QMBzKfBr2XTk0g5TPAV4y8IZDyVNDyj1a8uSXy3/XrvkVTmQvLN4O5k3JawGReXr9w==" + }, + "workbox-window": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-window/-/workbox-window-4.3.1.tgz", + "integrity": "sha512-C5gWKh6I58w3GeSc0wp2Ne+rqVw8qwcmZnQGpjiek8A2wpbxSJb1FdCoQVO+jDJs35bFgo/WETgl1fqgsxN0Hg==", + "requires": { + "workbox-core": "^4.3.1" + } + }, + "worker-farm": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/worker-farm/-/worker-farm-1.7.0.tgz", + "integrity": "sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw==", + "requires": { + "errno": "~0.1.7" + } + }, + "wrap-ansi": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", + "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "requires": { + "ansi-styles": "^3.2.0", + "string-width": "^3.0.0", + "strip-ansi": "^5.0.0" + }, + "dependencies": { + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + } + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + }, + "write": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/write/-/write-1.0.3.tgz", + "integrity": "sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==", + "requires": { + "mkdirp": "^0.5.1" + } + }, + "write-file-atomic": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", + "integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==", + "requires": { + "graceful-fs": "^4.1.11", + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.2" + } + }, + "ws": { + "version": "7.2.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.2.1.tgz", + "integrity": 
"sha512-sucePNSafamSKoOqoNfBd8V0StlkzJKL2ZAhGQinCfNQ+oacw+Pk7lcdAElecBF2VkLNZRiIb5Oi1Q5lVUVt2A==" + }, + "x-is-string": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/x-is-string/-/x-is-string-0.1.0.tgz", + "integrity": "sha1-R0tQhlrzpJqcRlfwWs0UVFj3fYI=" + }, + "xdg-basedir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-3.0.0.tgz", + "integrity": "sha1-SWsswQnsqNus/i3HK2A8F8WHCtQ=" + }, + "xhr": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/xhr/-/xhr-2.5.0.tgz", + "integrity": "sha512-4nlO/14t3BNUZRXIXfXe+3N6w3s1KoxcJUUURctd64BLRe67E4gRwp4PjywtDY72fXpZ1y6Ch0VZQRY/gMPzzQ==", + "requires": { + "global": "~4.3.0", + "is-function": "^1.0.1", + "parse-headers": "^2.0.0", + "xtend": "^4.0.0" + }, + "dependencies": { + "global": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/global/-/global-4.3.2.tgz", + "integrity": "sha1-52mJJopsdMOJCLEwWxD8DjlOnQ8=", + "requires": { + "min-document": "^2.19.0", + "process": "~0.5.1" + } + }, + "process": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/process/-/process-0.5.2.tgz", + "integrity": "sha1-FjjYqONML0QKkduVq5rrZ3/Bhc8=" + } + } + }, + "xml-parse-from-string": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/xml-parse-from-string/-/xml-parse-from-string-1.0.1.tgz", + "integrity": "sha1-qQKekp09vN7RafPG4oI42VpdWig=" + }, + "xml2js": { + "version": "0.4.23", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz", + "integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==", + "requires": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + } + }, + "xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==" + }, + "xmlhttprequest-ssl": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.5.tgz", + "integrity": "sha1-wodrBhaKrcQOV9l+gRkayPQ5iz4=" + }, + "xstate": { + "version": "4.7.5", + "resolved": "https://registry.npmjs.org/xstate/-/xstate-4.7.5.tgz", + "integrity": "sha512-1QbwTUviBCHSnT3ct+GZZspzbNsftMEzP1lXmeR/zJphA1BHeLh/JV+woCmRdTBHyuGcJ2I7u7pp/+Y9e4qq9A==" + }, + "xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" + }, + "y18n": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", + "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==" + }, + "yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=" + }, + "yaml": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.7.2.tgz", + "integrity": "sha512-qXROVp90sb83XtAoqE8bP9RwAkTTZbugRUTm5YeFCBfNRPEp2YzTeqWiz7m5OORHzEvrA/qcGS8hp/E+MMROYw==", + "requires": { + "@babel/runtime": "^7.6.3" + } + }, + "yaml-loader": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/yaml-loader/-/yaml-loader-0.5.0.tgz", + "integrity": "sha512-p9QIzcFSNm4mCw/m5NdyMfN4RE4aFZJWRRb01ERVNGCym8VNbKtw3OYZXnvUIkim6U/EjqE/2yIh9F/msShH9A==", + "requires": { + "js-yaml": "^3.5.2" + } + }, + "yargs": { + "version": 
"13.3.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.0.tgz", + "integrity": "sha512-2eehun/8ALW8TLoIl7MVaRUrg+yCnenu8B4kBlRxj3GJGDKU1Og7sMXPNm1BYyM1DOJmTZ4YeN/Nwxv+8XJsUA==", + "requires": { + "cliui": "^5.0.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^3.0.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^13.1.1" + }, + "dependencies": { + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + } + } + }, + "yargs-parser": { + "version": "13.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.1.tgz", + "integrity": "sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ==", + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } + }, + "yauzl": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", + "integrity": "sha1-x+sXyT4RLLEIb6bY5R+wZnt5pfk=", + "requires": { + "buffer-crc32": "~0.2.3", + "fd-slicer": "~1.1.0" + } + }, + "yeast": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/yeast/-/yeast-0.1.2.tgz", + "integrity": "sha1-AI4G2AlDIMNy28L47XagymyKxBk=" + }, + "yoga-layout-prebuilt": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/yoga-layout-prebuilt/-/yoga-layout-prebuilt-1.9.3.tgz", + "integrity": "sha512-9SNQpwuEh2NucU83i2KMZnONVudZ86YNcFk9tq74YaqrQfgJWO3yB9uzH1tAg8iqh5c9F5j0wuyJ2z72wcum2w==", + "optional": true + }, + "yurnalist": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/yurnalist/-/yurnalist-1.1.1.tgz", + "integrity": "sha512-WMk8SL262zU/3Cr8twpfx/kdhPDAkhWN9HukNeb1U1xVrwU9iIAsCgYI8J5QMZTz+5N3Et/ZKzvOzVCjd/dAWA==", + "requires": { + "babel-runtime": "^6.26.0", + "chalk": "^2.4.2", + "cli-table3": "^0.5.1", + "debug": "^4.1.1", + "deep-equal": "^1.1.0", + "detect-indent": "^6.0.0", + "inquirer": "^7.0.0", + "invariant": "^2.2.0", + "is-builtin-module": "^3.0.0", + "is-ci": "^2.0.0", + "leven": "^3.1.0", + "loud-rejection": "^2.2.0", + "node-emoji": "^1.10.0", + "object-path": "^0.11.2", + "read": "^1.0.7", + "rimraf": "^3.0.0", + "semver": "^6.3.0", + "strip-ansi": "^5.2.0", + "strip-bom": "^4.0.0" + }, + "dependencies": { + "rimraf": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.0.tgz", + "integrity": "sha512-NDGVxTsjqfunkds7CqsOiEnxln4Bo7Nddl3XhS4pXg5OzwkLqJ971ZVAAnB+DDLnF76N+VnDEiBHaVV8I06SUg==", + "requires": { + "glob": "^7.1.3" + } + }, + "strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": 
"sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==" + } + } + }, + "zwitch": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-1.0.4.tgz", + "integrity": "sha512-YO803/X+13GNaZB7fVopjvHH0uWQKgJkgKnU1YCjxShjKGVuN9PPHHW8g+uFDpkHpSTNi3rCMKMewIcbC1BAYg==" + } + } +} diff --git a/docs/package.json b/docs/package.json new file mode 100644 index 0000000000000..708dc1009c2d3 --- /dev/null +++ b/docs/package.json @@ -0,0 +1,45 @@ +{ + "name": "npm-cli-docs", + "description": "npm cli docs", + "version": "0.1.0", + "author": "Tanya Brassie ", + "license": "Artistic-2.0", + "repository": { + "type": "git", + "url": "https://github.com/npm/cli" + }, + "dependencies": { + "babel-plugin-styled-components": "^1.10.6", + "eslint": "^6.3.0", + "gatsby": "^2.18.17", + "gatsby-image": "^2.2.37", + "gatsby-plugin-catch-links": "^2.1.21", + "gatsby-plugin-ipfs": "^2.0.2", + "gatsby-plugin-manifest": "^2.2.34", + "gatsby-plugin-no-sourcemaps": "^2.1.1", + "gatsby-plugin-offline": "^3.0.30", + "gatsby-plugin-prefetch-google-fonts": "^1.4.3", + "gatsby-plugin-react-helmet": "^3.1.18", + "gatsby-plugin-root-import": "^2.0.5", + "gatsby-plugin-sharp": "^2.3.10", + "gatsby-plugin-styled-components": "^3.1.16", + "gatsby-remark-autolink-headers": "^2.1.21", + "gatsby-remark-prismjs": "^3.3.28", + "gatsby-source-filesystem": "^2.1.43", + "gatsby-transformer-remark": "^2.6.45", + "prismjs": "^1.17.1", + "prop-types": "^15.7.2", + "react": "^16.9.0", + "react-dom": "^16.9.0", + "react-helmet": "^5.2.1", + "rebass": "^4.0.5", + "styled-components": "^4.4.0" + }, + "scripts": { + "develop": "gatsby develop", + "start": "npm run develop", + "build": "gatsby build", + "build:static": "GATSBY_IS_STATIC=true gatsby build --prefix-paths", + "serve": "gatsby serve" + } +} diff --git a/docs/src/components/Accordion.js b/docs/src/components/Accordion.js new file mode 100644 index 0000000000000..e7086f4ec09aa --- /dev/null +++ b/docs/src/components/Accordion.js @@ -0,0 +1,57 @@ +import React from 'react' +import styled from 'styled-components' +import downCarrot from '../images/down-carrot.svg' +import upCarrot from '../images/up-carrot.svg' + +const SectionButton = styled.button` + outline: none; + background-color: transparent; + cursor: pointer; + color: red; + border: none; + font-size: 18px; + font-weight: bold; + padding: 5px 0; + transition: opacity .5s; + + &:after { + background: center / contain no-repeat url(${(props) => props.isOpen ? upCarrot : downCarrot}); + content: ''; + height: 11px; + width: 28px; + display: inline-block; + } + + &:hover { + opacity: .6; + } +` + +class Accordion extends React.Component { + constructor (props) { + super(props) + this.state = { + isOpen: true + } + this.onHide = this.onHide.bind(this) + } + + onHide () { + this.setState({isOpen: !this.state.isOpen}) + } + + render () { + return ( +
+ {this.props.section} + {this.state.isOpen && +
+ {this.props.children} +
+ } +
+ ) + } +} + +export default Accordion diff --git a/docs/src/components/Button.js b/docs/src/components/Button.js new file mode 100644 index 0000000000000..f8372ba7cd512 --- /dev/null +++ b/docs/src/components/Button.js @@ -0,0 +1,22 @@ +import {Link} from 'gatsby' +import {colors} from '../theme' +import styled from 'styled-components' + +export const LinkButton = styled(Link)` + background-color: ${colors.red}; + color: ${colors.white}; + font-size: 20px; + border-radius: 1px; + padding: 20px; + box-shadow: 8px 8px 0 rgba(251,59,73,.2); + text-decoration: none; + text-align: center; + display: inline-block; + min-width: 180px; + font-weight: 700; + transition: opacity .5s; + + &:hover { + opacity: .8; + } +` diff --git a/docs/src/components/DocLinks.js b/docs/src/components/DocLinks.js new file mode 100644 index 0000000000000..f3f5ef6484e95 --- /dev/null +++ b/docs/src/components/DocLinks.js @@ -0,0 +1,72 @@ +import React from 'react' +import styled from 'styled-components' +import {StaticQuery, graphql} from 'gatsby' +import {Flex} from 'rebass' +import {SidebarLink} from './links' +import Accordion from './Accordion' + +const LinkDesc = styled.span` + font-size: 11px; + line-height: 1.5; + text-transform: lowercase; + display: block; + font-weight: 400; + color: ${(props) => props.theme.colors.darkGray}; +` + +const DocLinks = ({data}) => { + const linkInfo = data.allMarkdownRemark.nodes + const sections = ['cli-commands', 'configuring-npm', 'using-npm'] + let sortedData = {} + + sections.map((section) => ( + sortedData[section] = linkInfo.filter(function (item) { + return item.frontmatter.section === section + }) + )) + + return sections.map((section, index) => ( + + {sortedData[section].map((linkData, index) => { + const title = section === 'cli-commands' + ? linkData.frontmatter.title.replace(/(npm-)+([a-zA-Z\\.-]*)/, 'npm $2') + : linkData.frontmatter.title + + return ( + + + {title} + {linkData.frontmatter.description} + + + ) + }) + } + + )) +} + +export default props => ( + } + /> +) diff --git a/docs/src/components/FoundTypo.js b/docs/src/components/FoundTypo.js new file mode 100644 index 0000000000000..5aca0894934dc --- /dev/null +++ b/docs/src/components/FoundTypo.js @@ -0,0 +1,23 @@ +import React from 'react' +import styled from 'styled-components' + +const Container = styled.div` + margin: 80px 0; + border-top: 1px solid black; + padding: 20px 0; +` + +const FoundTypo = () => { + return ( + +

+👀 Found a typo? Let us know!
+The current stable version of npm is here. To upgrade, run: npm install npm@latest -g
+To report bugs or submit feature requests for the docs, please post here. Submit npm issues here.

+
+ ) +} + +export default FoundTypo diff --git a/docs/src/components/MobileSidebar.js b/docs/src/components/MobileSidebar.js new file mode 100644 index 0000000000000..13835e6aa78a1 --- /dev/null +++ b/docs/src/components/MobileSidebar.js @@ -0,0 +1,33 @@ +import React from 'react' +import styled from 'styled-components' +import DocLinks from './DocLinks' +import {} from '../components/Sidebar' + +const MobileContainer = styled.div` + border-left: 1px solid #86838333; + border-bottom: 1px solid #86838333; + padding: 30px 30px 200px; + width: 340px; + display: block; + height: calc(100vh - 54px); + overflow: scroll; + position: fixed; + top: 54px; + right: 0px; + background-color: ${(props) => props.theme.colors.white}; + z-index: 100; + + @media screen and (min-width: ${(props) => props.theme.breakpoints.TABLET}) { + display: none; + } +` + +const MobileSidebar = () => { + return ( + + + + ) +} + +export default MobileSidebar diff --git a/docs/src/components/Sidebar.js b/docs/src/components/Sidebar.js new file mode 100644 index 0000000000000..3141cb0fa0fa8 --- /dev/null +++ b/docs/src/components/Sidebar.js @@ -0,0 +1,30 @@ +import React from 'react' +import styled from 'styled-components' +import DocLinks from './DocLinks' + +const Container = styled.nav` + border-right: 1px solid #86838333; + padding: 30px; + height: 100vh; + display: none; + width: 380px; + position: sticky; + overflow: scroll; + padding-bottom: 200px; + top: 54px; + background-color: ${(props) => props.theme.colors.white}; + + @media screen and (min-width: ${(props) => props.theme.breakpoints.TABLET}) { + display: block; + } +` + +const Sidebar = () => { + return ( + + + + ) +} + +export default Sidebar diff --git a/docs/src/components/home/DarkBlock.js b/docs/src/components/home/DarkBlock.js new file mode 100644 index 0000000000000..bba6a9fc51b3d --- /dev/null +++ b/docs/src/components/home/DarkBlock.js @@ -0,0 +1,40 @@ +import React from 'react' +import styled from 'styled-components' +import {Flex, Box} from 'rebass' +import {LinkButton} from '../Button' + +const Container = styled(Flex)` + background-color: ${(props) => props.theme.colors.purpleBlack}; + color: ${(props) => props.theme.colors.white}; +` + +const ContentWrapper = styled(Flex)` + max-width: 640px; + align-items: center; +` + +const Text = styled.p` + line-height: 1.5; + text-align: center; +` + +const aStyle = { + color: '#fb3b49', + textDecoration: 'none' +} + +const DarkBlock = () => { + return ( + + + +

+The current stable version of npm is available on GitHub.
+To upgrade, run: npm install npm@latest -g

+
+ read docs +
+
+ ) +} + +export default DarkBlock diff --git a/docs/src/components/home/FeatureCard.js b/docs/src/components/home/FeatureCard.js new file mode 100644 index 0000000000000..744a01bb7224f --- /dev/null +++ b/docs/src/components/home/FeatureCard.js @@ -0,0 +1,39 @@ +import React from 'react' +import styled from 'styled-components' +import {Flex, Image, Text} from 'rebass' + +const Card = styled(Flex)` + background-color: #f2f2f2ab; + box-shadow: 5px 5px 1px 1px ${(props) => props.theme.colors.red}; + border-radius: 2px; +` + +const Desc = styled.p` + padding: 5px 0; + font-size: 16px; +` + +const Title = styled(Text)` + font-size: 24px; + font-weight: 500; + text-shadow: 1px 2px 2px #f061df6e; +` + +const Icon = styled(Image)` + width: 110px; + flex-shrink: 0; +` + +const FeatureCard = ({icon, text, title}) => { + return ( + + + + {title} + {text} + + + ) +} + +export default FeatureCard diff --git a/docs/src/components/home/Features.js b/docs/src/components/home/Features.js new file mode 100644 index 0000000000000..e544b4ac9de41 --- /dev/null +++ b/docs/src/components/home/Features.js @@ -0,0 +1,83 @@ +import React from 'react' +import styled from 'styled-components' +import FeatureCard from './FeatureCard' +import { FeatureLink } from '../links' +import { Flex } from 'rebass' +import rectangles from '../../images/background-rectangles.svg' +import terminalIcon from '../../images/terminal-icon.svg' +import networkIcon from '../../images/network-icon.svg' +import npmIcon from '../../images/npm-icon.png' +import managerIcon from '../../images/manager-icon.svg' + +const ContainerInner = styled(Flex)` + background: linear-gradient(84deg, #fb881799, #ff4b0199, #c1212799, #e02aff99); +` + +const Container = styled.div` + background: top / cover no-repeat url(${rectangles}); +` + +const ContentWrapper = styled(Flex)` + max-width: 640px; +` + +const featureTexts = { + textOne: 'Download, install, and configure.', + textTwo: 'All available npm commands.', + textThree: 'How npm things work.', + textFour: 'Publish your own public or private packages to the registry with a free or paid account on npmjs.com from npm, Inc.' 
+} + +const featureTitles = { + titleOne: 'Getting Started', + titleTwo: 'Command Reference', + titleThree: 'Using npm', + titleFour: 'Publishing' +} + +const aStyle = { + color: '#231f20', + textDecoration: 'none' +} +const productsLink = `https://www.npmjs.com/products` + +const Features = () => { + return ( + + + + + + + + + + + + + + + + + + + ) +} + +export default Features diff --git a/docs/src/components/home/Footer.js b/docs/src/components/home/Footer.js new file mode 100644 index 0000000000000..851b8dd5652a7 --- /dev/null +++ b/docs/src/components/home/Footer.js @@ -0,0 +1,29 @@ +import React from 'react' +import boxes from '../../images/background-boxes.svg' +import styled from 'styled-components' +import {Flex, Box} from 'rebass' + +const Container = styled(Flex)` + background: center / cover no-repeat url(${boxes}); + height: 380px; + background-color: ${(props) => props.theme.colors.offWhite}; + ` + +const ContentWrapper = styled(Box)` + align-content: center; + width: 100%; + text-align: center; + background-color: ${(props) => props.theme.colors.white}; +` + +const Footer = () => { + return ( + + + Footer Text 🤪 + + + ) +} + +export default Footer diff --git a/docs/src/components/home/Terminal.js b/docs/src/components/home/Terminal.js new file mode 100644 index 0000000000000..19d890cb98057 --- /dev/null +++ b/docs/src/components/home/Terminal.js @@ -0,0 +1,120 @@ +import React from 'react' +import styled, {keyframes} from 'styled-components' +import {Flex, Box, Button as RebassButton} from 'rebass' +import closeX from '../../images/x.svg' +import {LinkButton} from '../Button' +import bracket from '../../images/bracket.svg' + +const TerminalBody = styled(Flex)` + background-color: ${(props) => props.theme.colors.purpleBlack}; + border: 2px solid ${(props) => props.theme.colors.purpleBlack}; + color: ${(props) => props.theme.colors.white}; + flex-direction: column; + max-width: 620px; + width: 100%; + height: 100%; + box-shadow: 0px 0px 17px 1px #dc3bc180; + border-radius: 2px; + top: ${(props) => props.top}; + left: ${(props) => props.left}; + right: 0; + position: absolute; +` + +const Top = styled(Flex)` + background-color: ${(props) => props.theme.colors.white}; + height: 18px; +` + +const SiteName = styled(Flex)` + font-size: 45px; + font-family: 'Inconsolata', sans-serif; + font-weight: 700; + letter-spacing: 5px; + text-shadow: 3px 2px 4px #abf1e04d; + + @media screen and (min-width: ${(props) => props.theme.breakpoints.TABLET}) { + font-size: 70px; + } +` + +const Bottom = styled(Flex)` + flex-direction: column; + padding: 30px; + + @media screen and (min-width: ${(props) => props.theme.breakpoints.TABLET}) { + font-size: 70px; + padding: 30px 50px; + + } +` + +const blink = keyframes` + 0% { + opacity: 0; + } + 50% { + opacity 1; + } + 100% { + opacity: 0; + } +` + +const Cursor = styled.span` + color: ${(props) => props.theme.colors.red}; + text-shadow: none; + opacity: 1; + animation: ${blink}; + animation-duration: 3s; + animation-iteration-count: infinite; + animation-fill-mode: both; +` + +const Bracket = styled.span` + background: center / contain no-repeat url(${bracket}); + width: 25px; + margin-right: 5px; + margin-top: 10px; +` + +const Text = styled.span` + font-size: 15px; + font-weight: 400; + letter-spacing: 1px; + line-height: 1.4; + + @media screen and (min-width: ${(props) => props.theme.breakpoints.TABLET}) { + font-size: 18px; + } +` + +const ModalButton = styled(RebassButton)` + cursor: pointer; + background: center no-repeat url(${closeX}); + 
width: 14px; + height: 14px; +` + +const Terminal = ({onClose, top, left}) => { + return ( + + + + + + npm cli _ + + The intelligent package manager for the Node Javascript Platform. Install stuff and get coding! + + + + read docs + + + + + ) +} + +export default Terminal diff --git a/docs/src/components/home/Windows.js b/docs/src/components/home/Windows.js new file mode 100644 index 0000000000000..fcdfd0eed029a --- /dev/null +++ b/docs/src/components/home/Windows.js @@ -0,0 +1,73 @@ +import React from 'react' +import Terminal from './Terminal' +import styled from 'styled-components' + +const Container = styled.div` + position: relative; + height: 350px; + width: 80%; + margin: auto; + left: -4%; + + @media screen and (min-width: ${(props) => props.theme.breakpoints.TABLET}) { + height: 400px; + } +` + +class Windows extends React.Component { + constructor (props) { + super(props) + this.state = { + showTopTerminal: true, + showMiddleTerminal: true, + showBottomTerminal: true, + counter: 0 + } + this.onHide = this.onHide.bind(this) + } + + onHide (terminal) { + this.setState({ [terminal]: false, counter: this.state.counter + 1 }, () => { + if (this.state.counter === 3) { + this.setState({ + showTopTerminal: true, + showMiddleTerminal: true, + showBottomTerminal: true, + counter: 0 + }) + } + }) + } + + render () { + return ( + + {this.state.showTopTerminal && + this.onHide('showTopTerminal')} + top={'0%'} + left={'0%'} + /> + } + + {this.state.showMiddleTerminal && + this.onHide('showMiddleTerminal')} + top={'8%'} + left={'5%'} + /> + } + + {this.state.showBottomTerminal && + this.onHide('showBottomTerminal')} + top={'16%'} + left={'10%'} + /> + } + + ) + } +} + +export default Windows diff --git a/docs/src/components/home/cubes.js b/docs/src/components/home/cubes.js new file mode 100644 index 0000000000000..65a2b8bd953f2 --- /dev/null +++ b/docs/src/components/home/cubes.js @@ -0,0 +1,101 @@ +import styled, {css, keyframes} from 'styled-components' +import purpleCube from '../../images/purple-cube.svg' +import orangeCube from '../../images/orange-cube.svg' +import redCube from '../../images/red-cube.svg' +import purpleGradientCube from '../../images/purple-gradient-cube.svg' +import pinkGradientCube from '../../images/pink-gradient-cube.svg' + +const commonCubeStyles = css` + background-position: center; + background-repeat: no-repeat; + position: absolute; +` + +const wiggle = keyframes` + 0% { + transform: rotate(0deg); + } + 33% { + transform: rotate(8deg); + } + 100% { + transform: rotate(0deg); + } +` + +export const CubeTopLeft = styled.div` + ${commonCubeStyles}; + background-image: url(${purpleCube}); + height: 35px; + width: 35px; + top: 10%; + left: 8%; + + animation-name: ${wiggle}; + animation-duration: 2.5s; + animation-delay: .5s; + animation-iteration-count: infinite; + animation-fill-mode: both; + animation-timing-function: ease-in-out; +` + +export const CubeMiddleLeft = styled.span` + ${commonCubeStyles}; + background-image: url(${orangeCube}); + height: 30px; + width: 30px; + top: 40%; + left: 17%; + + animation-name: ${wiggle}; + animation-duration: 2.5s; + animation-iteration-count: infinite; + animation-fill-mode: both; + animation-timing-function: ease-in-out; +` + +export const CubeBottomLeft = styled.span` + ${commonCubeStyles}; + background-image: url(${redCube}); + height: 45px; + width: 45px; + top: 78%; + left: 12%; + + animation-name: ${wiggle}; + animation-duration: 3s; + animation-iteration-count: infinite; + animation-fill-mode: both; + 
animation-timing-function: ease-in-out; +` + +export const CubeBottomRight = styled.span` + ${commonCubeStyles}; + background-image: url(${pinkGradientCube}); + height: 40px; + width: 40px; + top: 70%; + right: 12%; + + animation-name: ${wiggle}; + animation-duration: 2.5s; + animation-iteration-count: infinite; + animation-delay: .3s; + animation-fill-mode: both; + animation-timing-function: ease-in-out; +` + +export const CubeTopRight = styled.span` + ${commonCubeStyles}; + background-image: url(${purpleGradientCube}); + height: 40px; + width: 40px; + top: 14%; + right: 12%; + + animation-name: ${wiggle}; + animation-duration: 3s; + animation-iteration-count: infinite; + animation-fill-mode: backwards; + animation-timing-function: ease-in-out; +` diff --git a/docs/src/components/home/hero.js b/docs/src/components/home/hero.js new file mode 100644 index 0000000000000..eb690b290de82 --- /dev/null +++ b/docs/src/components/home/hero.js @@ -0,0 +1,25 @@ +import React from 'react' +import styled from 'styled-components' +import Windows from './Windows' +import {Flex} from 'rebass' +import {CubeTopLeft, CubeMiddleLeft, CubeBottomLeft, CubeTopRight, CubeBottomRight} from './cubes' + +const Container = styled(Flex)` + background-color: ${(props) => props.theme.colors.offWhite}; + position: relative; +` + +const Hero = () => { + return ( + + + + + + + + + ) +} + +export default Hero diff --git a/docs/src/components/layout.js b/docs/src/components/layout.js new file mode 100644 index 0000000000000..ebb2636bfcb17 --- /dev/null +++ b/docs/src/components/layout.js @@ -0,0 +1,24 @@ +import React from 'react' +import Navbar from './navbar' +import Sidebar from './Sidebar' +import {Flex, Box} from 'rebass' +import { theme } from 'src/theme' +import { ThemeProvider } from 'styled-components' + +const IS_STATIC = process.env.GATSBY_IS_STATIC + +const Layout = ({children, path}) => { + const showSidebar = IS_STATIC || path.match(/cli-commands|configuring-npm|using-npm/) + + return ( + + + + {showSidebar && } + {children} + + + ) +} + +export default Layout diff --git a/docs/src/components/links.js b/docs/src/components/links.js new file mode 100644 index 0000000000000..b0424c132cc0c --- /dev/null +++ b/docs/src/components/links.js @@ -0,0 +1,50 @@ +import {Link} from 'gatsby' +import styled, {css} from 'styled-components' + +const baseLinkStyles = css` + font-weight: 500; + text-decoration: none; + letter-spacing: .3px; + font-size: 14px; +` +const featureLinkStyles = css` + ${baseLinkStyles} + color: ${(props) => props.theme.colors.black}; + transition: opacity .5s + &:hover { + opacity: .9; + } +` + +const navLinkStyles = css` + ${baseLinkStyles}; + color: ${(props) => props.theme.colors.black}; + transition: opacity .5s; + margin: 0 10px; + + &:hover { + opacity: .5; + } +` +export const FeatureLink = styled(Link)` + ${featureLinkStyles} +` + +export const NavLink = styled(Link)` + ${navLinkStyles}; +` + +export const BasicNavLink = styled.a` + ${navLinkStyles}; +` + +export const SidebarLink = styled(Link)` + ${baseLinkStyles}; + color: ${(props) => props.theme.colors.red}; + padding: 10px; + transition: background-color .3s; + + &:hover { + background-color: ${(props) => props.theme.colors.lightPurple}; + } +` diff --git a/docs/src/components/navbar.js b/docs/src/components/navbar.js new file mode 100644 index 0000000000000..37356a6a47a05 --- /dev/null +++ b/docs/src/components/navbar.js @@ -0,0 +1,136 @@ +import React from 'react' +import styled from 'styled-components' +import {Flex, Image, Box} 
from 'rebass' +import cliLogo from '../images/cli-logo.svg' +import {Link} from 'gatsby' +import {NavLink, BasicNavLink} from './links' +import MobileSidebar from '../components/MobileSidebar' +import hamburger from '../images/hamburger.svg' +import hamburgerClose from '../images/hamburger-close.svg' + +const IS_STATIC = !!process.env.GATSBY_IS_STATIC + +const Container = styled(Flex)` + width: 100%; + border-bottom: 1px solid #86838333; + position: sticky; + top: 0; + background-color: ${(props) => props.theme.colors.white}; + z-index: 1; +` + +const Inner = styled(Flex)` + border-top: 3px solid; + border-image: linear-gradient(139deg, #fb8817, #ff4b01, #c12127, #e02aff) 3; + margin: auto; + height: 53px; + padding: 0 30px; + align-items: center; + width: 100%; +` + +const Logo = styled(Image)` + width: 120px; + padding: 0px 5px; + height: 18px; + vertical-align: middle; + display: inline-block; + transition: opacity .5s; + + &:hover { + opacity: .8; + } +` + +const Links = styled.ul` + display: none; + + @media screen and (min-width: ${(props) => props.theme.breakpoints.TABLET}) { + display: block; + margin-left: auto; + } +` + +const Heart = styled(Box)` + font-size: 15px; + display: inline-block; +` + +const Hamburger = styled.button` + border: none; + background: center no-repeat url(${(props) => props.isOpen ? hamburgerClose : hamburger}); + height: 30px; + width: 30px; + display: block; + margin-left: auto; + transition: opacity .5s; + cursor: pointer; + + &:hover { + opacity: .6; + } + + @media screen and (min-width: ${(props) => props.theme.breakpoints.TABLET}) { + display: none; + } +` + +class Navbar extends React.Component { + constructor (props) { + super(props) + this.state = { + value: null, + showMobileNav: false + } + this.enableBody = this.enableBody.bind(this) + this.toggleNav = this.toggleNav.bind(this) + } + + componentDidMount () { + window.addEventListener('resize', () => { + this.enableBody() + this.setState({showMobileNav: false}) + }) + } + + componentWillUnmount () { + this.enableBody() + } + + enableBody () { + window.document.getElementsByTagName('body')[0].classList.remove('disabled-body') + } + + toggleNav () { + this.setState({showMobileNav: !this.state.showMobileNav}) + window.document.getElementsByTagName('body')[0].classList.toggle('disabled-body') + } + + render () { + return ( + + + + + + + + + docs + + npmjs.org + + + + + {this.state.showMobileNav && } + + ) + } +} + +export default Navbar diff --git a/docs/src/components/scripts.js b/docs/src/components/scripts.js new file mode 100644 index 0000000000000..75126e11ffaf8 --- /dev/null +++ b/docs/src/components/scripts.js @@ -0,0 +1,27 @@ +import React from 'react' + +const IS_STATIC = process.env.GATSBY_IS_STATIC + +const Scripts = () => { + // Workaround: Make links work on the static html site + if (IS_STATIC) { + return ( + - - -
- diff --git a/html/favicon.ico b/html/favicon.ico deleted file mode 100644 index 9e0d4eef78c9c..0000000000000 Binary files a/html/favicon.ico and /dev/null differ diff --git a/html/index.html b/html/index.html deleted file mode 100644 index 32dd01a34f8ee..0000000000000 --- a/html/index.html +++ /dev/null @@ -1,93 +0,0 @@ - - - - - - npm - JavaScript Package Manager - -

-npm
-npm is a package manager for node. You can use it to install and publish your node programs. It manages dependencies and does other cool stuff.
-Easy Zero Line Install
-Install Node.js (npm comes with it.)
-Because a one-line install is one too many.
-Fancy Install
-  1. Get the code.
-  2. Do what the README says to do.
-There's a pretty thorough install script at https://npmjs.org/install.sh
-For maximum security, make sure to thoroughly inspect every program that you run on your computer!
-Other Cool Stuff

- - - - - diff --git a/html/n-64.png b/html/n-64.png deleted file mode 100644 index 994330a1a9b08..0000000000000 Binary files a/html/n-64.png and /dev/null differ diff --git a/html/n-large.png b/html/n-large.png deleted file mode 100644 index 34008b23afce6..0000000000000 Binary files a/html/n-large.png and /dev/null differ diff --git a/html/npm-16.png b/html/npm-16.png deleted file mode 100644 index e97b01d1fc5a1..0000000000000 Binary files a/html/npm-16.png and /dev/null differ diff --git a/html/npm-256-square.png b/html/npm-256-square.png deleted file mode 100644 index b3556f93cc8f6..0000000000000 Binary files a/html/npm-256-square.png and /dev/null differ diff --git a/html/npm-256w.png b/html/npm-256w.png deleted file mode 100644 index c9db703359b3b..0000000000000 Binary files a/html/npm-256w.png and /dev/null differ diff --git a/html/npm-64-square.png b/html/npm-64-square.png deleted file mode 100644 index cabbb2e1cf3b6..0000000000000 Binary files a/html/npm-64-square.png and /dev/null differ diff --git a/html/npm-fin.png b/html/npm-fin.png deleted file mode 100644 index 5174b98af8615..0000000000000 Binary files a/html/npm-fin.png and /dev/null differ diff --git a/html/npm-large-trans.png b/html/npm-large-trans.png deleted file mode 100644 index 690df8d92d798..0000000000000 Binary files a/html/npm-large-trans.png and /dev/null differ diff --git a/html/npm-large.png b/html/npm-large.png deleted file mode 100644 index 59fed0014cb9f..0000000000000 Binary files a/html/npm-large.png and /dev/null differ diff --git a/html/npm-logo-white-trans.png b/html/npm-logo-white-trans.png deleted file mode 100644 index 531569a172f88..0000000000000 Binary files a/html/npm-logo-white-trans.png and /dev/null differ diff --git a/html/npm.png b/html/npm.png deleted file mode 100644 index 9b4b769279c6a..0000000000000 Binary files a/html/npm.png and /dev/null differ diff --git a/html/static/style.css b/html/static/style.css deleted file mode 100644 index 7a7f6ea5df871..0000000000000 --- a/html/static/style.css +++ /dev/null @@ -1,336 +0,0 @@ -/* reset */ -* { - margin:0; - padding:0; - border:none; - font-family:inherit; - font-size:inherit; - font-weight:inherit; -} -:target::before { - content:" >>> "; - position:absolute; - display:block; - opacity:0.5; - color:#f00; - margin:0 0 0 -2em; -} -abbr, acronym { - border-bottom:1px dotted #aaa; -} -kbd, code, pre { - font-family:monospace; - margin:0; - font-size:18px; - line-height:24px; - background:#eee; - outline:1px solid #ccc; -} -kbd code, kbd pre, kbd kbd, -pre code, pre pre, pre kbd, -code code, code pre, code kbd { outline: none } -.dollar::before { - content:"$ "; - display:inline; -} -p, ul, ol, dl, pre { - margin:30px 0; - line-height:30px; -} -hr { - margin:30px auto 29px; - width:66%; - height:1px; - background:#aaa; -} -pre { - display:block; -} -dd :first-child { - margin-top:0; -} - -body { - quotes:"“" "”" "‘" "’"; - width:666px; - margin:30px auto 120px; - font-family:Times New Roman, serif; - font-size:20px; - background:#fff; - line-height:30px; - color:#111; -} - -blockquote { - position:relative; - font-size:16px; - line-height:30px; - font-weight:bold; - width:85%; - margin:0 auto; -} -blockquote::before { - font-size:90px; - display:block; - position:absolute; - top:20px; - right:100%; - content:"“"; - padding-right:10px; - color:#ccc; -} -.source cite::before { - content:"— "; -} -.source { - padding-left:20%; - margin-top:30px; -} -.source cite span { - font-style:normal; -} -blockquote p { - margin-bottom:0; -} -.quote blockquote { 
- font-weight:normal; -} - -h1, h2, h3, h4, h5, h6, dt, #header { - font-family:serif; - font-size:20px; - font-weight:bold; -} -h2 { - background:#eee; -} -h1, h2 { - line-height:40px; -} - -i, em, cite { - font-style:italic; -} -b, strong { - font-weight:bold; -} -i, em, cite, b, strong, small { - line-height:28px; -} -small, .small, .small *, aside { - font-style:italic; - color:#669; - font-size:18px; -} -small a, .small a { - text-decoration:underline; -} -del { - text-decoration:line-through; -} -ins { - text-decoration:underline; -} -.alignright { display:block; float:right; margin-left:1em; } -.alignleft { display:block; float:left; margin-right:1em; } - -q:before, q q q:before, q q q q q:before, q q q q q q q:before { content:"“"; } -q q:before, q q q q:before, q q q q q q:before, q q q q q q q q:before { content:"‘"; } -q:after, q q q:after, q q q q q:after, q q q q q q q:after { content:"”"; } -q q:after, q q q q:after, q q q q q q:after, q q q q q q q q:after { content:"’"; } - -a { color:#00f; text-decoration:none; } -a:visited { color:#636; } -a:hover, a:active { color:#c00!important; text-decoration:underline; } - -h1 { - font-weight:bold; - background:#fff; -} -h1 a, h1 a:visited { - font-family:monospace; - font-size:60px; - color:#c00; - display:block; -} -h1 a:focus, h1 a:hover, h1 a:active { - color:#f00!important; - text-decoration:none; -} - -.navigation { - display:table; - width:100%; - margin:0 0 30px 0; - position:relative; -} -#nav-above { - margin-bottom:0; -} -.navigation .nav-previous { - display:table-cell; - text-align:left; - width:50%; -} -/* hang the » and « off into the margins */ -.navigation .nav-previous a:before, .navigation .nav-next a:after { - content: "«"; - display:block; - height:30px; - margin-bottom:-30px; - text-decoration:none; - margin-left:-15px; -} -.navigation .nav-next a:after { - content: "»"; - text-align:right; - margin-left:0; - margin-top:-30px; - margin-right:-15px; -} - - -.navigation .nav-next { - display:table-cell; - text-align:right; - width:50%; -} -.navigation a { - display:block; - width:100%; - height:100%; -} - -input, button, textarea { - border:0; - line-height:30px; -} -textarea { - height:300px; -} -input { - height:30px; - line-height:30px; -} -input.submit, input#submit, input.button, button, input[type=submit] { - cursor:hand; cursor:pointer; - outline:1px solid #ccc; -} - -#wrapper { - margin-bottom:90px; - position:relative; - z-index:1; - *zoom:1; - background:#fff; -} -#wrapper:after { - display:block; - content:"."; - visibility:hidden; - width:0; - height:0; - clear:both; -} - -.sidebar .xoxo > li { - float:left; - width:50%; -} -.sidebar li { - list-style:none; -} -.sidebar #elsewhere { - margin-left:-10%; - margin-right:-10%; -} -.sidebar #rss-links, .sidebar #twitter-feeds { - float:right; - clear:right; - width:20%; -} -.sidebar #comment { - clear:both; - float:none; - width:100%; -} -.sidebar #search { - clear:both; - float:none; - width:100%; -} -.sidebar #search h2 { - margin-left:40%; -} -.sidebar #search #s { - width:90%; - float:left; -} -.sidebar #search #searchsubmit { - width:10%; - float:right; -} -.sidebar * { - font-size:15px; - line-height:30px; -} - -#footer, #footer * { - text-align:center; - font-size:16px; - color:#ccc; - font-style:italic; - word-spacing:1em; - margin-top:0; -} - -#toc { - position:absolute; - top:0; - right:0; - padding:40px 0 40px 20px; - margin:0; - width:200px; - opacity:0.2; - z-index:-1; -} -#toc:hover { - opacity:1; - background:#fff; - z-index:999; -} -#toc ul 
{ - padding:0; - margin:0; -} -#toc, #toc li { - list-style-type:none; - font-size:15px; - line-height:15px; -} -#toc li { - padding:0 0 0 10px; -} -#toc li a { - position:relative; - display:block; -} - -table#npmlogo { - line-height:10px; - width:180px; - margin:0 auto; -} - -@media print { - a[href] { - color:inherit; - } - a[href]:after { - white-space:nowrap; - content:" " attr(href); - } - a[href^=\#], .navigation { - display:none; - } -} diff --git a/html/static/toc.js b/html/static/toc.js deleted file mode 100644 index 7551e47efdf48..0000000000000 --- a/html/static/toc.js +++ /dev/null @@ -1,29 +0,0 @@ -;(function () { - var wrapper = document.getElementById('wrapper') - var els = Array.prototype.slice.call(wrapper.getElementsByTagName('*'), 0) - .filter(function (el) { - return el.parentNode === wrapper && - el.tagName.match(/H[1-6]/) && - el.id - }) - var l = 2 - var toc = document.createElement('ul') - toc.innerHTML = els.map(function (el) { - var i = el.tagName.charAt(1) - var out = '' - while (i > l) { - out += '
    ' - l++ - } - while (i < l) { - out += '
' - l-- - } - out += '
  • ' + - (el.innerText || el.text || el.innerHTML) + - '' - return out - }).join('\n') - toc.id = 'toc' - document.body.appendChild(toc) -})() diff --git a/lib/access.js b/lib/access.js index 164ea3b7d741a..e850c132eb24b 100644 --- a/lib/access.js +++ b/lib/access.js @@ -1,28 +1,50 @@ 'use strict' /* eslint-disable standard/no-callback-literal */ -var resolve = require('path').resolve +const BB = require('bluebird') -var readPackageJson = require('read-package-json') -var mapToRegistry = require('./utils/map-to-registry.js') -var npm = require('./npm.js') -var output = require('./utils/output.js') - -var whoami = require('./whoami') +const figgyPudding = require('figgy-pudding') +const libaccess = require('libnpm/access') +const npmConfig = require('./config/figgy-config.js') +const output = require('./utils/output.js') +const otplease = require('./utils/otplease.js') +const path = require('path') +const prefix = require('./npm.js').prefix +const readPackageJson = BB.promisify(require('read-package-json')) +const usage = require('./utils/usage.js') +const whoami = require('./whoami.js') module.exports = access -access.usage = +access.usage = usage( + 'npm access', 'npm access public []\n' + 'npm access restricted []\n' + 'npm access grant []\n' + 'npm access revoke []\n' + + 'npm access 2fa-required []\n' + + 'npm access 2fa-not-required []\n' + 'npm access ls-packages [||]\n' + 'npm access ls-collaborators [ []]\n' + 'npm access edit []' +) + +access.subcommands = [ + 'public', 'restricted', 'grant', 'revoke', + 'ls-packages', 'ls-collaborators', 'edit', + '2fa-required', '2fa-not-required' +] + +const AccessConfig = figgyPudding({ + json: {} +}) -access.subcommands = ['public', 'restricted', 'grant', 'revoke', - 'ls-packages', 'ls-collaborators', 'edit'] +function UsageError (msg = '') { + throw Object.assign(new Error( + (msg ? `\nUsage: ${msg}\n\n` : '') + + access.usage + ), {code: 'EUSAGE'}) +} access.completion = function (opts, cb) { var argv = opts.conf.argv.remain @@ -42,6 +64,8 @@ access.completion = function (opts, cb) { case 'ls-packages': case 'ls-collaborators': case 'edit': + case '2fa-required': + case '2fa-not-required': return cb(null, []) case 'revoke': return cb(null, []) @@ -50,81 +74,125 @@ access.completion = function (opts, cb) { } } -function access (args, cb) { - var cmd = args.shift() - var params - return parseParams(cmd, args, function (err, p) { - if (err) { return cb(err) } - params = p - return mapToRegistry(params.package, npm.config, invokeCmd) - }) +function access ([cmd, ...args], cb) { + return BB.try(() => { + const fn = access.subcommands.includes(cmd) && access[cmd] + if (!cmd) { UsageError('Subcommand is required.') } + if (!fn) { UsageError(`${cmd} is not a recognized subcommand.`) } - function invokeCmd (err, uri, auth, base) { - if (err) { return cb(err) } - params.auth = auth - try { - return npm.registry.access(cmd, uri, params, function (err, data) { - if (!err && data) { - output(JSON.stringify(data, undefined, 2)) - } - cb(err, data) - }) - } catch (e) { - cb(e.message + '\n\nUsage:\n' + access.usage) - } - } + return fn(args, AccessConfig(npmConfig())) + }).then( + x => cb(null, x), + err => err.code === 'EUSAGE' ? 
cb(err.message) : cb(err) + ) } -function parseParams (cmd, args, cb) { - // mapToRegistry will complain if package is undefined, - // but it's not needed for ls-packages - var params = { 'package': '' } - if (cmd === 'grant') { - params.permissions = args.shift() - } - if (['grant', 'revoke', 'ls-packages'].indexOf(cmd) !== -1) { - var entity = (args.shift() || '').split(':') - params.scope = entity[0] - params.team = entity[1] - } +access.public = ([pkg], opts) => { + return modifyPackage(pkg, opts, libaccess.public) +} - if (cmd === 'ls-packages') { - if (!params.scope) { - whoami([], true, function (err, scope) { - params.scope = scope - cb(err, params) - }) - } else { - cb(null, params) +access.restricted = ([pkg], opts) => { + return modifyPackage(pkg, opts, libaccess.restricted) +} + +access.grant = ([perms, scopeteam, pkg], opts) => { + return BB.try(() => { + if (!perms || (perms !== 'read-only' && perms !== 'read-write')) { + UsageError('First argument must be either `read-only` or `read-write.`') + } + if (!scopeteam) { + UsageError('`` argument is required.') + } + const [, scope, team] = scopeteam.match(/^@?([^:]+):(.*)$/) || [] + if (!scope && !team) { + UsageError( + 'Second argument used incorrect format.\n' + + 'Example: @example:developers' + ) } - } else { - getPackage(args.shift(), function (err, pkg) { - if (err) return cb(err) - params.package = pkg + return modifyPackage(pkg, opts, (pkgName, opts) => { + return libaccess.grant(pkgName, scopeteam, perms, opts) + }, false) + }) +} - if (cmd === 'ls-collaborators') params.user = args.shift() - cb(null, params) +access.revoke = ([scopeteam, pkg], opts) => { + return BB.try(() => { + if (!scopeteam) { + UsageError('`` argument is required.') + } + const [, scope, team] = scopeteam.match(/^@?([^:]+):(.*)$/) || [] + if (!scope || !team) { + UsageError( + 'First argument used incorrect format.\n' + + 'Example: @example:developers' + ) + } + return modifyPackage(pkg, opts, (pkgName, opts) => { + return libaccess.revoke(pkgName, scopeteam, opts) }) - } + }) +} + +access['2fa-required'] = access.tfaRequired = ([pkg], opts) => { + return modifyPackage(pkg, opts, libaccess.tfaRequired, false) +} + +access['2fa-not-required'] = access.tfaNotRequired = ([pkg], opts) => { + return modifyPackage(pkg, opts, libaccess.tfaNotRequired, false) +} + +access['ls-packages'] = access.lsPackages = ([owner], opts) => { + return ( + owner ? 
BB.resolve(owner) : BB.fromNode(cb => whoami([], true, cb)) + ).then(owner => { + return libaccess.lsPackages(owner, opts) + }).then(pkgs => { + // TODO - print these out nicely (breaking change) + output(JSON.stringify(pkgs, null, 2)) + }) +} + +access['ls-collaborators'] = access.lsCollaborators = ([pkg, usr], opts) => { + return getPackage(pkg, false).then(pkgName => + libaccess.lsCollaborators(pkgName, usr, opts) + ).then(collabs => { + // TODO - print these out nicely (breaking change) + output(JSON.stringify(collabs, null, 2)) + }) } -function getPackage (name, cb) { - if (name && name.trim()) { - cb(null, name.trim()) - } else { - readPackageJson( - resolve(npm.prefix, 'package.json'), - function (err, data) { - if (err) { +access['edit'] = () => BB.reject(new Error('edit subcommand is not implemented yet')) + +function modifyPackage (pkg, opts, fn, requireScope = true) { + return getPackage(pkg, requireScope).then(pkgName => + otplease(opts, opts => fn(pkgName, opts)) + ) +} + +function getPackage (name, requireScope = true) { + return BB.try(() => { + if (name && name.trim()) { + return name.trim() + } else { + return readPackageJson( + path.resolve(prefix, 'package.json') + ).then( + data => data.name, + err => { if (err.code === 'ENOENT') { - cb(new Error('no package name passed to command and no package.json found')) + throw new Error('no package name passed to command and no package.json found') } else { - cb(err) + throw err } - } else { - cb(null, data.name) } - } - ) - } + ) + } + }).then(name => { + if (requireScope && !name.match(/^@[^/]+\/.*$/)) { + UsageError('This command is only available for scoped packages.') + } else { + return name + } + }) } diff --git a/lib/adduser.js b/lib/adduser.js index e1c221032568d..cf82ff5b04915 100644 --- a/lib/adduser.js +++ b/lib/adduser.js @@ -1,9 +1,9 @@ module.exports = adduser -var log = require('npmlog') -var npm = require('./npm.js') -var usage = require('./utils/usage') -var crypto +const log = require('npmlog') +const npm = require('./npm.js') +const usage = require('./utils/usage') +let crypto try { crypto = require('crypto') @@ -21,20 +21,21 @@ function adduser (args, cb) { )) } - var registry = npm.config.get('registry') - var scope = npm.config.get('scope') - var creds = npm.config.getCredentialsByURI(npm.config.get('registry')) + let registry = npm.config.get('registry') + const scope = npm.config.get('scope') + const creds = npm.config.getCredentialsByURI(npm.config.get('registry')) if (scope) { - var scopedRegistry = npm.config.get(scope + ':registry') - var cliRegistry = npm.config.get('registry', 'cli') + const scopedRegistry = npm.config.get(scope + ':registry') + const cliRegistry = npm.config.get('registry', 'cli') if (scopedRegistry && !cliRegistry) registry = scopedRegistry } log.disableProgress() + let auth try { - var auth = require('./auth/' + npm.config.get('auth-type')) + auth = require('./auth/' + npm.config.get('auth-type')) } catch (e) { return cb(new Error('no such auth module')) } diff --git a/lib/audit.js b/lib/audit.js index 06852610e6466..7b694c13c5d7f 100644 --- a/lib/audit.js +++ b/lib/audit.js @@ -3,23 +3,43 @@ const Bluebird = require('bluebird') const audit = require('./install/audit.js') +const figgyPudding = require('figgy-pudding') const fs = require('graceful-fs') const Installer = require('./install.js').Installer const lockVerify = require('lock-verify') const log = require('npmlog') -const npa = require('npm-package-arg') +const npa = require('libnpm/parse-arg') const npm = 
require('./npm.js') +const npmConfig = require('./config/figgy-config.js') const output = require('./utils/output.js') const parseJson = require('json-parse-better-errors') const readFile = Bluebird.promisify(fs.readFile) +const AuditConfig = figgyPudding({ + also: {}, + 'audit-level': {}, + deepArgs: 'deep-args', + 'deep-args': {}, + dev: {}, + force: {}, + 'dry-run': {}, + global: {}, + json: {}, + only: {}, + parseable: {}, + prod: {}, + production: {}, + registry: {}, + runId: {} +}) + module.exports = auditCmd const usage = require('./utils/usage') auditCmd.usage = usage( 'audit', - '\nnpm audit [--json]' + + '\nnpm audit [--json] [--production]' + '\nnpm audit fix ' + '[--force|--package-lock-only|--dry-run|--production|--only=(dev|prod)]' ) @@ -110,12 +130,12 @@ function maybeReadFile (name) { }) } -function filterEnv (action) { - const includeDev = npm.config.get('dev') || - (!/^prod(uction)?$/.test(npm.config.get('only')) && !npm.config.get('production')) || - /^dev(elopment)?$/.test(npm.config.get('only')) || - /^dev(elopment)?$/.test(npm.config.get('also')) - const includeProd = !/^dev(elopment)?$/.test(npm.config.get('only')) +function filterEnv (action, opts) { + const includeDev = opts.dev || + (!/^prod(uction)?$/.test(opts.only) && !opts.production) || + /^dev(elopment)?$/.test(opts.only) || + /^dev(elopment)?$/.test(opts.also) + const includeProd = !/^dev(elopment)?$/.test(opts.only) const resolves = action.resolves.filter(({dev}) => { return (dev && includeDev) || (!dev && includeProd) }) @@ -125,7 +145,8 @@ function filterEnv (action) { } function auditCmd (args, cb) { - if (npm.config.get('global')) { + const opts = AuditConfig(npmConfig()) + if (opts.global) { const err = new Error('`npm audit` does not support testing globals') err.code = 'EAUDITGLOBAL' throw err @@ -154,7 +175,7 @@ function auditCmd (args, cb) { const requires = Object.assign( {}, (pkgJson && pkgJson.dependencies) || {}, - (pkgJson && pkgJson.devDependencies) || {} + (!opts.production && pkgJson && pkgJson.devDependencies) || {} ) return lockVerify(npm.prefix).then((result) => { if (result.status) return audit.generate(sw, requires) @@ -168,8 +189,19 @@ function auditCmd (args, cb) { }).then((auditReport) => { return audit.submitForFullReport(auditReport) }).catch((err) => { - if (err.statusCode === 404 || err.statusCode >= 500) { - const ne = new Error(`Your configured registry (${npm.config.get('registry')}) does not support audit requests.`) + if (err.statusCode >= 400) { + let msg + if (err.statusCode === 401) { + msg = `Either your login credentials are invalid or your registry (${opts.registry}) does not support audit.` + } else if (err.statusCode === 404) { + msg = `Your configured registry (${opts.registry}) does not support audit requests.` + } else { + msg = `Your configured registry (${opts.registry}) may not support audit requests, or the audit endpoint may be temporarily unavailable.` + } + if (err.body.length) { + msg += '\nThe server said: ' + err.body + } + const ne = new Error(msg) ne.code = 'ENOAUDIT' ne.wrapped = err throw ne @@ -178,7 +210,7 @@ function auditCmd (args, cb) { }).then((auditResult) => { if (args[0] === 'fix') { const actions = (auditResult.actions || []).reduce((acc, action) => { - action = filterEnv(action) + action = filterEnv(action, opts) if (!action) { return acc } if (action.isMajor) { acc.major.add(`${action.module}@${action.target}`) @@ -215,7 +247,7 @@ function auditCmd (args, cb) { review: new Set() }) return Bluebird.try(() => { - const installMajor = 
npm.config.get('force') + const installMajor = opts.force const installCount = actions.install.size + (installMajor ? actions.major.size : 0) + actions.update.size const vulnFixCount = new Set([...actions.installFixes, ...actions.updateFixes, ...(installMajor ? actions.majorFixes : [])]).size const metavuln = auditResult.metadata.vulnerabilities @@ -230,22 +262,22 @@ function auditCmd (args, cb) { return Bluebird.fromNode(cb => { new Auditor( npm.prefix, - !!npm.config.get('dry-run'), + !!opts['dry-run'], [...actions.install, ...(installMajor ? actions.major : [])], - { + opts.concat({ runId: auditResult.runId, deepArgs: [...actions.update].map(u => u.split('>')) - } + }).toJSON() ).run(cb) }).then(() => { const numScanned = auditResult.metadata.totalDependencies - if (!npm.config.get('json') && !npm.config.get('parseable')) { + if (!opts.json && !opts.parseable) { output(`fixed ${vulnFixCount} of ${total} vulnerabilit${total === 1 ? 'y' : 'ies'} in ${numScanned} scanned package${numScanned === 1 ? '' : 's'}`) if (actions.review.size) { output(` ${actions.review.size} vulnerabilit${actions.review.size === 1 ? 'y' : 'ies'} required manual review and could not be updated`) } if (actions.major.size) { - output(` ${actions.major.size} package update${actions.major.size === 1 ? '' : 's'} for ${actions.majorFixes.size} vuln${actions.majorFixes.size === 1 ? '' : 's'} involved breaking changes`) + output(` ${actions.major.size} package update${actions.major.size === 1 ? '' : 's'} for ${actions.majorFixes.size} vulnerabilit${actions.majorFixes.size === 1 ? 'y' : 'ies'} involved breaking changes`) if (installMajor) { output(' (installed due to `--force` option)') } else { @@ -258,12 +290,12 @@ function auditCmd (args, cb) { }) } else { const levels = ['low', 'moderate', 'high', 'critical'] - const minLevel = levels.indexOf(npm.config.get('audit-level')) + const minLevel = levels.indexOf(opts['audit-level']) const vulns = levels.reduce((count, level, i) => { return i < minLevel ? 
count : count + (auditResult.metadata.vulnerabilities[level] || 0) }, 0) if (vulns > 0) process.exitCode = 1 - if (npm.config.get('parseable')) { + if (opts.parseable) { return audit.printParseableReport(auditResult) } else { return audit.printFullReport(auditResult) diff --git a/lib/auth/legacy.js b/lib/auth/legacy.js index 8c25df0288e67..7ad678be5e5c1 100644 --- a/lib/auth/legacy.js +++ b/lib/auth/legacy.js @@ -1,11 +1,11 @@ 'use strict' + const read = require('../utils/read-user-info.js') -const profile = require('npm-profile') +const profile = require('libnpm/profile') const log = require('npmlog') -const npm = require('../npm.js') +const figgyPudding = require('figgy-pudding') +const npmConfig = require('../config/figgy-config.js') const output = require('../utils/output.js') -const pacoteOpts = require('../config/pacote') -const fetchOpts = require('../config/fetch-opts') const openUrl = require('../utils/open-url') const openerPromise = (url) => new Promise((resolve, reject) => { @@ -26,54 +26,54 @@ const loginPrompter = (creds) => { }) } -module.exports.login = (creds, registry, scope, cb) => { - const conf = { - log: log, - creds: creds, - registry: registry, - auth: { - otp: npm.config.get('otp') - }, - scope: scope, - opts: fetchOpts.fromPacote(pacoteOpts()) - } - login(conf).then((newCreds) => cb(null, newCreds)).catch(cb) +const LoginOpts = figgyPudding({ + 'always-auth': {}, + creds: {}, + log: {default: () => log}, + registry: {}, + scope: {} +}) + +module.exports.login = (creds = {}, registry, scope, cb) => { + const opts = LoginOpts(npmConfig()).concat({scope, registry, creds}) + login(opts).then((newCreds) => cb(null, newCreds)).catch(cb) } -function login (conf) { - return profile.login(openerPromise, loginPrompter, conf) +function login (opts) { + return profile.login(openerPromise, loginPrompter, opts) .catch((err) => { if (err.code === 'EOTP') throw err - const u = conf.creds.username - const p = conf.creds.password - const e = conf.creds.email + const u = opts.creds.username + const p = opts.creds.password + const e = opts.creds.email if (!(u && p && e)) throw err - return profile.adduserCouch(u, e, p, conf) + return profile.adduserCouch(u, e, p, opts) }) .catch((err) => { if (err.code !== 'EOTP') throw err - return read.otp('Enter one-time password from your authenticator app: ').then((otp) => { - conf.auth.otp = otp - const u = conf.creds.username - const p = conf.creds.password - return profile.loginCouch(u, p, conf) + return read.otp( + 'Enter one-time password from your authenticator app: ' + ).then(otp => { + const u = opts.creds.username + const p = opts.creds.password + return profile.loginCouch(u, p, opts.concat({otp})) }) }).then((result) => { const newCreds = {} if (result && result.token) { newCreds.token = result.token } else { - newCreds.username = conf.creds.username - newCreds.password = conf.creds.password - newCreds.email = conf.creds.email - newCreds.alwaysAuth = npm.config.get('always-auth') + newCreds.username = opts.creds.username + newCreds.password = opts.creds.password + newCreds.email = opts.creds.email + newCreds.alwaysAuth = opts['always-auth'] } - const usermsg = conf.creds.username ? ' user ' + conf.creds.username : '' - conf.log.info('login', 'Authorized' + usermsg) - const scopeMessage = conf.scope ? ' to scope ' + conf.scope : '' - const userout = conf.creds.username ? ' as ' + conf.creds.username : '' - output('Logged in%s%s on %s.', userout, scopeMessage, conf.registry) + const usermsg = opts.creds.username ? 
' user ' + opts.creds.username : '' + opts.log.info('login', 'Authorized' + usermsg) + const scopeMessage = opts.scope ? ' to scope ' + opts.scope : '' + const userout = opts.creds.username ? ' as ' + opts.creds.username : '' + output('Logged in%s%s on %s.', userout, scopeMessage, opts.registry) return newCreds }) } diff --git a/lib/auth/sso.js b/lib/auth/sso.js index 519ca8496c74c..099e764e3ab40 100644 --- a/lib/auth/sso.js +++ b/lib/auth/sso.js @@ -1,56 +1,73 @@ -var log = require('npmlog') -var npm = require('../npm.js') -var output = require('../utils/output') -var openUrl = require('../utils/open-url') +'use strict' + +const BB = require('bluebird') + +const figgyPudding = require('figgy-pudding') +const log = require('npmlog') +const npmConfig = require('../config/figgy-config.js') +const npmFetch = require('npm-registry-fetch') +const output = require('../utils/output.js') +const openUrl = BB.promisify(require('../utils/open-url.js')) +const otplease = require('../utils/otplease.js') +const profile = require('libnpm/profile') + +const SsoOpts = figgyPudding({ + ssoType: 'sso-type', + 'sso-type': {}, + ssoPollFrequency: 'sso-poll-frequency', + 'sso-poll-frequency': {} +}) module.exports.login = function login (creds, registry, scope, cb) { - var ssoType = npm.config.get('sso-type') + const opts = SsoOpts(npmConfig()).concat({creds, registry, scope}) + const ssoType = opts.ssoType if (!ssoType) { return cb(new Error('Missing option: sso-type')) } - var params = { - // We're reusing the legacy login endpoint, so we need some dummy - // stuff here to pass validation. They're never used. - auth: { - username: 'npm_' + ssoType + '_auth_dummy_user', - password: 'placeholder', - email: 'support@npmjs.com', - authType: ssoType - } + // We're reusing the legacy login endpoint, so we need some dummy + // stuff here to pass validation. They're never used. + const auth = { + username: 'npm_' + ssoType + '_auth_dummy_user', + password: 'placeholder', + email: 'support@npmjs.com', + authType: ssoType } - npm.registry.adduser(registry, params, function (er, doc) { - if (er) return cb(er) - if (!doc || !doc.token) return cb(new Error('no SSO token returned')) - if (!doc.sso) return cb(new Error('no SSO URL returned by services')) - - openUrl(doc.sso, 'to complete your login please visit', function () { - pollForSession(registry, doc.token, function (err, username) { - if (err) return cb(err) - log.info('adduser', 'Authorized user %s', username) - var scopeMessage = scope ? ' to scope ' + scope : '' - output('Logged in as %s%s on %s.', username, scopeMessage, registry) - - cb(null, { token: doc.token }) - }) + otplease(opts, + opts => profile.loginCouch(auth.username, auth.password, opts) + ).then(({token, sso}) => { + if (!token) { throw new Error('no SSO token returned') } + if (!sso) { throw new Error('no SSO URL returned by services') } + return openUrl(sso, 'to complete your login please visit').then(() => { + return pollForSession(registry, token, opts) + }).then(username => { + log.info('adduser', 'Authorized user %s', username) + var scopeMessage = scope ? 
' to scope ' + scope : '' + output('Logged in as %s%s on %s.', username, scopeMessage, registry) + return {token} }) - }) + }).nodeify(cb) } -function pollForSession (registry, token, cb) { +function pollForSession (registry, token, opts) { log.info('adduser', 'Polling for validated SSO session') - npm.registry.whoami(registry, { - auth: { - token: token - } - }, function (er, username) { - if (er && er.statusCode !== 401) { - cb(er) - } else if (!username) { - setTimeout(function () { - pollForSession(registry, token, cb) - }, npm.config.get('sso-poll-frequency')) - } else { - cb(null, username) + return npmFetch.json( + '/-/whoami', opts.concat({registry, forceAuth: {token}}) + ).then( + ({username}) => username, + err => { + if (err.code === 'E401') { + return sleep(opts['sso-poll-frequency']).then(() => { + return pollForSession(registry, token, opts) + }) + } else { + throw err + } } + ) +} + +function sleep (time) { + return new BB((resolve) => { + setTimeout(resolve, time) }) } diff --git a/lib/cache.js b/lib/cache.js index 169f192cad5f2..00abd8c746ab7 100644 --- a/lib/cache.js +++ b/lib/cache.js @@ -9,9 +9,9 @@ const finished = BB.promisify(require('mississippi').finished) const log = require('npmlog') const npa = require('npm-package-arg') const npm = require('./npm.js') +const npmConfig = require('./config/figgy-config.js') const output = require('./utils/output.js') const pacote = require('pacote') -const pacoteOpts = require('./config/pacote') const path = require('path') const rm = BB.promisify(require('./utils/gently-rm.js')) const unbuild = BB.promisify(npm.commands.unbuild) @@ -107,7 +107,7 @@ function add (args, where) { log.verbose('cache add', 'spec', spec) if (!spec) return BB.reject(new Error(usage)) log.silly('cache add', 'parsed spec', spec) - return finished(pacote.tarball.stream(spec, pacoteOpts({where})).resume()) + return finished(pacote.tarball.stream(spec, npmConfig({where})).resume()) } cache.verify = verify @@ -131,7 +131,7 @@ function verify () { cache.unpack = unpack function unpack (pkg, ver, unpackTarget, dmode, fmode, uid, gid) { return unbuild([unpackTarget], true).then(() => { - const opts = pacoteOpts({dmode, fmode, uid, gid, offline: true}) + const opts = npmConfig({dmode, fmode, uid, gid, offline: true}) return pacote.extract(npa.resolve(pkg, ver), unpackTarget, opts) }) } diff --git a/lib/ci.js b/lib/ci.js index 03822b9528d1d..d5b5c15442512 100644 --- a/lib/ci.js +++ b/lib/ci.js @@ -1,40 +1,47 @@ 'use strict' -const Installer = require('libcipm') -const lifecycleOpts = require('./config/lifecycle.js') const npm = require('./npm.js') -const npmlog = require('npmlog') -const pacoteOpts = require('./config/pacote.js') +const Installer = require('libcipm') +const log = require('npmlog') +const path = require('path') +const pack = require('./pack.js') ci.usage = 'npm ci' ci.completion = (cb) => cb(null, []) -Installer.CipmConfig.impl(npm.config, { - get: npm.config.get, - set: npm.config.set, - toLifecycle (moreOpts) { - return lifecycleOpts(moreOpts) - }, - toPacote (moreOpts) { - return pacoteOpts(moreOpts) - } -}) - module.exports = ci function ci (args, cb) { - return new Installer({ - config: npm.config, - log: npmlog - }) - .run() - .then( - (details) => { - npmlog.disableProgress() - console.log(`added ${details.pkgCount} packages in ${ - details.runTime / 1000 - }s`) - } - ) - .then(() => cb(), cb) + const opts = { + // Add some non-npm-config opts by hand. 
+ cache: path.join(npm.config.get('cache'), '_cacache'), + // NOTE: npm has some magic logic around color distinct from the config + // value, so we have to override it here + color: !!npm.color, + hashAlgorithm: 'sha1', + includeDeprecated: false, + log, + 'npm-session': npm.session, + 'project-scope': npm.projectScope, + refer: npm.referer, + dmode: npm.modes.exec, + fmode: npm.modes.file, + umask: npm.modes.umask, + npmVersion: npm.version, + tmp: npm.tmp, + dirPacker: pack.packGitDep + } + + for (const key in npm.config.list[0]) { + if (!['log', 'cache'].includes(key)) { + opts[key] = npm.config.list[0][key] + } + } + + return new Installer(opts).run().then(details => { + log.disableProgress() + console.log(`added ${details.pkgCount} packages in ${ + details.runTime / 1000 + }s`) + }).then(() => cb(), cb) } diff --git a/lib/config.js b/lib/config.js index 0d4161d3b53e8..5f9819879be23 100644 --- a/lib/config.js +++ b/lib/config.js @@ -11,7 +11,7 @@ var ini = require('ini') var editor = require('editor') var os = require('os') var path = require('path') -var mkdirp = require('mkdirp') +var mkdirp = require('gentle-fs').mkdir var umask = require('./utils/umask') var usage = require('./utils/usage') var output = require('./utils/output') diff --git a/lib/config/cmd-list.js b/lib/config/cmd-list.js index a453082adc1bc..d9d0d85b7d520 100644 --- a/lib/config/cmd-list.js +++ b/lib/config/cmd-list.js @@ -50,7 +50,9 @@ var affordances = { 'rm': 'uninstall', 'r': 'uninstall', 'rum': 'run-script', - 'sit': 'cit' + 'sit': 'cit', + 'urn': 'run-script', + 'ogr': 'org' } // these are filenames in . @@ -89,6 +91,8 @@ var cmdList = [ 'token', 'profile', 'audit', + 'fund', + 'org', 'help', 'help-search', diff --git a/lib/config/core.js b/lib/config/core.js index b9851f98d0e0c..36420b3450163 100644 --- a/lib/config/core.js +++ b/lib/config/core.js @@ -8,7 +8,7 @@ var path = require('path') var nopt = require('nopt') var ini = require('ini') var Umask = configDefs.Umask -var mkdirp = require('mkdirp') +var mkdirp = require('gentle-fs').mkdir var umask = require('../utils/umask') var isWindows = require('../utils/is-windows.js') @@ -31,10 +31,8 @@ enumerable: true }) exports.validate = validate -var myUid = process.env.SUDO_UID !== undefined - ? process.env.SUDO_UID : (process.getuid && process.getuid()) -var myGid = process.env.SUDO_GID !== undefined - ? 
process.env.SUDO_GID : (process.getgid && process.getgid()) +var myUid = process.getuid && process.getuid() +var myGid = process.getgid && process.getgid() var loading = false var loadCbs = [] @@ -218,7 +216,6 @@ function Conf (base) { Conf.prototype.loadPrefix = require('./load-prefix.js') Conf.prototype.loadCAFile = require('./load-cafile.js') -Conf.prototype.loadUid = require('./load-uid.js') Conf.prototype.setUser = require('./set-user.js') Conf.prototype.getCredentialsByURI = require('./get-credentials-by-uri.js') Conf.prototype.setCredentialsByURI = require('./set-credentials-by-uri.js') @@ -227,11 +224,8 @@ Conf.prototype.clearCredentialsByURI = require('./clear-credentials-by-uri.js') Conf.prototype.loadExtras = function (cb) { this.setUser(function (er) { if (er) return cb(er) - this.loadUid(function (er) { - if (er) return cb(er) - // Without prefix, nothing will ever work - mkdirp(this.prefix, cb) - }.bind(this)) + // Without prefix, nothing will ever work + mkdirp(this.prefix, cb) }.bind(this)) } @@ -287,15 +281,21 @@ Conf.prototype.save = function (where, cb) { done(null) }) } else { - mkdirp(path.dirname(target.path), function (er) { + // we don't have to use inferOwner here, because gentle-fs will + // mkdir with the correctly inferred ownership. Just preserve it. + const dir = path.dirname(target.path) + mkdirp(dir, function (er) { if (er) return then(er) - fs.writeFile(target.path, data, 'utf8', function (er) { + fs.stat(dir, (er, st) => { if (er) return then(er) - if (where === 'user' && myUid && myGid) { - fs.chown(target.path, +myUid, +myGid, then) - } else { - then() - } + fs.writeFile(target.path, data, 'utf8', function (er) { + if (er) return then(er) + if (myUid === 0 && (myUid !== st.uid || myGid !== st.gid)) { + fs.chown(target.path, st.uid, st.gid, then) + } else { + then() + } + }) }) }) } diff --git a/lib/config/defaults.js b/lib/config/defaults.js index 991a2129f6894..e07da3aaf97f4 100644 --- a/lib/config/defaults.js +++ b/lib/config/defaults.js @@ -113,6 +113,7 @@ Object.defineProperty(exports, 'defaults', {get: function () { 'audit-level': 'low', 'auth-type': 'legacy', + 'before': null, 'bin-links': true, browser: null, @@ -140,6 +141,9 @@ Object.defineProperty(exports, 'defaults', {get: function () { editor: osenv.editor(), 'engine-strict': false, force: false, + 'format-package-lock': true, + + fund: true, 'fetch-retries': 2, 'fetch-retry-factor': 10, @@ -200,7 +204,8 @@ Object.defineProperty(exports, 'defaults', {get: function () { 'user-agent': 'npm/{npm-version} ' + 'node/{node-version} ' + '{platform} ' + - '{arch}', + '{arch} ' + + '{ci}', 'read-only': false, 'rebuild-bundle': true, registry: 'https://registry.npmjs.org/', @@ -239,7 +244,7 @@ Object.defineProperty(exports, 'defaults', {get: function () { process.getuid() !== 0, 'update-notifier': true, usage: false, - user: process.platform === 'win32' ? 0 : 'nobody', + user: (process.platform === 'win32' || os.type() === 'OS400') ? 0 : 'nobody', userconfig: path.resolve(home, '.npmrc'), umask: process.umask ? 
process.umask() : umask.fromString('022'), version: false, @@ -260,6 +265,7 @@ exports.types = { audit: Boolean, 'audit-level': ['low', 'moderate', 'high', 'critical'], 'auth-type': ['legacy', 'sso', 'saml', 'oauth'], + 'before': [null, Date], 'bin-links': Boolean, browser: [null, String], ca: [null, String, Array], @@ -280,6 +286,8 @@ exports.types = { editor: String, 'engine-strict': Boolean, force: Boolean, + fund: Boolean, + 'format-package-lock': Boolean, 'fetch-retries': Number, 'fetch-retry-factor': Number, 'fetch-retry-mintimeout': Number, @@ -394,6 +402,7 @@ function getLocalAddresses () { } exports.shorthands = { + before: ['--enjoy-by'], s: ['--loglevel', 'silent'], d: ['--loglevel', 'info'], dd: ['--loglevel', 'verbose'], diff --git a/lib/config/fetch-opts.js b/lib/config/fetch-opts.js deleted file mode 100644 index 213c293d6c7c9..0000000000000 --- a/lib/config/fetch-opts.js +++ /dev/null @@ -1,77 +0,0 @@ -'use strict' - -const url = require('url') - -module.exports.fromPacote = fromPacote - -function fromPacote (opts) { - return { - cache: getCacheMode(opts), - cacheManager: opts.cache, - ca: opts.ca, - cert: opts.cert, - headers: getHeaders('', opts.registry, opts), - key: opts.key, - localAddress: opts.localAddress, - maxSockets: opts.maxSockets, - proxy: opts.proxy, - referer: opts.refer, - retry: opts.retry, - strictSSL: !!opts.strictSSL, - timeout: opts.timeout, - uid: opts.uid, - gid: opts.gid - } -} - -function getCacheMode (opts) { - return opts.offline - ? 'only-if-cached' - : opts.preferOffline - ? 'force-cache' - : opts.preferOnline - ? 'no-cache' - : 'default' -} - -function getHeaders (uri, registry, opts) { - const headers = Object.assign({ - 'npm-in-ci': opts.isFromCI, - 'npm-scope': opts.projectScope, - 'npm-session': opts.npmSession, - 'user-agent': opts.userAgent, - 'referer': opts.refer - }, opts.headers) - // check for auth settings specific to this registry - let auth = ( - opts.auth && - opts.auth[registryKey(registry)] - ) || opts.auth - // If a tarball is hosted on a different place than the manifest, only send - // credentials on `alwaysAuth` - const shouldAuth = auth && ( - auth.alwaysAuth || - url.parse(uri).host === url.parse(registry).host - ) - if (shouldAuth && auth.token) { - headers.authorization = `Bearer ${auth.token}` - } else if (shouldAuth && auth.username && auth.password) { - const encoded = Buffer.from( - `${auth.username}:${auth.password}`, 'utf8' - ).toString('base64') - headers.authorization = `Basic ${encoded}` - } else if (shouldAuth && auth._auth) { - headers.authorization = `Basic ${auth._auth}` - } - return headers -} - -function registryKey (registry) { - const parsed = url.parse(registry) - const formatted = url.format({ - host: parsed.host, - pathname: parsed.pathname, - slashes: parsed.slashes - }) - return url.resolve(formatted, '.') -} diff --git a/lib/config/figgy-config.js b/lib/config/figgy-config.js new file mode 100644 index 0000000000000..d704d1502cb44 --- /dev/null +++ b/lib/config/figgy-config.js @@ -0,0 +1,87 @@ +'use strict' + +const BB = require('bluebird') + +const crypto = require('crypto') +const figgyPudding = require('figgy-pudding') +const log = require('npmlog') +const npm = require('../npm.js') +const pack = require('../pack.js') +const path = require('path') + +const npmSession = npm.session = crypto.randomBytes(8).toString('hex') +log.verbose('npm-session', npmSession) + +const SCOPE_REGISTRY_REGEX = /@.*:registry$/gi +const NpmConfig = figgyPudding({}, { + other (key) { + return 
key.match(SCOPE_REGISTRY_REGEX) + } +}) + +let baseConfig + +module.exports = mkConfig +function mkConfig (...providers) { + if (!baseConfig) { + baseConfig = NpmConfig(npm.config, { + // Add some non-npm-config opts by hand. + cache: path.join(npm.config.get('cache'), '_cacache'), + // NOTE: npm has some magic logic around color distinct from the config + // value, so we have to override it here + color: !!npm.color, + dirPacker: pack.packGitDep, + hashAlgorithm: 'sha1', + includeDeprecated: false, + log, + 'npm-session': npmSession, + 'project-scope': npm.projectScope, + refer: npm.referer, + dmode: npm.modes.exec, + fmode: npm.modes.file, + umask: npm.modes.umask, + npmVersion: npm.version, + tmp: npm.tmp, + Promise: BB + }) + const ownerStats = calculateOwner() + if (ownerStats.uid != null || ownerStats.gid != null) { + baseConfig = baseConfig.concat(ownerStats) + } + } + let conf = baseConfig.concat(...providers) + // Adapt some other configs if missing + if (npm.config.get('prefer-online') === undefined) { + conf = conf.concat({ + 'prefer-online': npm.config.get('cache-max') <= 0 + }) + } + if (npm.config.get('prefer-online') === undefined) { + conf = conf.concat({ + 'prefer-online': npm.config.get('cache-min') >= 9999 + }) + } + return conf +} + +let effectiveOwner +function calculateOwner () { + if (!effectiveOwner) { + effectiveOwner = { uid: 0, gid: 0 } + + // Pretty much only on windows + if (!process.getuid) { + return effectiveOwner + } + + effectiveOwner.uid = +process.getuid() + effectiveOwner.gid = +process.getgid() + + if (effectiveOwner.uid === 0) { + if (process.env.SUDO_UID) effectiveOwner.uid = +process.env.SUDO_UID + if (process.env.SUDO_GID) effectiveOwner.gid = +process.env.SUDO_GID + } + } + + return effectiveOwner +} diff --git a/lib/config/load-uid.js b/lib/config/load-uid.js deleted file mode 100644 index 859eac7494bc7..0000000000000 --- a/lib/config/load-uid.js +++ /dev/null @@ -1,15 +0,0 @@ -module.exports = loadUid - -var getUid = require('uid-number') - -// Call in the context of a npmconf object - -function loadUid (cb) { - // if we're not in unsafe-perm mode, then figure out who - // to run stuff as. 
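lib/config/figgy-config.js above is the heart of this refactor: instead of the hand-rolled pacoteOpts()/fetch-opts translation layers, commands build their options from a figgy-pudding object that proxies reads down to npm.config and layers overrides on top. A minimal sketch of the figgy-pudding behavior being relied on, using made-up option names rather than npm's real config keys:

    const figgyPudding = require('figgy-pudding')

    // Declare which keys may be read; a string value declares an alias, so
    // the camelCase name resolves to the kebab-case entry.
    const ExampleOpts = figgyPudding({
      'poll-frequency': { default: 500 },
      pollFrequency: 'poll-frequency'
    })

    // Providers are layered: concat() returns a new pudding whose extra
    // layer takes precedence, without mutating the original.
    const base = ExampleOpts({ 'poll-frequency': 1000 })
    const opts = base.concat({ 'poll-frequency': 250 })

    console.log(base.pollFrequency) // 1000
    console.log(opts.pollFrequency) // 250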
Do this first, to support `npm update npm -g` - if (!this.get('unsafe-perm')) { - getUid(this.get('user'), this.get('group'), cb) - } else { - process.nextTick(cb) - } -} diff --git a/lib/config/pacote.js b/lib/config/pacote.js deleted file mode 100644 index 505b69da375a4..0000000000000 --- a/lib/config/pacote.js +++ /dev/null @@ -1,141 +0,0 @@ -'use strict' - -const Buffer = require('safe-buffer').Buffer - -const crypto = require('crypto') -const npm = require('../npm') -const log = require('npmlog') -let pack -const path = require('path') - -let effectiveOwner - -const npmSession = crypto.randomBytes(8).toString('hex') -log.verbose('npm-session', npmSession) - -module.exports = pacoteOpts -function pacoteOpts (moreOpts) { - if (!pack) { - pack = require('../pack.js') - } - const ownerStats = calculateOwner() - const opts = { - cache: path.join(npm.config.get('cache'), '_cacache'), - ca: npm.config.get('ca'), - cert: npm.config.get('cert'), - defaultTag: npm.config.get('tag'), - dirPacker: pack.packGitDep, - hashAlgorithm: 'sha1', - includeDeprecated: false, - key: npm.config.get('key'), - localAddress: npm.config.get('local-address'), - log: log, - maxAge: npm.config.get('cache-min'), - maxSockets: npm.config.get('maxsockets'), - npmSession: npmSession, - offline: npm.config.get('offline'), - preferOffline: npm.config.get('prefer-offline') || npm.config.get('cache-min') > 9999, - preferOnline: npm.config.get('prefer-online') || npm.config.get('cache-max') <= 0, - projectScope: npm.projectScope, - proxy: npm.config.get('https-proxy') || npm.config.get('proxy'), - noProxy: npm.config.get('noproxy'), - refer: npm.registry.refer, - registry: npm.config.get('registry'), - retry: { - retries: npm.config.get('fetch-retries'), - factor: npm.config.get('fetch-retry-factor'), - minTimeout: npm.config.get('fetch-retry-mintimeout'), - maxTimeout: npm.config.get('fetch-retry-maxtimeout') - }, - scope: npm.config.get('scope'), - strictSSL: npm.config.get('strict-ssl'), - userAgent: npm.config.get('user-agent'), - - dmode: npm.modes.exec, - fmode: npm.modes.file, - umask: npm.modes.umask - } - - if (ownerStats.uid != null || ownerStats.gid != null) { - Object.assign(opts, ownerStats) - } - - npm.config.keys.forEach(function (k) { - const authMatchGlobal = k.match( - /^(_authToken|username|_password|password|email|always-auth|_auth)$/ - ) - const authMatchScoped = k[0] === '/' && k.match( - /(.*):(_authToken|username|_password|password|email|always-auth|_auth)$/ - ) - - // if it matches scoped it will also match global - if (authMatchGlobal || authMatchScoped) { - let nerfDart = null - let key = null - let val = null - - if (!opts.auth) { opts.auth = {} } - - if (authMatchScoped) { - nerfDart = authMatchScoped[1] - key = authMatchScoped[2] - val = npm.config.get(k) - if (!opts.auth[nerfDart]) { - opts.auth[nerfDart] = { - alwaysAuth: !!npm.config.get('always-auth') - } - } - } else { - key = authMatchGlobal[1] - val = npm.config.get(k) - opts.auth.alwaysAuth = !!npm.config.get('always-auth') - } - - const auth = authMatchScoped ? opts.auth[nerfDart] : opts.auth - if (key === '_authToken') { - auth.token = val - } else if (key.match(/password$/i)) { - auth.password = - // the config file stores password auth already-encoded. pacote expects - // the actual username/password pair. - Buffer.from(val, 'base64').toString('utf8') - } else if (key === 'always-auth') { - auth.alwaysAuth = val === 'false' ? 
false : !!val - } else { - auth[key] = val - } - } - - if (k[0] === '@') { - if (!opts.scopeTargets) { opts.scopeTargets = {} } - opts.scopeTargets[k.replace(/:registry$/, '')] = npm.config.get(k) - } - }) - - Object.keys(moreOpts || {}).forEach((k) => { - opts[k] = moreOpts[k] - }) - - return opts -} - -function calculateOwner () { - if (!effectiveOwner) { - effectiveOwner = { uid: 0, gid: 0 } - - // Pretty much only on windows - if (!process.getuid) { - return effectiveOwner - } - - effectiveOwner.uid = +process.getuid() - effectiveOwner.gid = +process.getgid() - - if (effectiveOwner.uid === 0) { - if (process.env.SUDO_UID) effectiveOwner.uid = +process.env.SUDO_UID - if (process.env.SUDO_GID) effectiveOwner.gid = +process.env.SUDO_GID - } - } - - return effectiveOwner -} diff --git a/lib/config/reg-client.js b/lib/config/reg-client.js deleted file mode 100644 index d4e2417097fa0..0000000000000 --- a/lib/config/reg-client.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -module.exports = regClientConfig -function regClientConfig (npm, log, config) { - return { - proxy: { - http: config.get('proxy'), - https: config.get('https-proxy'), - localAddress: config.get('local-address') - }, - ssl: { - certificate: config.get('cert'), - key: config.get('key'), - ca: config.get('ca'), - strict: config.get('strict-ssl') - }, - retry: { - retries: config.get('fetch-retries'), - factor: config.get('fetch-retry-factor'), - minTimeout: config.get('fetch-retry-mintimeout'), - maxTimeout: config.get('fetch-retry-maxtimeout') - }, - userAgent: config.get('user-agent'), - log: log, - defaultTag: config.get('tag'), - maxSockets: config.get('maxsockets'), - scope: npm.projectScope - } -} diff --git a/lib/config/set-user.js b/lib/config/set-user.js index 14cc21d2ebd99..570a1f54e2757 100644 --- a/lib/config/set-user.js +++ b/lib/config/set-user.js @@ -3,7 +3,7 @@ module.exports = setUser var assert = require('assert') var path = require('path') var fs = require('fs') -var mkdirp = require('mkdirp') +var mkdirp = require('gentle-fs').mkdir function setUser (cb) { var defaultConf = this.root diff --git a/lib/deprecate.js b/lib/deprecate.js index 9b71d1de494ad..7fe2fbed4ba55 100644 --- a/lib/deprecate.js +++ b/lib/deprecate.js @@ -1,55 +1,72 @@ -/* eslint-disable standard/no-callback-literal */ -var npm = require('./npm.js') -var mapToRegistry = require('./utils/map-to-registry.js') -var npa = require('npm-package-arg') +'use strict' + +const BB = require('bluebird') + +const npmConfig = require('./config/figgy-config.js') +const fetch = require('libnpm/fetch') +const figgyPudding = require('figgy-pudding') +const otplease = require('./utils/otplease.js') +const npa = require('libnpm/parse-arg') +const semver = require('semver') +const whoami = require('./whoami.js') + +const DeprecateConfig = figgyPudding({}) module.exports = deprecate deprecate.usage = 'npm deprecate [@] ' deprecate.completion = function (opts, cb) { - // first, get a list of remote packages this user owns. - // once we have a user account, then don't complete anything. 
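The rewritten deprecate() below no longer goes through npm-registry-client; it fetches the packument, marks every version that satisfies the requested range, and PUTs the document back. A small sketch of just the version-marking step, assuming a packument shaped the way the registry returns it (a versions map keyed by version string):

    const semver = require('semver')

    function markDeprecated (packument, range, message) {
      Object.keys(packument.versions)
        .filter(v => semver.satisfies(v, range))
        .forEach(v => { packument.versions[v].deprecated = message })
      return packument
    }

    // e.g. markDeprecated(packument, '<2.0.0', 'please upgrade to 2.x')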
- if (opts.conf.argv.remain.length > 2) return cb() - // get the list of packages by user - var path = '/-/by-user/' - mapToRegistry(path, npm.config, function (er, uri, c) { - if (er) return cb(er) - - if (!(c && c.username)) return cb() - - var params = { - timeout: 60000, - auth: c - } - npm.registry.get(uri + c.username, params, function (er, list) { - if (er) return cb() - console.error(list) - return cb(null, list[c.username]) + return BB.try(() => { + if (opts.conf.argv.remain.length > 2) { return } + return whoami([], true, () => {}).then(username => { + if (username) { + // first, get a list of remote packages this user owns. + // once we have a user account, then don't complete anything. + // get the list of packages by user + return fetch( + `/-/by-user/${encodeURIComponent(username)}`, + DeprecateConfig() + ).then(list => list[username]) + } }) - }) + }).nodeify(cb) } -function deprecate (args, cb) { - var pkg = args[0] - var msg = args[1] - if (msg === undefined) return cb('Usage: ' + deprecate.usage) +function deprecate ([pkg, msg], opts, cb) { + if (typeof cb !== 'function') { + cb = opts + opts = null + } + opts = DeprecateConfig(opts || npmConfig()) + return BB.try(() => { + if (msg == null) throw new Error(`Usage: ${deprecate.usage}`) + // fetch the data and make sure it exists. + const p = npa(pkg) - // fetch the data and make sure it exists. - var p = npa(pkg) + // npa makes the default spec "latest", but for deprecation + // "*" is the appropriate default. + const spec = p.rawSpec === '' ? '*' : p.fetchSpec - // npa makes the default spec "latest", but for deprecation - // "*" is the appropriate default. - var spec = p.rawSpec === '' ? '*' : p.fetchSpec - - mapToRegistry(p.name, npm.config, function (er, uri, auth) { - if (er) return cb(er) - - var params = { - version: spec, - message: msg, - auth: auth + if (semver.validRange(spec, true) === null) { + throw new Error('invalid version range: ' + spec) } - npm.registry.deprecate(uri, params, cb) - }) + + const uri = '/' + p.escapedName + return fetch.json(uri, opts.concat({ + spec: p, + query: {write: true} + })).then(packument => { + // filter all the versions that match + Object.keys(packument.versions) + .filter(v => semver.satisfies(v, spec)) + .forEach(v => { packument.versions[v].deprecated = msg }) + return otplease(opts, opts => fetch(uri, opts.concat({ + spec: p, + method: 'PUT', + body: packument, + ignoreBody: true + }))) + }) + }).nodeify(cb) } diff --git a/lib/dist-tag.js b/lib/dist-tag.js index bd0c5ae8a27a7..64bb97b61a4c6 100644 --- a/lib/dist-tag.js +++ b/lib/dist-tag.js @@ -1,15 +1,22 @@ /* eslint-disable standard/no-callback-literal */ module.exports = distTag -var log = require('npmlog') -var npa = require('npm-package-arg') -var semver = require('semver') - -var npm = require('./npm.js') -var mapToRegistry = require('./utils/map-to-registry.js') -var readLocalPkg = require('./utils/read-local-package.js') -var usage = require('./utils/usage') -var output = require('./utils/output.js') +const BB = require('bluebird') + +const figgyPudding = require('figgy-pudding') +const log = require('npmlog') +const npa = require('libnpm/parse-arg') +const npmConfig = require('./config/figgy-config.js') +const output = require('./utils/output.js') +const otplease = require('./utils/otplease.js') +const readLocalPkg = BB.promisify(require('./utils/read-local-package.js')) +const regFetch = require('libnpm/fetch') +const semver = require('semver') +const usage = require('./utils/usage') + +const DistTagOpts = 
figgyPudding({ + tag: {} +}) distTag.usage = usage( 'dist-tag', @@ -30,130 +37,128 @@ distTag.completion = function (opts, cb) { } } -function distTag (args, cb) { - var cmd = args.shift() - switch (cmd) { - case 'add': case 'a': case 'set': case 's': - return add(args[0], args[1], cb) - case 'rm': case 'r': case 'del': case 'd': case 'remove': - return remove(args[1], args[0], cb) - case 'ls': case 'l': case 'sl': case 'list': - return list(args[0], cb) - default: - return cb('Usage:\n' + distTag.usage) - } +function UsageError () { + throw Object.assign(new Error('Usage:\n' + distTag.usage), { + code: 'EUSAGE' + }) } -function add (spec, tag, cb) { - var thing = npa(spec || '') - var pkg = thing.name - var version = thing.rawSpec - var t = (tag || npm.config.get('tag')).trim() +function distTag ([cmd, pkg, tag], cb) { + const opts = DistTagOpts(npmConfig()) + return BB.try(() => { + switch (cmd) { + case 'add': case 'a': case 'set': case 's': + return add(pkg, tag, opts) + case 'rm': case 'r': case 'del': case 'd': case 'remove': + return remove(pkg, tag, opts) + case 'ls': case 'l': case 'sl': case 'list': + return list(pkg, opts) + default: + if (!pkg) { + return list(cmd, opts) + } else { + UsageError() + } + } + }).then( + x => cb(null, x), + err => { + if (err.code === 'EUSAGE') { + cb(err.message) + } else { + cb(err) + } + } + ) +} - log.verbose('dist-tag add', t, 'to', pkg + '@' + version) +function add (spec, tag, opts) { + spec = npa(spec || '') + const version = spec.rawSpec + const t = (tag || opts.tag).trim() - if (!pkg || !version || !t) return cb('Usage:\n' + distTag.usage) + log.verbose('dist-tag add', t, 'to', spec.name + '@' + version) + + if (!spec || !version || !t) UsageError() if (semver.validRange(t)) { - var er = new Error('Tag name must not be a valid SemVer range: ' + t) - return cb(er) + throw new Error('Tag name must not be a valid SemVer range: ' + t) } - fetchTags(pkg, function (er, tags) { - if (er) return cb(er) - + return fetchTags(spec, opts).then(tags => { if (tags[t] === version) { log.warn('dist-tag add', t, 'is already set to version', version) - return cb() + return } tags[t] = version - - mapToRegistry(pkg, npm.config, function (er, uri, auth, base) { - var params = { - 'package': pkg, - distTag: t, - version: version, - auth: auth - } - - npm.registry.distTags.add(base, params, function (er) { - if (er) return cb(er) - - output('+' + t + ': ' + pkg + '@' + version) - cb() - }) + const url = `/-/package/${spec.escapedName}/dist-tags/${encodeURIComponent(t)}` + const reqOpts = opts.concat({ + method: 'PUT', + body: JSON.stringify(version), + headers: { + 'content-type': 'application/json' + }, + spec + }) + return otplease(reqOpts, reqOpts => regFetch(url, reqOpts)).then(() => { + output(`+${t}: ${spec.name}@${version}`) }) }) } -function remove (tag, pkg, cb) { - log.verbose('dist-tag del', tag, 'from', pkg) - - fetchTags(pkg, function (er, tags) { - if (er) return cb(er) +function remove (spec, tag, opts) { + spec = npa(spec || '') + log.verbose('dist-tag del', tag, 'from', spec.name) + return fetchTags(spec, opts).then(tags => { if (!tags[tag]) { - log.info('dist-tag del', tag, 'is not a dist-tag on', pkg) - return cb(new Error(tag + ' is not a dist-tag on ' + pkg)) + log.info('dist-tag del', tag, 'is not a dist-tag on', spec.name) + throw new Error(tag + ' is not a dist-tag on ' + spec.name) } - - var version = tags[tag] + const version = tags[tag] delete tags[tag] - - mapToRegistry(pkg, npm.config, function (er, uri, auth, base) { - var params 
= { - 'package': pkg, - distTag: tag, - auth: auth - } - - npm.registry.distTags.rm(base, params, function (er) { - if (er) return cb(er) - - output('-' + tag + ': ' + pkg + '@' + version) - cb() - }) + const url = `/-/package/${spec.escapedName}/dist-tags/${encodeURIComponent(tag)}` + const reqOpts = opts.concat({ + method: 'DELETE', + spec + }) + return otplease(reqOpts, reqOpts => regFetch(url, reqOpts)).then(() => { + output(`-${tag}: ${spec.name}@${version}`) }) }) } -function list (pkg, cb) { - if (!pkg) { - return readLocalPkg(function (er, pkg) { - if (er) return cb(er) - if (!pkg) return cb(distTag.usage) - list(pkg, cb) +function list (spec, opts) { + if (!spec) { + return readLocalPkg().then(pkg => { + if (!pkg) { UsageError() } + return list(pkg, opts) }) } + spec = npa(spec) - fetchTags(pkg, function (er, tags) { - if (er) { - log.error('dist-tag ls', "Couldn't get dist-tag data for", pkg) - return cb(er) - } - var msg = Object.keys(tags).map(function (k) { - return k + ': ' + tags[k] - }).sort().join('\n') + return fetchTags(spec, opts).then(tags => { + var msg = Object.keys(tags).map(k => `${k}: ${tags[k]}`).sort().join('\n') output(msg) - cb(er, tags) + return tags + }, err => { + log.error('dist-tag ls', "Couldn't get dist-tag data for", spec) + throw err }) } -function fetchTags (pkg, cb) { - mapToRegistry(pkg, npm.config, function (er, uri, auth, base) { - if (er) return cb(er) - - var params = { - 'package': pkg, - auth: auth - } - npm.registry.distTags.fetch(base, params, function (er, tags) { - if (er) return cb(er) - if (!tags || !Object.keys(tags).length) { - return cb(new Error('No dist-tags found for ' + pkg)) - } - - cb(null, tags) +function fetchTags (spec, opts) { + return regFetch.json( + `/-/package/${spec.escapedName}/dist-tags`, + opts.concat({ + 'prefer-online': true, + spec }) + ).then(data => { + if (data && typeof data === 'object') delete data._etag + if (!data || !Object.keys(data).length) { + throw new Error('No dist-tags found for ' + spec.name) + } + return data }) } diff --git a/lib/doctor.js b/lib/doctor.js index 95ede1bc87242..96094e6346d05 100644 --- a/lib/doctor.js +++ b/lib/doctor.js @@ -87,7 +87,7 @@ function makePretty (p) { const cacheStatus = p[8] ? 
`verified ${p[8].verifiedContent} tarballs` : 'notOk' const npmV = npm.version const nodeV = process.version.replace('v', '') - const registry = npm.config.get('registry') + const registry = npm.config.get('registry') || '' const list = [ ['npm ping', ping], ['npm -v', 'v' + npmV], diff --git a/lib/doctor/check-ping.js b/lib/doctor/check-ping.js index e7e82902a7165..58f14fe69e1e0 100644 --- a/lib/doctor/check-ping.js +++ b/lib/doctor/check-ping.js @@ -4,8 +4,12 @@ var ping = require('../ping.js') function checkPing (cb) { var tracker = log.newItem('checkPing', 1) tracker.info('checkPing', 'Pinging registry') - ping({}, true, (_err, pong, data, res) => { - cb(null, [res.statusCode, res.statusMessage]) + ping({}, true, (err, pong) => { + if (err && err.code && err.code.match(/^E\d{3}$/)) { + return cb(null, [err.code.substr(1)]) + } else { + cb(null, [200, 'ok']) + } }) } diff --git a/lib/explore.js b/lib/explore.js index 826a527fa7ef3..0c9930f8e4ca7 100644 --- a/lib/explore.js +++ b/lib/explore.js @@ -9,10 +9,11 @@ var npm = require('./npm.js') var spawn = require('./utils/spawn') var path = require('path') var fs = require('graceful-fs') -var isWindowsShell = require('./utils/is-windows-shell.js') +var isWindows = require('./utils/is-windows.js') var escapeExecPath = require('./utils/escape-exec-path.js') var escapeArg = require('./utils/escape-arg.js') var output = require('./utils/output.js') +var log = require('npmlog') function explore (args, cb) { if (args.length < 1 || !args[0]) return cb(explore.usage) @@ -23,7 +24,7 @@ function explore (args, cb) { var shellArgs = [] if (args) { - if (isWindowsShell) { + if (isWindows) { var execCmd = escapeExecPath(args.shift()) var execArgs = [execCmd].concat(args.map(escapeArg)) opts.windowsVerbatimArguments = true @@ -49,6 +50,7 @@ function explore (args, cb) { ) } + log.silly('explore', {sh, shellArgs, opts}) var shell = spawn(sh, shellArgs, opts) shell.on('close', function (er) { // only fail if non-interactive. 
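Registry writes in the converted commands above (deprecate, dist-tag) all follow the same shape: build the URL, call libnpm/fetch (npm-registry-fetch) with the pudding options, and wrap the call in otplease() so a one-time-password challenge triggers a single prompted retry. A rough sketch of that pattern, mirroring the dist-tag add() implementation (the relative require path assumes a file under lib/):

    const regFetch = require('npm-registry-fetch')
    const otplease = require('./utils/otplease.js')

    function setDistTag (spec, tag, version, opts) {
      const url = `/-/package/${spec.escapedName}/dist-tags/${encodeURIComponent(tag)}`
      return otplease(opts, opts => regFetch(url, opts.concat({
        method: 'PUT',
        body: JSON.stringify(version),
        headers: { 'content-type': 'application/json' },
        spec
      })))
    }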
diff --git a/lib/fetch-package-metadata.js b/lib/fetch-package-metadata.js index cca6dc64f4168..c4f46f513fed2 100644 --- a/lib/fetch-package-metadata.js +++ b/lib/fetch-package-metadata.js @@ -8,11 +8,11 @@ const rimraf = require('rimraf') const validate = require('aproba') const npa = require('npm-package-arg') const npm = require('./npm') +let npmConfig const npmlog = require('npmlog') const limit = require('call-limit') const tempFilename = require('./utils/temp-filename') const pacote = require('pacote') -let pacoteOpts const isWindows = require('./utils/is-windows.js') function andLogAndFinish (spec, tracker, done) { @@ -26,7 +26,8 @@ function andLogAndFinish (spec, tracker, done) { } } -const CACHE = require('lru-cache')({ +const LRUCache = require('lru-cache') +const CACHE = new LRUCache({ max: 300 * 1024 * 1024, length: (p) => p._contentLength }) @@ -52,10 +53,10 @@ function fetchPackageMetadata (spec, where, opts, done) { err.code = 'EWINDOWSPATH' return logAndFinish(err) } - if (!pacoteOpts) { - pacoteOpts = require('./config/pacote') + if (!npmConfig) { + npmConfig = require('./config/figgy-config.js') } - pacote.manifest(dep, pacoteOpts({ + pacote.manifest(dep, npmConfig({ annotate: true, fullMetadata: opts.fullMetadata, log: tracker || npmlog, @@ -85,9 +86,6 @@ function fetchPackageMetadata (spec, where, opts, done) { module.exports.addBundled = addBundled function addBundled (pkg, next) { validate('OF', arguments) - if (!pacoteOpts) { - pacoteOpts = require('./config/pacote') - } if (pkg._bundled !== undefined) return next(null, pkg) if (!pkg.bundleDependencies && pkg._requested.type !== 'directory') return next(null, pkg) @@ -101,7 +99,10 @@ function addBundled (pkg, next) { } pkg._bundled = null const target = tempFilename('unpack') - const opts = pacoteOpts({integrity: pkg._integrity}) + if (!npmConfig) { + npmConfig = require('./config/figgy-config.js') + } + const opts = npmConfig({integrity: pkg._integrity}) pacote.extract(pkg._resolved || pkg._requested || npa.resolve(pkg.name, pkg.version), target, opts).then(() => { log.silly('addBundled', 'read tarball') readPackageTree(target, (err, tree) => { diff --git a/lib/fetch-package-metadata.md b/lib/fetch-package-metadata.md index 6fe4beac6eeed..7b4562341b1cb 100644 --- a/lib/fetch-package-metadata.md +++ b/lib/fetch-package-metadata.md @@ -1,7 +1,7 @@ fetch-package-metadata ---------------------- - var fetchPackageMetadata = require("npm/lib/fetch-package-metadata") + const fetchPackageMetadata = require("npm/lib/fetch-package-metadata") fetchPackageMetadata(spec, contextdir, callback) This will get package metadata (and if possible, ONLY package metadata) for diff --git a/lib/fund.js b/lib/fund.js new file mode 100644 index 0000000000000..e605ea8c3fd03 --- /dev/null +++ b/lib/fund.js @@ -0,0 +1,168 @@ +'use strict' + +const path = require('path') + +const archy = require('archy') +const figgyPudding = require('figgy-pudding') +const readPackageTree = require('read-package-tree') + +const npm = require('./npm.js') +const npmConfig = require('./config/figgy-config.js') +const fetchPackageMetadata = require('./fetch-package-metadata.js') +const computeMetadata = require('./install/deps.js').computeMetadata +const readShrinkwrap = require('./install/read-shrinkwrap.js') +const mutateIntoLogicalTree = require('./install/mutate-into-logical-tree.js') +const output = require('./utils/output.js') +const openUrl = require('./utils/open-url.js') +const { getFundingInfo, retrieveFunding, validFundingField, flatCacheSymbol } = 
require('./utils/funding.js') + +const FundConfig = figgyPudding({ + browser: {}, // used by ./utils/open-url + global: {}, + json: {}, + unicode: {}, + which: {} +}) + +module.exports = fundCmd + +const usage = require('./utils/usage') +fundCmd.usage = usage( + 'fund', + 'npm fund [--json]', + 'npm fund [--browser] [[<@scope>/] [--which=]' +) + +fundCmd.completion = function (opts, cb) { + const argv = opts.conf.argv.remain + switch (argv[2]) { + case 'fund': + return cb(null, []) + default: + return cb(new Error(argv[2] + ' not recognized')) + } +} + +function printJSON (fundingInfo) { + return JSON.stringify(fundingInfo, null, 2) +} + +// the human-printable version does some special things that turned out to +// be very verbose but hopefully not hard to follow: we stack up items +// that have a shared url/type and make sure they're printed at the highest +// level possible, in that process they also carry their dependencies along +// with them, moving those up in the visual tree +function printHuman (fundingInfo, opts) { + const flatCache = fundingInfo[flatCacheSymbol] + + const { name, version } = fundingInfo + const printableVersion = version ? `@${version}` : '' + + const items = Object.keys(flatCache).map((url) => { + const deps = flatCache[url] + + const packages = deps.map((dep) => { + const { name, version } = dep + + const printableVersion = version ? `@${version}` : '' + return `${name}${printableVersion}` + }) + + return { + label: url, + nodes: [packages.join(', ')] + } + }) + + return archy({ label: `${name}${printableVersion}`, nodes: items }, '', { unicode: opts.unicode }) +} + +function openFundingUrl (packageName, fundingSourceNumber, cb) { + function getUrlAndOpen (packageMetadata) { + const { funding } = packageMetadata + const validSources = [].concat(retrieveFunding(funding)).filter(validFundingField) + + if (validSources.length === 1 || (fundingSourceNumber > 0 && fundingSourceNumber <= validSources.length)) { + const { type, url } = validSources[fundingSourceNumber ? fundingSourceNumber - 1 : 0] + const typePrefix = type ? `${type} funding` : 'Funding' + const msg = `${typePrefix} available at the following URL` + openUrl(url, msg, cb) + } else if (!(fundingSourceNumber >= 1)) { + validSources.forEach(({ type, url }, i) => { + const typePrefix = type ? 
`${type} funding` : 'Funding' + const msg = `${typePrefix} available at the following URL` + console.log(`${i + 1}: ${msg}: ${url}`) + }) + console.log('Run `npm fund [<@scope>/] --which=1`, for example, to open the first funding URL listed in that package') + cb() + } else { + const noFundingError = new Error(`No valid funding method available for: ${packageName}`) + noFundingError.code = 'ENOFUND' + + throw noFundingError + } + } + + fetchPackageMetadata( + packageName, + '.', + { fullMetadata: true }, + function (err, packageMetadata) { + if (err) return cb(err) + getUrlAndOpen(packageMetadata) + } + ) +} + +function fundCmd (args, cb) { + const opts = FundConfig(npmConfig()) + const dir = path.resolve(npm.dir, '..') + const packageName = args[0] + const numberArg = opts.which + + const fundingSourceNumber = numberArg && parseInt(numberArg, 10) + + if (numberArg !== undefined && (String(fundingSourceNumber) !== numberArg || fundingSourceNumber < 1)) { + const err = new Error('`npm fund [<@scope>/] [--which=fundingSourceNumber]` must be given a positive integer') + err.code = 'EFUNDNUMBER' + throw err + } + + if (opts.global) { + const err = new Error('`npm fund` does not support global packages') + err.code = 'EFUNDGLOBAL' + throw err + } + + if (packageName) { + openFundingUrl(packageName, fundingSourceNumber, cb) + return + } + + readPackageTree(dir, function (err, tree) { + if (err) { + process.exitCode = 1 + return cb(err) + } + + readShrinkwrap.andInflate(tree, function () { + const fundingInfo = getFundingInfo( + mutateIntoLogicalTree.asReadInstalled( + computeMetadata(tree) + ) + ) + + const print = opts.json + ? printJSON + : printHuman + + output( + print( + fundingInfo, + opts + ) + ) + cb(err, tree) + }) + }) +} diff --git a/lib/help.js b/lib/help.js index 3f70f2dc1f84c..61f1f3f94cc66 100644 --- a/lib/help.js +++ b/lib/help.js @@ -63,7 +63,7 @@ function help (args, cb) { // legacy if (section === 'global') section = 'folders' - else if (section === 'json') section = 'package.json' + else if (section.match(/.*json/)) section = section.replace('.json', '-json') // find either /section.n or /npm-section.n // The glob is used in the glob. 
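printHuman() in lib/fund.js above groups dependencies by shared funding URL and hands the result to archy, which renders the text tree. A small illustration of the archy input shape it builds, with made-up package data:

    const archy = require('archy')

    // One node per funding URL; packages sharing that URL are collapsed
    // into a single child line.
    const tree = {
      label: 'my-project@1.0.0',
      nodes: [
        { label: 'https://example.com/fund-a', nodes: ['dep-one@1.2.3, dep-two@4.5.6'] },
        { label: 'https://example.com/fund-b', nodes: ['dep-three@7.8.9'] }
      ]
    }

    console.log(archy(tree, '', { unicode: true }))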
The regexp is used much @@ -140,24 +140,21 @@ function viewMan (man, cb) { function htmlMan (man) { var sect = +man.match(/([0-9]+)$/)[1] - var f = path.basename(man).replace(/([0-9]+)$/, 'html') + var f = path.basename(man).replace(/[.]([0-9]+)$/, '') switch (sect) { case 1: - sect = 'cli' - break - case 3: - sect = 'api' + sect = 'cli-commands' break case 5: - sect = 'files' + sect = 'configuring-npm' break case 7: - sect = 'misc' + sect = 'using-npm' break default: throw new Error('invalid man section: ' + sect) } - return path.resolve(__dirname, '..', 'html', 'doc', sect, f) + return path.resolve(__dirname, '..', 'docs', 'public', sect, f, 'index.html') } function npmUsage (valid, cb) { diff --git a/lib/hook.js b/lib/hook.js index b0552c74740ea..54aea9f1e9d20 100644 --- a/lib/hook.js +++ b/lib/hook.js @@ -2,129 +2,146 @@ const BB = require('bluebird') -const crypto = require('crypto') -const hookApi = require('libnpmhook') -const log = require('npmlog') -const npm = require('./npm.js') +const hookApi = require('libnpm/hook') +const npmConfig = require('./config/figgy-config.js') const output = require('./utils/output.js') +const otplease = require('./utils/otplease.js') const pudding = require('figgy-pudding') const relativeDate = require('tiny-relative-date') const Table = require('cli-table3') -const usage = require('./utils/usage.js') const validate = require('aproba') -hook.usage = usage([ +hook.usage = [ 'npm hook add [--type=]', 'npm hook ls [pkg]', 'npm hook rm ', 'npm hook update ' -]) +].join('\n') hook.completion = (opts, cb) => { validate('OF', [opts, cb]) return cb(null, []) // fill in this array with completion values } -const npmSession = crypto.randomBytes(8).toString('hex') -const hookConfig = pudding() -function config () { - return hookConfig({ - refer: npm.refer, - projectScope: npm.projectScope, - log, - npmSession - }, npm.config) +const HookConfig = pudding({ + json: {}, + loglevel: {}, + parseable: {}, + silent: {}, + unicode: {} +}) + +function UsageError () { + throw Object.assign(new Error(hook.usage), {code: 'EUSAGE'}) } -module.exports = (args, cb) => BB.try(() => hook(args)).nodeify(cb) +module.exports = (args, cb) => BB.try(() => hook(args)).then( + val => cb(null, val), + err => err.code === 'EUSAGE' ? cb(err.message) : cb(err) +) function hook (args) { - switch (args[0]) { - case 'add': - return add(args[1], args[2], args[3]) - case 'ls': - return ls(args[1]) - case 'rm': - return rm(args[1]) - case 'update': - case 'up': - return update(args[1], args[2], args[3]) - } + return otplease(npmConfig(), opts => { + opts = HookConfig(opts) + switch (args[0]) { + case 'add': + return add(args[1], args[2], args[3], opts) + case 'ls': + return ls(args[1], opts) + case 'rm': + return rm(args[1], opts) + case 'update': + case 'up': + return update(args[1], args[2], args[3], opts) + default: + UsageError() + } + }) } -function add (pkg, uri, secret) { - return hookApi.add(pkg, uri, secret, config()) - .then((hook) => { - if (npm.config.get('json')) { - output(JSON.stringify(hook, null, 2)) - } else { - output(`+ ${hookName(hook)} ${ - npm.config.get('unicode') ? 
' ➜ ' : ' -> ' - } ${hook.endpoint}`) - } - }) +function add (pkg, uri, secret, opts) { + return hookApi.add(pkg, uri, secret, opts).then(hook => { + if (opts.json) { + output(JSON.stringify(hook, null, 2)) + } else if (opts.parseable) { + output(Object.keys(hook).join('\t')) + output(Object.keys(hook).map(k => hook[k]).join('\t')) + } else if (!opts.silent && opts.loglevel !== 'silent') { + output(`+ ${hookName(hook)} ${ + opts.unicode ? ' ➜ ' : ' -> ' + } ${hook.endpoint}`) + } + }) } -function ls (pkg) { - return hookApi.ls(pkg, config()) - .then((hooks) => { - if (npm.config.get('json')) { - output(JSON.stringify(hooks, null, 2)) - } else if (!hooks.length) { - output("You don't have any hooks configured yet.") +function ls (pkg, opts) { + return hookApi.ls(opts.concat({package: pkg})).then(hooks => { + if (opts.json) { + output(JSON.stringify(hooks, null, 2)) + } else if (opts.parseable) { + output(Object.keys(hooks[0]).join('\t')) + hooks.forEach(hook => { + output(Object.keys(hook).map(k => hook[k]).join('\t')) + }) + } else if (!hooks.length) { + output("You don't have any hooks configured yet.") + } else if (!opts.silent && opts.loglevel !== 'silent') { + if (hooks.length === 1) { + output('You have one hook configured.') } else { - if (hooks.length === 1) { - output('You have one hook configured.') - } else { - output(`You have ${hooks.length} hooks configured.`) - } - const table = new Table({head: ['id', 'target', 'endpoint']}) - hooks.forEach((hook) => { + output(`You have ${hooks.length} hooks configured.`) + } + const table = new Table({head: ['id', 'target', 'endpoint']}) + hooks.forEach((hook) => { + table.push([ + {rowSpan: 2, content: hook.id}, + hookName(hook), + hook.endpoint + ]) + if (hook.last_delivery) { table.push([ - {rowSpan: 2, content: hook.id}, - hookName(hook), - hook.endpoint + { + colSpan: 1, + content: `triggered ${relativeDate(hook.last_delivery)}` + }, + hook.response_code ]) - if (hook.last_delivery) { - table.push([ - { - colSpan: 1, - content: `triggered ${relativeDate(hook.last_delivery)}` - }, - hook.response_code - ]) - } else { - table.push([{colSpan: 2, content: 'never triggered'}]) - } - }) - output(table.toString()) - } - }) + } else { + table.push([{colSpan: 2, content: 'never triggered'}]) + } + }) + output(table.toString()) + } + }) } -function rm (id) { - return hookApi.rm(id, config()) - .then((hook) => { - if (npm.config.get('json')) { - output(JSON.stringify(hook, null, 2)) - } else { - output(`- ${hookName(hook)} ${ - npm.config.get('unicode') ? ' ✘ ' : ' X ' - } ${hook.endpoint}`) - } - }) +function rm (id, opts) { + return hookApi.rm(id, opts).then(hook => { + if (opts.json) { + output(JSON.stringify(hook, null, 2)) + } else if (opts.parseable) { + output(Object.keys(hook).join('\t')) + output(Object.keys(hook).map(k => hook[k]).join('\t')) + } else if (!opts.silent && opts.loglevel !== 'silent') { + output(`- ${hookName(hook)} ${ + opts.unicode ? ' ✘ ' : ' X ' + } ${hook.endpoint}`) + } + }) } -function update (id, uri, secret) { - return hookApi.update(id, uri, secret, config()) - .then((hook) => { - if (npm.config.get('json')) { - output(JSON.stringify(hook, null, 2)) - } else { - output(`+ ${hookName(hook)} ${ - npm.config.get('unicode') ? 
' ➜ ' : ' -> ' - } ${hook.endpoint}`) - } - }) +function update (id, uri, secret, opts) { + return hookApi.update(id, uri, secret, opts).then(hook => { + if (opts.json) { + output(JSON.stringify(hook, null, 2)) + } else if (opts.parseable) { + output(Object.keys(hook).join('\t')) + output(Object.keys(hook).map(k => hook[k]).join('\t')) + } else if (!opts.silent && opts.loglevel !== 'silent') { + output(`+ ${hookName(hook)} ${ + opts.unicode ? ' ➜ ' : ' -> ' + } ${hook.endpoint}`) + } + }) } function hookName (hook) { diff --git a/lib/install.js b/lib/install.js index e15bc47919100..378ada7b05c06 100644 --- a/lib/install.js +++ b/lib/install.js @@ -26,6 +26,7 @@ install.usage = usage( '\nnpm install [<@scope>/]@' + '\nnpm install [<@scope>/]@' + '\nnpm install [<@scope>/]@' + + '\nnpm install @npm:' + '\nnpm install ' + '\nnpm install ' + '\nnpm install ' + @@ -104,7 +105,7 @@ var readPackageJson = require('read-package-json') var chain = require('slide').chain var asyncMap = require('slide').asyncMap var archy = require('archy') -var mkdirp = require('mkdirp') +var mkdirp = require('gentle-fs').mkdir var rimraf = require('rimraf') var iferr = require('iferr') var validate = require('aproba') @@ -138,6 +139,10 @@ var validateArgs = require('./install/validate-args.js') var saveRequested = require('./install/save.js').saveRequested var saveShrinkwrap = require('./install/save.js').saveShrinkwrap var audit = require('./install/audit.js') +var { + getPrintFundingReport, + getPrintFundingReportJSON +} = require('./install/fund.js') var getSaveType = require('./install/save.js').getSaveType var doSerialActions = require('./install/actions.js').doSerial var doReverseSerialActions = require('./install/actions.js').doReverseSerial @@ -240,6 +245,7 @@ function Installer (where, dryrun, args, opts) { this.saveOnlyLock = opts.saveOnlyLock this.global = opts.global != null ? opts.global : this.where === path.resolve(npm.globalDir, '..') this.audit = npm.config.get('audit') && !this.global + this.fund = npm.config.get('fund') && !this.global this.started = Date.now() } Installer.prototype = {} @@ -401,7 +407,7 @@ Installer.prototype.normalizeCurrentTree = function (cb) { if (this.currentTree.error) { for (let child of this.currentTree.children) { if (!child.fakeChild && isExtraneous(child)) { - this.currentTree.package.dependencies[child.package.name] = computeVersionSpec(this.currentTree, child) + this.currentTree.package.dependencies[moduleName(child)] = computeVersionSpec(this.currentTree, child) } } } @@ -703,8 +709,25 @@ Installer.prototype.cloneCurrentTreeToIdealTree = function (cb) { validate('F', arguments) log.silly('install', 'cloneCurrentTreeToIdealTree') - this.idealTree = copyTree(this.currentTree) - this.idealTree.warnings = [] + if (npm.config.get('before')) { + this.idealTree = { + package: this.currentTree.package, + path: this.currentTree.path, + realpath: this.currentTree.realpath, + children: [], + requires: [], + missingDeps: {}, + missingDevDeps: {}, + requiredBy: [], + error: this.currentTree.error, + warnings: [], + isTop: true + } + } else { + this.idealTree = copyTree(this.currentTree) + this.idealTree.warnings = [] + } + cb() } @@ -825,7 +848,11 @@ Installer.prototype.printInstalledForHuman = function (diffs, auditResult) { var report = '' if (this.args.length && (added || updated)) { report += this.args.map((p) => { - return `+ ${p.name}@${p.version}` + return `+ ${p.name}@${p.version}${ + !p._requested.name || p._requested.name === p.name + ? 
'' + : ` (as ${p._requested.name})` + }` }).join('\n') + '\n' } var actions = [] @@ -851,7 +878,6 @@ Installer.prototype.printInstalledForHuman = function (diffs, auditResult) { report += ' in ' + ((Date.now() - this.started) / 1000) + 's' output(report) - return auditResult && audit.printInstallReport(auditResult) function packages (num) { return num + ' package' + (num > 1 ? 's' : '') @@ -873,9 +899,27 @@ Installer.prototype.printInstalledForHuman = function (diffs, auditResult) { if (argument.url) returned += ' (' + argument.email + ')' return returned } + + const { fund, idealTree } = this + const printFundingReport = getPrintFundingReport({ + fund, + idealTree + }) + if (printFundingReport.length) { + output(printFundingReport) + } + + if (auditResult) { + return audit.printInstallReport(auditResult) + } } Installer.prototype.printInstalledForJSON = function (diffs, auditResult) { + const { fund, idealTree } = this + const printFundingReport = getPrintFundingReportJSON({ + fund, + idealTree + }) var result = { added: [], removed: [], @@ -884,6 +928,7 @@ Installer.prototype.printInstalledForJSON = function (diffs, auditResult) { failed: [], warnings: [], audit: auditResult, + funding: printFundingReport, elapsed: Date.now() - this.started } var self = this @@ -922,10 +967,14 @@ Installer.prototype.printInstalledForJSON = function (diffs, auditResult) { function recordAction (action) { var mutation = action[0] var child = action[1] + const isAlias = child.package && child.package._requested && child.package._requested.type === 'alias' + const name = isAlias + ? child.package._requested.name + : child.package && child.package.name var result = { action: mutation, - name: moduleName(child), - version: child.package && child.package.version, + name, + version: child.package && `${isAlias ? `npm:${child.package.name}@` : ''}${child.package.version}`, path: child.path } if (mutation === 'move') { @@ -947,10 +996,16 @@ Installer.prototype.printInstalledForParseable = function (diffs) { } else if (mutation === 'update') { var previousVersion = child.oldPkg.package && child.oldPkg.package.version } + const isAlias = child.package._requested && child.package._requested.type === 'alias' + const version = child.package && isAlias + ? `npm:${child.package.name}@${child.package.version}` + : child.package + ? child.package.version + : '' output( mutation + '\t' + moduleName(child) + '\t' + - (child.package ? child.package.version : '') + '\t' + + version + '\t' + (child.path ? path.relative(self.where, child.path) : '') + '\t' + (previousVersion || '') + '\t' + (previousPath || '')) diff --git a/lib/install/action/extract-worker.js b/lib/install/action/extract-worker.js index 2b082b4a574c2..225e5b4aeab66 100644 --- a/lib/install/action/extract-worker.js +++ b/lib/install/action/extract-worker.js @@ -3,16 +3,16 @@ const BB = require('bluebird') const extract = require('pacote/extract') -const npmlog = require('npmlog') +// const npmlog = require('npmlog') module.exports = (args, cb) => { const parsed = typeof args === 'string' ? 
JSON.parse(args) : args const spec = parsed[0] const extractTo = parsed[1] const opts = parsed[2] - if (!opts.log) { - opts.log = npmlog - } - opts.log.level = opts.loglevel || opts.log.level + // if (!opts.log) { + // opts.log = npmlog + // } + // opts.log.level = opts.loglevel || opts.log.level BB.resolve(extract(spec, extractTo, opts)).nodeify(cb) } diff --git a/lib/install/action/extract.js b/lib/install/action/extract.js index e8d7a6c4f6d1f..585580edd29b7 100644 --- a/lib/install/action/extract.js +++ b/lib/install/action/extract.js @@ -2,15 +2,17 @@ const BB = require('bluebird') +const figgyPudding = require('figgy-pudding') const stat = BB.promisify(require('graceful-fs').stat) const gentlyRm = BB.promisify(require('../../utils/gently-rm.js')) -const mkdirp = BB.promisify(require('mkdirp')) +const mkdirp = BB.promisify(require('gentle-fs').mkdir) +const moduleName = require('../../utils/module-name.js') const moduleStagingPath = require('../module-staging-path.js') const move = require('../../utils/move.js') const npa = require('npm-package-arg') const npm = require('../../npm.js') +let npmConfig const packageId = require('../../utils/package-id.js') -let pacoteOpts const path = require('path') const localWorker = require('./extract-worker.js') const workerFarm = require('worker-farm') @@ -19,19 +21,12 @@ const isRegistry = require('../../utils/is-registry.js') const WORKER_PATH = require.resolve('./extract-worker.js') let workers -// NOTE: temporarily disabled on non-OSX due to ongoing issues: -// -// * Seems to make Windows antivirus issues much more common -// * Messes with Docker (I think) -// -// There are other issues that should be fixed that affect OSX too: -// -// * Logging is messed up right now because pacote does its own thing -// * Global deduplication in pacote breaks due to multiple procs -// -// As these get fixed, we can start experimenting with re-enabling it -// at least on some platforms. -const ENABLE_WORKERS = process.platform === 'darwin' +const ExtractOpts = figgyPudding({ + log: {} +}, { other () { return true } }) + +// Disabled for now. Re-enable someday. Just not today. +const ENABLE_WORKERS = false extract.init = () => { if (ENABLE_WORKERS) { @@ -53,10 +48,10 @@ module.exports = extract function extract (staging, pkg, log) { log.silly('extract', packageId(pkg)) const extractTo = moduleStagingPath(staging, pkg) - if (!pacoteOpts) { - pacoteOpts = require('../../config/pacote') + if (!npmConfig) { + npmConfig = require('../../config/figgy-config.js') } - const opts = pacoteOpts({ + let opts = ExtractOpts(npmConfig()).concat({ integrity: pkg.package._integrity, resolved: pkg.package._resolved }) @@ -72,9 +67,18 @@ function extract (staging, pkg, log) { args[0] = spec.raw if (ENABLE_WORKERS && (isRegistry(spec) || spec.type === 'remote')) { // We can't serialize these options - opts.loglevel = opts.log.level - opts.log = null - opts.dirPacker = null + opts = opts.concat({ + loglevel: opts.log.level, + log: null, + dirPacker: null, + Promise: null, + _events: null, + _eventsCount: null, + list: null, + sources: null, + _maxListeners: null, + root: null + }) // workers will run things in parallel! 
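The option scrubbing in lib/install/action/extract.js above exists because worker-farm hands arguments to the child process as plain data, so the logger, dirPacker, the Promise constructor and friends cannot ride along. A sketch of that constraint with a hypothetical scrub() helper; the real code instead nulls out the known offenders explicitly, as shown above:

    function scrub (opts) {
      // A JSON round trip drops functions and anything else that cannot be
      // sent to a child process; only illustrative, not the real logic.
      return JSON.parse(JSON.stringify(opts, (key, value) =>
        typeof value === 'function' ? undefined : value
      ))
    }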
launcher = workers try { @@ -110,7 +114,7 @@ function readBundled (pkg, staging, extractTo) { } function stageBundledModule (bundler, child, staging, parentPath) { - const stageFrom = path.join(parentPath, 'node_modules', child.package.name) + const stageFrom = path.join(parentPath, 'node_modules', moduleName(child)) const stageTo = moduleStagingPath(staging, child) return BB.map(child.children, (child) => { diff --git a/lib/install/action/fetch.js b/lib/install/action/fetch.js index 5ad34e29dd27e..346194e51607e 100644 --- a/lib/install/action/fetch.js +++ b/lib/install/action/fetch.js @@ -3,14 +3,14 @@ const BB = require('bluebird') const finished = BB.promisify(require('mississippi').finished) +const npmConfig = require('../../config/figgy-config.js') const packageId = require('../../utils/package-id.js') const pacote = require('pacote') -const pacoteOpts = require('../../config/pacote') module.exports = fetch function fetch (staging, pkg, log, next) { log.silly('fetch', packageId(pkg)) - const opts = pacoteOpts({integrity: pkg.package._integrity}) + const opts = npmConfig({integrity: pkg.package._integrity}) return finished(pacote.tarball.stream(pkg.package._requested, opts)) .then(() => next(), next) } diff --git a/lib/install/action/finalize.js b/lib/install/action/finalize.js index e46f1b9d83396..1e53c189d210e 100644 --- a/lib/install/action/finalize.js +++ b/lib/install/action/finalize.js @@ -3,7 +3,7 @@ const path = require('path') const fs = require('graceful-fs') const Bluebird = require('bluebird') const rimraf = Bluebird.promisify(require('rimraf')) -const mkdirp = Bluebird.promisify(require('mkdirp')) +const mkdirp = Bluebird.promisify(require('gentle-fs').mkdir) const lstat = Bluebird.promisify(fs.lstat) const readdir = Bluebird.promisify(fs.readdir) const symlink = Bluebird.promisify(fs.symlink) diff --git a/lib/install/action/global-install.js b/lib/install/action/global-install.js index bdc121b693c57..44d2f628f2c73 100644 --- a/lib/install/action/global-install.js +++ b/lib/install/action/global-install.js @@ -3,12 +3,13 @@ var path = require('path') var npm = require('../../npm.js') var Installer = require('../../install.js').Installer var packageId = require('../../utils/package-id.js') +var moduleName = require('../../utils/module-name.js') module.exports = function (staging, pkg, log, next) { log.silly('global-install', packageId(pkg)) var globalRoot = path.resolve(npm.globalDir, '..') npm.config.set('global', true) - var install = new Installer(globalRoot, false, [pkg.package.name + '@' + pkg.package._requested.fetchSpec]) + var install = new Installer(globalRoot, false, [moduleName(pkg) + '@' + pkg.package._requested.rawSpec]) install.link = false install.run(function () { npm.config.set('global', false) diff --git a/lib/install/action/global-link.js b/lib/install/action/global-link.js index f109e5b88a19f..c9d9a8feb2af7 100644 --- a/lib/install/action/global-link.js +++ b/lib/install/action/global-link.js @@ -1,8 +1,9 @@ 'use strict' +var moduleName = require('../../utils/module-name.js') var npm = require('../../npm.js') var packageId = require('../../utils/package-id.js') module.exports = function (staging, pkg, log, next) { log.silly('global-link', packageId(pkg)) - npm.link(pkg.package.name, next) + npm.link(moduleName(pkg), next) } diff --git a/lib/install/action/move.js b/lib/install/action/move.js index 00d58a1592317..8a956f59d6d90 100644 --- a/lib/install/action/move.js +++ b/lib/install/action/move.js @@ -4,7 +4,7 @@ var path = require('path') var chain = 
require('slide').chain var iferr = require('iferr') var rimraf = require('rimraf') -var mkdirp = require('mkdirp') +var mkdirp = require('gentle-fs').mkdir var rmStuff = require('../../unbuild.js').rmStuff var lifecycle = require('../../utils/lifecycle.js') var move = require('../../utils/move.js') diff --git a/lib/install/action/remove.js b/lib/install/action/remove.js index a852d10c5fd84..f7182d596bed0 100644 --- a/lib/install/action/remove.js +++ b/lib/install/action/remove.js @@ -3,7 +3,7 @@ var path = require('path') var fs = require('graceful-fs') var rimraf = require('rimraf') var asyncMap = require('slide').asyncMap -var mkdirp = require('mkdirp') +var mkdirp = require('gentle-fs').mkdir var npm = require('../../npm.js') var andIgnoreErrors = require('../and-ignore-errors.js') var move = require('../../utils/move.js') diff --git a/lib/install/actions.js b/lib/install/actions.js index a34d03ffe2146..e26432b77c86a 100644 --- a/lib/install/actions.js +++ b/lib/install/actions.js @@ -49,7 +49,7 @@ Object.keys(actions).forEach(function (actionName) { if (pkg.knownInstallable) { actionP = runAction(action, staging, pkg, log) } else { - actionP = isInstallable(pkg.package).then(() => { + actionP = isInstallable(null, pkg.package).then(() => { pkg.knownInstallable = true return runAction(action, staging, pkg, log) }) diff --git a/lib/install/and-add-parent-to-errors.js b/lib/install/and-add-parent-to-errors.js index 62a86bd4a6c36..fe4128230b1af 100644 --- a/lib/install/and-add-parent-to-errors.js +++ b/lib/install/and-add-parent-to-errors.js @@ -1,4 +1,5 @@ 'use strict' +var moduleName = require('../utils/module-name.js') var validate = require('aproba') module.exports = function (parent, cb) { @@ -6,7 +7,7 @@ module.exports = function (parent, cb) { return function (er) { if (!er) return cb.apply(null, arguments) if (er instanceof Error && parent && parent.package && parent.package.name) { - er.parent = parent.package.name + er.parent = moduleName(parent) } cb(er) } diff --git a/lib/install/audit.js b/lib/install/audit.js index f372b425a6fd4..f5bc5ae1a92d6 100644 --- a/lib/install/audit.js +++ b/lib/install/audit.js @@ -7,118 +7,115 @@ exports.printInstallReport = printInstallReport exports.printParseableReport = printParseableReport exports.printFullReport = printFullReport -const Bluebird = require('bluebird') const auditReport = require('npm-audit-report') +const npmConfig = require('../config/figgy-config.js') +const figgyPudding = require('figgy-pudding') const treeToShrinkwrap = require('../shrinkwrap.js').treeToShrinkwrap const packageId = require('../utils/package-id.js') const output = require('../utils/output.js') const npm = require('../npm.js') const qw = require('qw') -const registryFetch = require('npm-registry-fetch') -const zlib = require('zlib') -const gzip = Bluebird.promisify(zlib.gzip) -const log = require('npmlog') +const regFetch = require('npm-registry-fetch') const perf = require('../utils/perf.js') -const url = require('url') const npa = require('npm-package-arg') const uuid = require('uuid') const ssri = require('ssri') const cloneDeep = require('lodash.clonedeep') -const pacoteOpts = require('../config/pacote.js') // used when scrubbing module names/specifiers const runId = uuid.v4() +const InstallAuditConfig = figgyPudding({ + color: {}, + json: {}, + unicode: {} +}, { + other (key) { + return /:registry$/.test(key) + } +}) + function submitForInstallReport (auditData) { - const cfg = npm.config // avoid the no-dynamic-lookups test - const scopedRegistries = 
cfg.keys.filter(_ => /:registry$/.test(_)).map(_ => cfg.get(_)) - perf.emit('time', 'audit compress') - // TODO: registryFetch will be adding native support for `Content-Encoding: gzip` at which point - // we'll pass in something like `gzip: true` and not need to JSON stringify, gzip or headers. - return gzip(JSON.stringify(auditData)).then(body => { - perf.emit('timeEnd', 'audit compress') - log.info('audit', 'Submitting payload of ' + body.length + 'bytes') - scopedRegistries.forEach(reg => { - // we don't care about the response so destroy the stream if we can, or leave it flowing - // so it can eventually finish and clean up after itself - fetchAudit(url.resolve(reg, '/-/npm/v1/security/audits/quick')) - .then(_ => { - _.body.on('error', () => {}) - if (_.body.destroy) { - _.body.destroy() - } else { - _.body.resume() - } - }, _ => {}) - }) - perf.emit('time', 'audit submit') - return fetchAudit('/-/npm/v1/security/audits/quick', body).then(response => { - perf.emit('timeEnd', 'audit submit') - perf.emit('time', 'audit body') - return response.json() - }).then(result => { - perf.emit('timeEnd', 'audit body') - return result - }) + const opts = InstallAuditConfig(npmConfig()) + const scopedRegistries = [...opts.keys()].filter( + k => /:registry$/.test(k) + ).map(k => opts[k]) + scopedRegistries.forEach(registry => { + // we don't care about the response so destroy the stream if we can, or leave it flowing + // so it can eventually finish and clean up after itself + regFetch('/-/npm/v1/security/audits/quick', opts.concat({ + method: 'POST', + registry, + gzip: true, + body: auditData + })).then(_ => { + _.body.on('error', () => {}) + if (_.body.destroy) { + _.body.destroy() + } else { + _.body.resume() + } + }, _ => {}) }) -} - -function submitForFullReport (auditData) { - perf.emit('time', 'audit compress') - // TODO: registryFetch will be adding native support for `Content-Encoding: gzip` at which point - // we'll pass in something like `gzip: true` and not need to JSON stringify, gzip or headers. 
- return gzip(JSON.stringify(auditData)).then(body => { - perf.emit('timeEnd', 'audit compress') - log.info('audit', 'Submitting payload of ' + body.length + ' bytes') - perf.emit('time', 'audit submit') - return fetchAudit('/-/npm/v1/security/audits', body).then(response => { - perf.emit('timeEnd', 'audit submit') - perf.emit('time', 'audit body') - return response.json() - }).then(result => { - perf.emit('timeEnd', 'audit body') - result.runId = runId - return result - }) + perf.emit('time', 'audit submit') + return regFetch('/-/npm/v1/security/audits/quick', opts.concat({ + method: 'POST', + gzip: true, + body: auditData + })).then(response => { + perf.emit('timeEnd', 'audit submit') + perf.emit('time', 'audit body') + return response.json() + }).then(result => { + perf.emit('timeEnd', 'audit body') + return result }) } -function fetchAudit (href, body) { - const opts = pacoteOpts() - return registryFetch(href, { +function submitForFullReport (auditData) { + perf.emit('time', 'audit submit') + const opts = InstallAuditConfig(npmConfig()) + return regFetch('/-/npm/v1/security/audits', opts.concat({ method: 'POST', - headers: { 'content-encoding': 'gzip', 'content-type': 'application/json' }, - config: npm.config, - npmSession: opts.npmSession, - projectScope: npm.projectScope, - log: log, - body: body + gzip: true, + body: auditData + })).then(response => { + perf.emit('timeEnd', 'audit submit') + perf.emit('time', 'audit body') + return response.json() + }).then(result => { + perf.emit('timeEnd', 'audit body') + result.runId = runId + return result }) } function printInstallReport (auditResult) { + const opts = InstallAuditConfig(npmConfig()) return auditReport(auditResult, { reporter: 'install', - withColor: npm.color, - withUnicode: npm.config.get('unicode') + withColor: opts.color, + withUnicode: opts.unicode }).then(result => output(result.report)) } function printFullReport (auditResult) { + const opts = InstallAuditConfig(npmConfig()) return auditReport(auditResult, { log: output, - reporter: npm.config.get('json') ? 'json' : 'detail', - withColor: npm.color, - withUnicode: npm.config.get('unicode') + reporter: opts.json ? 'json' : 'detail', + withColor: opts.color, + withUnicode: opts.unicode }).then(result => output(result.report)) } function printParseableReport (auditResult) { + const opts = InstallAuditConfig(npmConfig()) return auditReport(auditResult, { log: output, reporter: 'parseable', - withColor: npm.color, - withUnicode: npm.config.get('unicode') + withColor: opts.color, + withUnicode: opts.unicode }).then(result => output(result.report)) } diff --git a/lib/install/deps.js b/lib/install/deps.js index c36265093b090..3d8b333c64441 100644 --- a/lib/install/deps.js +++ b/lib/install/deps.js @@ -57,6 +57,19 @@ function doesChildVersionMatch (child, requested, requestor) { if (fromSw.toString() === requested.toString()) return true } + if (requested.type === 'git' && requested.gitRange) { + const sameRepo = npa(child.package._from).fetchSpec === requested.fetchSpec + try { + return sameRepo && semver.satisfies(child.package.version, requested.gitRange, true) + } catch (e) { + return false + } + } + + if (requested.type === 'alias') { + return doesChildVersionMatch(child, requested.subSpec, requestor) + } + if (!registryTypes[requested.type]) { var childReq = child.package._requested if (childReq) { @@ -72,7 +85,7 @@ function doesChildVersionMatch (child, requested, requestor) { // You'll see this scenario happen with at least tags and git dependencies. 
// Some buggy clients will write spaces into the module name part of a _from. if (child.package._from) { - var fromReq = npa.resolve(moduleName(child), child.package._from.replace(new RegExp('^\\s*' + moduleName(child) + '\\s*@'), '')) + var fromReq = npa(child.package._from) if (fromReq.rawSpec === requested.rawSpec) return true if (fromReq.type === requested.type && fromReq.saveSpec && fromReq.saveSpec === requested.saveSpec) return true } @@ -190,10 +203,15 @@ function removeObsoleteDep (child, log) { function packageRelativePath (tree) { if (!tree) return '' var requested = tree.package._requested || {} - var isLocal = requested.type === 'directory' || requested.type === 'file' - return isLocal ? requested.fetchSpec - : (tree.isLink || tree.isInLink) && !preserveSymlinks() ? tree.realpath - : tree.path + if (requested.type === 'directory') { + return requested.fetchSpec + } else if (requested.type === 'file') { + return path.dirname(requested.fetchSpec) + } else if ((tree.isLink || tree.isInLink) && !preserveSymlinks()) { + return tree.realpath + } else { + return tree.path + } } function matchingDep (tree, name) { @@ -289,11 +307,13 @@ function computeVersionSpec (tree, child) { var requested var childReq = child.package._requested if (child.isLink) { - requested = npa.resolve(child.package.name, 'file:' + child.realpath, getTop(tree).path) + requested = npa.resolve(moduleName(child), 'file:' + child.realpath, getTop(tree).path) } else if (childReq && (isNotEmpty(childReq.saveSpec) || (isNotEmpty(childReq.rawSpec) && isNotEmpty(childReq.fetchSpec)))) { requested = child.package._requested } else if (child.package._from) { requested = npa(child.package._from, tree.path) + } else if (child.name && child.name !== child.package.name) { + requested = npa.resolve(child.name, `npm:${child.package.name}@${child.package.version})`) } else { requested = npa.resolve(child.package.name, child.package.version) } @@ -305,6 +325,9 @@ function computeVersionSpec (tree, child) { !npm.config.get('save-exact')) { rangeDescriptor = npm.config.get('save-prefix') } + if (requested.type === 'alias') { + rangeDescriptor = `npm:${requested.subSpec.name}@${rangeDescriptor}` + } return rangeDescriptor + version } else if (requested.type === 'directory' || requested.type === 'file') { return 'file:' + unixFormatPath(path.relative(getTop(tree).path, requested.fetchSpec)) @@ -324,7 +347,7 @@ exports.removeDeps = function (args, tree, saveToDependencies, next) { for (let pkg of args) { var pkgName = moduleName(pkg) var toRemove = tree.children.filter(moduleNameMatches(pkgName)) - var pkgToRemove = toRemove[0] || createChild({package: {name: pkgName}}) + var pkgToRemove = toRemove[0] || createChild({name: pkgName}) var saveType = getSaveType(tree, pkg) || 'dependencies' if (tree.isTop && saveToDependencies) { pkgToRemove.save = saveType @@ -647,16 +670,18 @@ function resolveWithNewModule (pkg, tree, log, next) { validate('OOOF', arguments) log.silly('resolveWithNewModule', packageId(pkg), 'checking installable status') - return isInstallable(pkg, (err) => { + return isInstallable(tree, pkg, (err) => { let installable = !err addBundled(pkg, (bundleErr) => { var parent = earliestInstallable(tree, tree, pkg, log) || tree var isLink = pkg._requested.type === 'directory' + var name = pkg._requested.name || pkg.name var child = createChild({ + name, package: pkg, parent: parent, - path: path.join(parent.isLink ? parent.realpath : parent.path, 'node_modules', pkg.name), - realpath: isLink ? 
pkg._requested.fetchSpec : path.join(parent.realpath, 'node_modules', pkg.name), + path: path.join(parent.isLink ? parent.realpath : parent.path, 'node_modules', name), + realpath: isLink ? pkg._requested.fetchSpec : path.join(parent.realpath, 'node_modules', name), children: pkg._bundled || [], isLink: isLink, isInLink: parent.isLink, @@ -691,6 +716,12 @@ function resolveWithNewModule (pkg, tree, log, next) { }) } +var isOptionalPeerDep = exports.isOptionalPeerDep = function (tree, pkgname) { + if (!tree.package.peerDependenciesMeta) return + if (!tree.package.peerDependenciesMeta[pkgname]) return + return !!tree.package.peerDependenciesMeta[pkgname].optional +} + var validatePeerDeps = exports.validatePeerDeps = function (tree, onInvalid) { if (!tree.package.peerDependencies) return Object.keys(tree.package.peerDependencies).forEach(function (pkgname) { @@ -699,7 +730,7 @@ var validatePeerDeps = exports.validatePeerDeps = function (tree, onInvalid) { var spec = npa.resolve(pkgname, version) } catch (e) {} var match = spec && findRequirement(tree.parent || tree, pkgname, spec) - if (!match) onInvalid(tree, pkgname, version) + if (!match && !isOptionalPeerDep(tree, pkgname)) onInvalid(tree, pkgname, version) }) } @@ -759,7 +790,7 @@ var earliestInstallable = exports.earliestInstallable = function (requiredBy, tr validate('OOOO', arguments) function undeletedModuleMatches (child) { - return !child.removed && moduleName(child) === pkg.name + return !child.removed && moduleName(child) === ((pkg._requested && pkg._requested.name) || pkg.name) } const undeletedMatches = tree.children.filter(undeletedModuleMatches) if (undeletedMatches.length) { diff --git a/lib/install/diff-trees.js b/lib/install/diff-trees.js index 346846fdc0ffe..147aa9b8e74f3 100644 --- a/lib/install/diff-trees.js +++ b/lib/install/diff-trees.js @@ -11,7 +11,7 @@ var moduleName = require('../utils/module-name.js') var isOnlyOptional = require('./is-only-optional.js') // we don't use get-requested because we're operating on files on disk, and -// we don't want to extropolate from what _should_ be there. +// we don't want to extrapolate from what _should_ be there. function pkgRequested (pkg) { return pkg._requested || (pkg._resolved && npa(pkg._resolved)) || (pkg._from && npa(pkg._from)) } diff --git a/lib/install/fund.js b/lib/install/fund.js new file mode 100644 index 0000000000000..809c05b33878b --- /dev/null +++ b/lib/install/fund.js @@ -0,0 +1,48 @@ +'use strict' + +const { EOL } = require('os') + +const computeMetadata = require('./deps.js').computeMetadata +const mutateIntoLogicalTree = require('./mutate-into-logical-tree.js') +var { getFundingInfo } = require('../utils/funding.js') + +exports.getPrintFundingReport = getPrintFundingReport +exports.getPrintFundingReportJSON = getPrintFundingReportJSON + +function getFundingResult ({ fund, idealTree }) { + if (fund) { + const fundingInfoTree = + mutateIntoLogicalTree.asReadInstalled( + computeMetadata(idealTree) + ) + const fundResult = getFundingInfo(fundingInfoTree, { countOnly: true }) + return fundResult + } else { + return {} + } +} + +function getPrintFundingReport ({ fund, idealTree }, opts) { + const fundResult = getFundingResult({ fund, idealTree }) + const { length } = fundResult || {} + const { json } = opts || {} + + function padding (msg) { + return json ? '' : (EOL + msg) + } + + function packageQuantity (amount) { + return `package${amount > 1 ? 
's are' : ' is'}` + } + + if (!length) return '' + + return padding('') + length + ' ' + + packageQuantity(length) + + ' looking for funding' + + padding(' run `npm fund` for details\n') +} + +function getPrintFundingReportJSON ({ fund, idealTree }) { + return getPrintFundingReport({ fund, idealTree }, { json: true }) +} diff --git a/lib/install/inflate-shrinkwrap.js b/lib/install/inflate-shrinkwrap.js index bf1ab7065724c..1ec4f9ba6dcfd 100644 --- a/lib/install/inflate-shrinkwrap.js +++ b/lib/install/inflate-shrinkwrap.js @@ -74,7 +74,7 @@ function quotemeta (str) { } function tarballToVersion (name, tb) { - const registry = quotemeta(npm.config.get('registry')) + const registry = quotemeta(npm.config.get('registry') || '') .replace(/https?:/, 'https?:') .replace(/([^/])$/, '$1/') let matchRegTarball @@ -141,6 +141,7 @@ function isGit (sw) { } function makeFakeChild (name, topPath, tree, sw, requested) { + const isDirectory = requested.type === 'directory' const from = sw.from || requested.raw const pkg = { name: name, @@ -167,17 +168,17 @@ function makeFakeChild (name, topPath, tree, sw, requested) { } const child = createChild({ package: pkg, - loaded: false, + loaded: isDirectory, parent: tree, children: [], fromShrinkwrap: requested, fakeChild: sw, fromBundle: sw.bundled ? tree.fromBundle || tree : null, path: childPath(tree.path, pkg), - realpath: requested.type === 'directory' ? requested.fetchSpec : childPath(tree.realpath, pkg), + realpath: isDirectory ? requested.fetchSpec : childPath(tree.realpath, pkg), location: (tree.location === '/' ? '' : tree.location + '/') + pkg.name, - isLink: requested.type === 'directory', - isInLink: tree.isLink, + isLink: isDirectory, + isInLink: tree.isLink || tree.isInLink, swRequires: sw.requires }) tree.children.push(child) diff --git a/lib/install/is-only-dev.js b/lib/install/is-only-dev.js index ef41e8ad1a265..804497393eaa3 100644 --- a/lib/install/is-only-dev.js +++ b/lib/install/is-only-dev.js @@ -29,7 +29,9 @@ function andIsOnlyDev (name, seen) { } else { if (seen.has(req)) return true seen.add(req) - return isOnlyDev(req, seen) + const result = isOnlyDev(req, seen) + seen.delete(req) + return result } } } diff --git a/lib/install/is-only-optional.js b/lib/install/is-only-optional.js index 72d6f065e6745..ea27eadcfa809 100644 --- a/lib/install/is-only-optional.js +++ b/lib/install/is-only-optional.js @@ -2,6 +2,7 @@ module.exports = isOptional const isOptDep = require('./is-opt-dep.js') +const moduleName = require('../utils/module-name.js') function isOptional (node, seen) { if (!seen) seen = new Set() @@ -12,8 +13,10 @@ function isOptional (node, seen) { } seen.add(node) const swOptional = node.fromShrinkwrap && node.package._optional - return node.requiredBy.every(function (req) { + const result = node.requiredBy.every(function (req) { if (req.fakeChild && swOptional) return true - return isOptDep(req, node.package.name) || isOptional(req, seen) + return isOptDep(req, moduleName(node)) || isOptional(req, seen) }) + seen.delete(node) + return result } diff --git a/lib/install/read-shrinkwrap.js b/lib/install/read-shrinkwrap.js index 7074678011127..1d9fb99c1d807 100644 --- a/lib/install/read-shrinkwrap.js +++ b/lib/install/read-shrinkwrap.js @@ -19,8 +19,7 @@ function readShrinkwrap (child, next) { maybeReadFile('npm-shrinkwrap.json', child), // Don't read non-root lockfiles child.isTop && maybeReadFile('package-lock.json', child), - child.isTop && maybeReadFile('package.json', child), - (shrinkwrap, lockfile, pkgJson) => { + (shrinkwrap, 
lockfile) => { if (shrinkwrap && lockfile) { log.warn('read-shrinkwrap', 'Ignoring package-lock.json because there is already an npm-shrinkwrap.json. Please use only one of the two.') } diff --git a/lib/install/save.js b/lib/install/save.js index 7227e78852ac7..92b44a108099b 100644 --- a/lib/install/save.js +++ b/lib/install/save.js @@ -44,7 +44,7 @@ exports.saveShrinkwrap = saveShrinkwrap function saveShrinkwrap (tree, next) { validate('OF', arguments) - if (!npm.config.get('shrinkwrap') || !npm.config.get('package-lock')) { + if (!npm.config.get('package-lock-only') && (!npm.config.get('shrinkwrap') || !npm.config.get('package-lock'))) { return next() } require('../shrinkwrap.js').createShrinkwrap(tree, {silent: false}, next) diff --git a/lib/install/validate-args.js b/lib/install/validate-args.js index 65b660417a4ca..b680a1b24ba47 100644 --- a/lib/install/validate-args.js +++ b/lib/install/validate-args.js @@ -16,7 +16,7 @@ module.exports = function (idealTree, args, next) { chain([ [hasMinimumFields, pkg], [checkSelf, idealTree, pkg, force], - [isInstallable, pkg] + [isInstallable, idealTree, pkg] ], done) }, next) } @@ -31,13 +31,24 @@ function hasMinimumFields (pkg, cb) { } } -function getWarnings (pkg) { - while (pkg.parent) pkg = pkg.parent - if (!pkg.warnings) pkg.warnings = [] - return pkg.warnings +function setWarnings (idealTree, warn) { + function top (tree) { + if (tree.parent) return top(tree.parent) + return tree + } + + var topTree = top(idealTree) + if (!topTree.warnings) topTree.warnings = [] + + if (topTree.warnings.every(i => ( + i.code !== warn.code || + i.required !== warn.required || + i.pkgid !== warn.pkgid))) { + topTree.warnings.push(warn) + } } -var isInstallable = module.exports.isInstallable = function (pkg, next) { +var isInstallable = module.exports.isInstallable = function (idealTree, pkg, next) { var force = npm.config.get('force') var nodeVersion = npm.config.get('node-version') if (/-/.test(nodeVersion)) { @@ -48,7 +59,7 @@ var isInstallable = module.exports.isInstallable = function (pkg, next) { var strict = npm.config.get('engine-strict') checkEngine(pkg, npm.version, nodeVersion, force, strict, iferr(next, thenWarnEngineIssues)) function thenWarnEngineIssues (warn) { - if (warn) getWarnings(pkg).push(warn) + if (idealTree && warn) setWarnings(idealTree, warn) checkPlatform(pkg, force, next) } } diff --git a/lib/logout.js b/lib/logout.js index a3287d42d1659..411f547210b8f 100644 --- a/lib/logout.js +++ b/lib/logout.js @@ -1,43 +1,44 @@ -module.exports = logout +'use strict' -var dezalgo = require('dezalgo') -var log = require('npmlog') +const BB = require('bluebird') -var npm = require('./npm.js') -var mapToRegistry = require('./utils/map-to-registry.js') +const eu = encodeURIComponent +const getAuth = require('npm-registry-fetch/auth.js') +const log = require('npmlog') +const npm = require('./npm.js') +const npmConfig = require('./config/figgy-config.js') +const npmFetch = require('libnpm/fetch') logout.usage = 'npm logout [--registry=] [--scope=<@scope>]' -function afterLogout (normalized, cb) { +function afterLogout (normalized) { var scope = npm.config.get('scope') if (scope) npm.config.del(scope + ':registry') npm.config.clearCredentialsByURI(normalized) - npm.config.save('user', cb) + return BB.fromNode(cb => npm.config.save('user', cb)) } +module.exports = logout function logout (args, cb) { - cb = dezalgo(cb) - - mapToRegistry('/', npm.config, function (err, uri, auth, normalized) { - if (err) return cb(err) - + const opts = npmConfig() + 
BB.try(() => { + const reg = npmFetch.pickRegistry('foo', opts) + const auth = getAuth(reg, opts) if (auth.token) { - log.verbose('logout', 'clearing session token for', normalized) - npm.registry.logout(normalized, { auth: auth }, function (err) { - if (err) return cb(err) - - afterLogout(normalized, cb) - }) + log.verbose('logout', 'clearing session token for', reg) + return npmFetch(`/-/user/token/${eu(auth.token)}`, opts.concat({ + method: 'DELETE', + ignoreBody: true + })).then(() => afterLogout(reg)) } else if (auth.username || auth.password) { - log.verbose('logout', 'clearing user credentials for', normalized) - - afterLogout(normalized, cb) + log.verbose('logout', 'clearing user credentials for', reg) + return afterLogout(reg) } else { - cb(new Error( - 'Not logged in to', normalized + ',', "so can't log out." - )) + throw new Error( + 'Not logged in to', reg + ',', "so can't log out." + ) } - }) + }).nodeify(cb) } diff --git a/lib/ls.js b/lib/ls.js index bb5e433f78fde..78a2b1d791c7d 100644 --- a/lib/ls.js +++ b/lib/ls.js @@ -12,6 +12,7 @@ var readPackageTree = require('read-package-tree') var archy = require('archy') var semver = require('semver') var color = require('ansicolors') +var moduleName = require('./utils/module-name.js') var npa = require('npm-package-arg') var sortedObject = require('sorted-object') var npm = require('./npm.js') @@ -59,7 +60,9 @@ var lsFromTree = ls.fromTree = function (dir, physicalTree, args, silent, cb) { args = [] } else { args = args.map(function (a) { - if (typeof a === 'object') { + if (typeof a === 'object' && a.package._requested.type === 'alias') { + return [moduleName(a), `npm:${a.package.name}@${a.package.version}`, a] + } else if (typeof a === 'object') { return [a.package.name, a.package.version, a] } else { var p = npa(a) @@ -305,7 +308,7 @@ function filterFound (root, args) { if (!markDeps) continue Object.keys(markDeps).forEach(function (depName) { var dep = markDeps[depName] - if (dep.peerMissing) return + if (dep.peerMissing && !dep._from) return dep._parent = markPkg for (var ii = 0; ii < args.length; ii++) { var argName = args[ii][0] @@ -392,8 +395,11 @@ function makeArchy_ (data, long, dir, depth, parent, d) { } var out = {} - // the top level is a bit special. 
- out.label = data._id || '' + if (data._requested && data._requested.type === 'alias') { + out.label = `${d}@npm:${data._id}` + } else { + out.label = data._id || '' + } if (data._found === 'explicit' && data._id) { if (npm.color) { out.label = color.bgBlack(color.yellow(out.label.trim())) + ' ' diff --git a/lib/npm.js b/lib/npm.js index da5a363602122..74ef6ad2c6e74 100644 --- a/lib/npm.js +++ b/lib/npm.js @@ -40,9 +40,7 @@ var which = require('which') var glob = require('glob') var rimraf = require('rimraf') - var lazyProperty = require('lazy-property') var parseJSON = require('./utils/parse-json.js') - var clientConfig = require('./config/reg-client.js') var aliases = require('./config/cmd-list').aliases var cmdList = require('./config/cmd-list').cmdList var plumbing = require('./config/cmd-list').plumbing @@ -106,7 +104,6 @@ }) var registryRefer - var registryLoaded Object.keys(abbrevs).concat(plumbing).forEach(function addCommand (c) { Object.defineProperty(npm.commands, c, { get: function () { @@ -153,7 +150,7 @@ }).filter(function (arg) { return arg && arg.match }).join(' ') - if (registryLoaded) npm.registry.refer = registryRefer + npm.referer = registryRefer } cmd.apply(npm, args) @@ -284,7 +281,33 @@ ua = ua.replace(/\{npm-version\}/gi, npm.version) ua = ua.replace(/\{platform\}/gi, process.platform) ua = ua.replace(/\{arch\}/gi, process.arch) - config.set('user-agent', ua) + + // continuous integration platforms + const ciName = process.env.GERRIT_PROJECT ? 'gerrit' + : process.env.GITLAB_CI ? 'gitlab' + : process.env.APPVEYOR ? 'appveyor' + : process.env.CIRCLECI ? 'circle-ci' + : process.env.SEMAPHORE ? 'semaphore' + : process.env.DRONE ? 'drone' + : process.env.GITHUB_ACTION ? 'github-actions' + : process.env.TDDIUM ? 'tddium' + : process.env.JENKINS_URL ? 'jenkins' + : process.env['bamboo.buildKey'] ? 'bamboo' + : process.env.GO_PIPELINE_NAME ? 'gocd' + // codeship and a few others + : process.env.CI_NAME ? process.env.CI_NAME + // test travis after the others, since several CI systems mimic it + : process.env.TRAVIS ? 'travis-ci' + // aws CodeBuild/CodePipeline + : process.env.CODEBUILD_SRC_DIR ? 'aws-codebuild' + : process.env.CI === 'true' || process.env.CI === '1' ? 'custom' + // Google Cloud Build - it sets almost nothing + : process.env.BUILDER_OUTPUT ? 'builder' + : false + const ci = ciName ? `ci/${ciName}` : '' + ua = ua.replace(/\{ci\}/gi, ci) + + config.set('user-agent', ua.trim()) if (config.get('metrics-registry') == null) { config.set('metrics-registry', config.get('registry')) @@ -357,17 +380,6 @@ npm.projectScope = config.get('scope') || scopeifyScope(getProjectScope(npm.prefix)) - // at this point the configs are all set. - // go ahead and spin up the registry client. 
- lazyProperty(npm, 'registry', function () { - registryLoaded = true - var RegClient = require('npm-registry-client') - var registry = new RegClient(clientConfig(npm, log, npm.config)) - registry.version = npm.version - registry.refer = registryRefer - return registry - }) - startMetrics() return cb(null, npm) diff --git a/lib/org.js b/lib/org.js new file mode 100644 index 0000000000000..d8f857e3dfdd9 --- /dev/null +++ b/lib/org.js @@ -0,0 +1,151 @@ +'use strict' + +const figgyPudding = require('figgy-pudding') +const liborg = require('libnpm/org') +const npmConfig = require('./config/figgy-config.js') +const output = require('./utils/output.js') +const otplease = require('./utils/otplease.js') +const Table = require('cli-table3') + +module.exports = org + +org.subcommands = ['set', 'rm', 'ls'] + +org.usage = + 'npm org set orgname username [developer | admin | owner]\n' + + 'npm org rm orgname username\n' + + 'npm org ls orgname []' + +const OrgConfig = figgyPudding({ + json: {}, + loglevel: {}, + parseable: {}, + silent: {} +}) + +org.completion = function (opts, cb) { + var argv = opts.conf.argv.remain + if (argv.length === 2) { + return cb(null, org.subcommands) + } + switch (argv[2]) { + case 'ls': + case 'add': + case 'rm': + case 'set': + return cb(null, []) + default: + return cb(new Error(argv[2] + ' not recognized')) + } +} + +function UsageError () { + throw Object.assign(new Error(org.usage), {code: 'EUSAGE'}) +} + +function org ([cmd, orgname, username, role], cb) { + otplease(npmConfig(), opts => { + opts = OrgConfig(opts) + switch (cmd) { + case 'add': + case 'set': + return orgSet(orgname, username, role, opts) + case 'rm': + return orgRm(orgname, username, opts) + case 'ls': + return orgList(orgname, username, opts) + default: + UsageError() + } + }).then( + x => cb(null, x), + err => cb(err.code === 'EUSAGE' ? err.message : err) + ) +} + +function orgSet (org, user, role, opts) { + role = role || 'developer' + if (!org) { + throw new Error('First argument `orgname` is required.') + } + if (!user) { + throw new Error('Second argument `username` is required.') + } + if (!['owner', 'admin', 'developer'].find(x => x === role)) { + throw new Error('Third argument `role` must be one of `owner`, `admin`, or `developer`, with `developer` being the default value if omitted.') + } + return liborg.set(org, user, role, opts).then(memDeets => { + if (opts.json) { + output(JSON.stringify(memDeets, null, 2)) + } else if (opts.parseable) { + output(['org', 'orgsize', 'user', 'role'].join('\t')) + output([ + memDeets.org.name, + memDeets.org.size, + memDeets.user, + memDeets.role + ]) + } else if (!opts.silent && opts.loglevel !== 'silent') { + output(`Added ${memDeets.user} as ${memDeets.role} to ${memDeets.org.name}. You now ${memDeets.org.size} member${memDeets.org.size === 1 ? 
'' : 's'} in this org.`) + } + return memDeets + }) +} + +function orgRm (org, user, opts) { + if (!org) { + throw new Error('First argument `orgname` is required.') + } + if (!user) { + throw new Error('Second argument `username` is required.') + } + return liborg.rm(org, user, opts).then(() => { + return liborg.ls(org, opts) + }).then(roster => { + user = user.replace(/^[~@]?/, '') + org = org.replace(/^[~@]?/, '') + const userCount = Object.keys(roster).length + if (opts.json) { + output(JSON.stringify({ + user, + org, + userCount, + deleted: true + })) + } else if (opts.parseable) { + output(['user', 'org', 'userCount', 'deleted'].join('\t')) + output([user, org, userCount, true].join('\t')) + } else if (!opts.silent && opts.loglevel !== 'silent') { + output(`Successfully removed ${user} from ${org}. You now have ${userCount} member${userCount === 1 ? '' : 's'} in this org.`) + } + }) +} + +function orgList (org, user, opts) { + if (!org) { + throw new Error('First argument `orgname` is required.') + } + return liborg.ls(org, opts).then(roster => { + if (user) { + const newRoster = {} + if (roster[user]) { + newRoster[user] = roster[user] + } + roster = newRoster + } + if (opts.json) { + output(JSON.stringify(roster, null, 2)) + } else if (opts.parseable) { + output(['user', 'role'].join('\t')) + Object.keys(roster).forEach(user => { + output([user, roster[user]].join('\t')) + }) + } else if (!opts.silent && opts.loglevel !== 'silent') { + const table = new Table({head: ['user', 'role']}) + Object.keys(roster).sort().forEach(user => { + table.push([user, roster[user]]) + }) + output(table.toString()) + } + }) +} diff --git a/lib/outdated.js b/lib/outdated.js index 024e076c4f9ad..5b84ae35587c8 100644 --- a/lib/outdated.js +++ b/lib/outdated.js @@ -20,28 +20,47 @@ outdated.usage = 'npm outdated [[<@scope>/] ...]' outdated.completion = require('./utils/completion/installed-deep.js') -var os = require('os') -var url = require('url') -var path = require('path') -var readPackageTree = require('read-package-tree') -var asyncMap = require('slide').asyncMap -var color = require('ansicolors') -var styles = require('ansistyles') -var table = require('text-table') -var semver = require('semver') -var npa = require('npm-package-arg') -var pickManifest = require('npm-pick-manifest') -var fetchPackageMetadata = require('./fetch-package-metadata.js') -var mutateIntoLogicalTree = require('./install/mutate-into-logical-tree.js') -var npm = require('./npm.js') -var long = npm.config.get('long') -var mapToRegistry = require('./utils/map-to-registry.js') -var isExtraneous = require('./install/is-extraneous.js') -var computeMetadata = require('./install/deps.js').computeMetadata -var computeVersionSpec = require('./install/deps.js').computeVersionSpec -var moduleName = require('./utils/module-name.js') -var output = require('./utils/output.js') -var ansiTrim = require('./utils/ansi-trim') +const os = require('os') +const url = require('url') +const path = require('path') +const readPackageTree = require('read-package-tree') +const asyncMap = require('slide').asyncMap +const color = require('ansicolors') +const styles = require('ansistyles') +const table = require('text-table') +const semver = require('semver') +const npa = require('libnpm/parse-arg') +const pickManifest = require('npm-pick-manifest') +const fetchPackageMetadata = require('./fetch-package-metadata.js') +const mutateIntoLogicalTree = require('./install/mutate-into-logical-tree.js') +const npm = require('./npm.js') +const npmConfig = 
require('./config/figgy-config.js') +const figgyPudding = require('figgy-pudding') +const packument = require('libnpm/packument') +const long = npm.config.get('long') +const isExtraneous = require('./install/is-extraneous.js') +const computeMetadata = require('./install/deps.js').computeMetadata +const computeVersionSpec = require('./install/deps.js').computeVersionSpec +const moduleName = require('./utils/module-name.js') +const output = require('./utils/output.js') +const ansiTrim = require('./utils/ansi-trim') + +const OutdatedConfig = figgyPudding({ + also: {}, + color: {}, + depth: {}, + dev: 'development', + development: {}, + global: {}, + json: {}, + only: {}, + parseable: {}, + prod: 'production', + production: {}, + save: {}, + 'save-dev': {}, + 'save-optional': {} +}) function uniq (list) { // we maintain the array because we need an array, not iterator, return @@ -68,26 +87,30 @@ function outdated (args, silent, cb) { cb = silent silent = false } + let opts = OutdatedConfig(npmConfig()) var dir = path.resolve(npm.dir, '..') // default depth for `outdated` is 0 (cf. `ls`) - if (npm.config.get('depth') === Infinity) npm.config.set('depth', 0) + if (opts.depth === Infinity) opts = opts.concat({depth: 0}) readPackageTree(dir, andComputeMetadata(function (er, tree) { if (!tree) return cb(er) mutateIntoLogicalTree(tree) - outdated_(args, '', tree, {}, 0, function (er, list) { + outdated_(args, '', tree, {}, 0, opts, function (er, list) { list = uniq(list || []).sort(function (aa, bb) { return aa[0].path.localeCompare(bb[0].path) || aa[1].localeCompare(bb[1]) }) - if (er || silent || list.length === 0) return cb(er, list) - if (npm.config.get('json')) { - output(makeJSON(list)) - } else if (npm.config.get('parseable')) { - output(makeParseable(list)) + if (er || silent || + (list.length === 0 && !opts.json)) { + return cb(er, list) + } + if (opts.json) { + output(makeJSON(list, opts)) + } else if (opts.parseable) { + output(makeParseable(list, opts)) } else { - var outList = list.map(makePretty) + var outList = list.map(x => makePretty(x, opts)) var outHead = [ 'Package', 'Current', 'Wanted', @@ -97,7 +120,7 @@ function outdated (args, silent, cb) { if (long) outHead.push('Package Type', 'Homepage') var outTable = [outHead].concat(outList) - if (npm.color) { + if (opts.color) { outTable[0] = outTable[0].map(function (heading) { return styles.underline(heading) }) @@ -109,35 +132,35 @@ function outdated (args, silent, cb) { } output(table(outTable, tableOpts)) } - process.exitCode = 1 + process.exitCode = list.length ? 1 : 0 cb(null, list.map(function (item) { return [item[0].parent.path].concat(item.slice(1, 7)) })) }) })) } // [[ dir, dep, has, want, latest, type ]] -function makePretty (p) { +function makePretty (p, opts) { var depname = p[1] var has = p[2] var want = p[3] var latest = p[4] var type = p[6] var deppath = p[7] - var homepage = p[0].package.homepage + var homepage = p[0].package.homepage || '' var columns = [ depname, has || 'MISSING', want, latest, - deppath + deppath || 'global' ] if (long) { columns[5] = type columns[6] = homepage } - if (npm.color) { - columns[0] = color[has === want || want === 'linked' ? 'yellow' : 'red'](columns[0]) // dep + if (opts.color) { + columns[0] = color[has === want ? 
'yellow' : 'red'](columns[0]) // dep columns[2] = color.green(columns[2]) // want columns[3] = color.magenta(columns[3]) // latest } @@ -167,7 +190,7 @@ function makeParseable (list) { }).join(os.EOL) } -function makeJSON (list) { +function makeJSON (list, opts) { var out = {} list.forEach(function (p) { var dep = p[0] @@ -177,7 +200,7 @@ function makeJSON (list) { var want = p[3] var latest = p[4] var type = p[6] - if (!npm.config.get('global')) { + if (!opts.global) { dir = path.relative(process.cwd(), dir) } out[depname] = { current: has, @@ -193,11 +216,11 @@ function makeJSON (list) { return JSON.stringify(out, null, 2) } -function outdated_ (args, path, tree, parentHas, depth, cb) { +function outdated_ (args, path, tree, parentHas, depth, opts, cb) { if (!tree.package) tree.package = {} - if (path && tree.package.name) path += ' > ' + tree.package.name - if (!path && tree.package.name) path = tree.package.name - if (depth > npm.config.get('depth')) { + if (path && moduleName(tree)) path += ' > ' + tree.package.name + if (!path && moduleName(tree)) path = tree.package.name + if (depth > opts.depth) { return cb(null, []) } var types = {} @@ -227,11 +250,14 @@ function outdated_ (args, path, tree, parentHas, depth, cb) { // (All the save checking here is because this gets called from npm-update currently // and that requires this logic around dev deps.) // FIXME: Refactor npm update to not be in terms of outdated. - var dev = npm.config.get('dev') || /^dev(elopment)?$/.test(npm.config.get('also')) - var prod = npm.config.get('production') || /^prod(uction)?$/.test(npm.config.get('only')) - if ((dev || !prod) && - (npm.config.get('save-dev') || ( - !npm.config.get('save') && !npm.config.get('save-optional')))) { + var dev = opts.dev || /^dev(elopment)?$/.test(opts.also) + var prod = opts.production || /^prod(uction)?$/.test(opts.only) + if ( + (dev || !prod) && + ( + opts['save-dev'] || (!opts.save && !opts['save-optional']) + ) + ) { Object.keys(tree.missingDevDeps).forEach(function (name) { deps.push({ package: { name: name }, @@ -245,15 +271,15 @@ function outdated_ (args, path, tree, parentHas, depth, cb) { }) } - if (npm.config.get('save-dev')) { + if (opts['save-dev']) { deps = deps.filter(function (dep) { return pkg.devDependencies[moduleName(dep)] }) deps.forEach(function (dep) { types[moduleName(dep)] = 'devDependencies' }) - } else if (npm.config.get('save')) { + } else if (opts.save) { // remove optional dependencies from dependencies during --save. deps = deps.filter(function (dep) { return !pkg.optionalDependencies[moduleName(dep)] }) - } else if (npm.config.get('save-optional')) { + } else if (opts['save-optional']) { deps = deps.filter(function (dep) { return pkg.optionalDependencies[moduleName(dep)] }) deps.forEach(function (dep) { types[moduleName(dep)] = 'optionalDependencies' @@ -262,7 +288,7 @@ function outdated_ (args, path, tree, parentHas, depth, cb) { var doUpdate = dev || ( !prod && !Object.keys(parentHas).length && - !npm.config.get('global') + !opts.global ) if (doUpdate) { Object.keys(pkg.devDependencies || {}).forEach(function (k) { @@ -275,10 +301,10 @@ function outdated_ (args, path, tree, parentHas, depth, cb) { var has = Object.create(parentHas) tree.children.forEach(function (child) { - if (child.package.name && child.package.private) { + if (moduleName(child) && child.package.private) { deps = deps.filter(function (dep) { return dep !== child }) } - has[child.package.name] = { + has[moduleName(child)] = { version: child.isLink ? 
'linked' : child.package.version, from: child.isLink ? 'file:' + child.path : child.package._from } @@ -300,13 +326,13 @@ function outdated_ (args, path, tree, parentHas, depth, cb) { required = computeVersionSpec(tree, dep) } - if (!long) return shouldUpdate(args, dep, name, has, required, depth, path, cb) + if (!long) return shouldUpdate(args, dep, name, has, required, depth, path, opts, cb) - shouldUpdate(args, dep, name, has, required, depth, path, cb, types[name]) + shouldUpdate(args, dep, name, has, required, depth, path, opts, cb, types[name]) }, cb) } -function shouldUpdate (args, tree, dep, has, req, depth, pkgpath, cb, type) { +function shouldUpdate (args, tree, dep, has, req, depth, pkgpath, opts, cb, type) { // look up the most recent version. // if that's what we already have, or if it's not on the args list, // then dive into it. Otherwise, cb() with the data. @@ -322,16 +348,10 @@ function shouldUpdate (args, tree, dep, has, req, depth, pkgpath, cb, type) { tree, has, depth + 1, + opts, cb) } - function doIt (wanted, latest) { - if (!long) { - return cb(null, [[tree, dep, curr && curr.version, wanted, latest, req, null, pkgpath]]) - } - cb(null, [[tree, dep, curr && curr.version, wanted, latest, req, type, pkgpath]]) - } - if (args.length && args.indexOf(dep) === -1) return skip() if (tree.isLink && req == null) return skip() @@ -349,12 +369,23 @@ function shouldUpdate (args, tree, dep, has, req, depth, pkgpath, cb, type) { return doIt('git', 'git') } else if (parsed.type === 'file') { return updateLocalDeps() + } else if (parsed.type === 'remote') { + return doIt('remote', 'remote') } else { - return mapToRegistry(dep, npm.config, function (er, uri, auth) { - if (er) return cb(er) + return packument(parsed, opts.concat({ + 'prefer-online': true + })).nodeify(updateDeps) + } - npm.registry.get(uri, { auth: auth }, updateDeps) - }) + function doIt (wanted, latest) { + let c = curr && curr.version + if (parsed.type === 'alias') { + c = `npm:${parsed.subSpec.name}@${c}` + } + if (!long) { + return cb(null, [[tree, dep, c, wanted, latest, req, null, pkgpath]]) + } + cb(null, [[tree, dep, c, wanted, latest, req, type, pkgpath]]) } function updateLocalDeps (latestRegistryVersion) { @@ -383,11 +414,14 @@ function shouldUpdate (args, tree, dep, has, req, depth, pkgpath, cb, type) { function updateDeps (er, d) { if (er) return cb(er) + if (parsed.type === 'alias') { + req = parsed.subSpec.rawSpec + } try { var l = pickManifest(d, 'latest') var m = pickManifest(d, req) } catch (er) { - if (er.code === 'ETARGET') { + if (er.code === 'ETARGET' || er.code === 'E403') { return skip(er) } else { return skip() @@ -399,11 +433,20 @@ function shouldUpdate (args, tree, dep, has, req, depth, pkgpath, cb, type) { var dFromUrl = m._from && url.parse(m._from).protocol var cFromUrl = curr && curr.from && url.parse(curr.from).protocol - if (!curr || - (dFromUrl && cFromUrl && m._from !== curr.from) || - m.version !== curr.version || - m.version !== l.version) { - doIt(m.version, l.version) + if ( + !curr || + (dFromUrl && cFromUrl && m._from !== curr.from) || + m.version !== curr.version || + m.version !== l.version + ) { + if (parsed.type === 'alias') { + doIt( + `npm:${parsed.subSpec.name}@${m.version}`, + `npm:${parsed.subSpec.name}@${l.version}` + ) + } else { + doIt(m.version, l.version) + } } else { skip() } diff --git a/lib/owner.js b/lib/owner.js index 3c2660ace113d..a64cb5e14ccef 100644 --- a/lib/owner.js +++ b/lib/owner.js @@ -1,12 +1,17 @@ -/* eslint-disable standard/no-callback-literal 
*/ module.exports = owner -var npm = require('./npm.js') -var log = require('npmlog') -var mapToRegistry = require('./utils/map-to-registry.js') -var readLocalPkg = require('./utils/read-local-package.js') -var usage = require('./utils/usage') -var output = require('./utils/output.js') +const BB = require('bluebird') + +const log = require('npmlog') +const npa = require('libnpm/parse-arg') +const npmConfig = require('./config/figgy-config.js') +const npmFetch = require('libnpm/fetch') +const output = require('./utils/output.js') +const otplease = require('./utils/otplease.js') +const packument = require('libnpm/packument') +const readLocalPkg = BB.promisify(require('./utils/read-local-package.js')) +const usage = require('./utils/usage') +const whoami = BB.promisify(require('./whoami.js')) owner.usage = usage( 'owner', @@ -14,8 +19,9 @@ owner.usage = usage( '\nnpm owner rm [<@scope>/]' + '\nnpm owner ls [<@scope>/]' ) + owner.completion = function (opts, cb) { - var argv = opts.conf.argv.remain + const argv = opts.conf.argv.remain if (argv.length > 4) return cb() if (argv.length <= 2) { var subs = ['add', 'rm'] @@ -23,130 +29,109 @@ owner.completion = function (opts, cb) { else subs.push('ls', 'list') return cb(null, subs) } - - npm.commands.whoami([], true, function (er, username) { - if (er) return cb() - - var un = encodeURIComponent(username) - var byUser, theUser - switch (argv[2]) { - case 'ls': - // FIXME: there used to be registry completion here, but it stopped - // making sense somewhere around 50,000 packages on the registry - return cb() - - case 'rm': - if (argv.length > 3) { - theUser = encodeURIComponent(argv[3]) - byUser = '-/by-user/' + theUser + '|' + un - return mapToRegistry(byUser, npm.config, function (er, uri, auth) { - if (er) return cb(er) - - console.error(uri) - npm.registry.get(uri, { auth: auth }, function (er, d) { - if (er) return cb(er) - // return the intersection - return cb(null, d[theUser].filter(function (p) { + BB.try(() => { + const opts = npmConfig() + return whoami([], true).then(username => { + const un = encodeURIComponent(username) + let byUser, theUser + switch (argv[2]) { + case 'ls': + // FIXME: there used to be registry completion here, but it stopped + // making sense somewhere around 50,000 packages on the registry + return + case 'rm': + if (argv.length > 3) { + theUser = encodeURIComponent(argv[3]) + byUser = `/-/by-user/${theUser}|${un}` + return npmFetch.json(byUser, opts).then(d => { + return d[theUser].filter( // kludge for server adminery. - return un === 'isaacs' || d[un].indexOf(p) === -1 - })) + p => un === 'isaacs' || d[un].indexOf(p) === -1 + ) }) - }) - } - // else fallthrough - /* eslint no-fallthrough:0 */ - case 'add': - if (argv.length > 3) { - theUser = encodeURIComponent(argv[3]) - byUser = '-/by-user/' + theUser + '|' + un - return mapToRegistry(byUser, npm.config, function (er, uri, auth) { - if (er) return cb(er) - - console.error(uri) - npm.registry.get(uri, { auth: auth }, function (er, d) { - console.error(uri, er || d) - // return mine that they're not already on. 
- if (er) return cb(er) + } + // else fallthrough + /* eslint no-fallthrough:0 */ + case 'add': + if (argv.length > 3) { + theUser = encodeURIComponent(argv[3]) + byUser = `/-/by-user/${theUser}|${un}` + return npmFetch.json(byUser, opts).then(d => { var mine = d[un] || [] var theirs = d[theUser] || [] - return cb(null, mine.filter(function (p) { - return theirs.indexOf(p) === -1 - })) + return mine.filter(p => theirs.indexOf(p) === -1) }) - }) - } - // just list all users who aren't me. - return mapToRegistry('-/users', npm.config, function (er, uri, auth) { - if (er) return cb(er) + } else { + // just list all users who aren't me. + return npmFetch.json('/-/users', opts).then(list => { + return Object.keys(list).filter(n => n !== un) + }) + } - npm.registry.get(uri, { auth: auth }, function (er, list) { - if (er) return cb() - return cb(null, Object.keys(list).filter(function (n) { - return n !== un - })) - }) - }) + default: + return cb() + } + }) + }).nodeify(cb) +} - default: - return cb() - } - }) +function UsageError () { + throw Object.assign(new Error(owner.usage), {code: 'EUSAGE'}) } -function owner (args, cb) { - var action = args.shift() - switch (action) { - case 'ls': case 'list': return ls(args[0], cb) - case 'add': return add(args[0], args[1], cb) - case 'rm': case 'remove': return rm(args[0], args[1], cb) - default: return unknown(action, cb) - } +function owner ([action, ...args], cb) { + const opts = npmConfig() + BB.try(() => { + switch (action) { + case 'ls': case 'list': return ls(args[0], opts) + case 'add': return add(args[0], args[1], opts) + case 'rm': case 'remove': return rm(args[0], args[1], opts) + default: UsageError() + } + }).then( + data => cb(null, data), + err => err.code === 'EUSAGE' ? cb(err.message) : cb(err) + ) } -function ls (pkg, cb) { +function ls (pkg, opts) { if (!pkg) { - return readLocalPkg(function (er, pkg) { - if (er) return cb(er) - if (!pkg) return cb(owner.usage) - ls(pkg, cb) + return readLocalPkg().then(pkg => { + if (!pkg) { UsageError() } + return ls(pkg, opts) }) } - mapToRegistry(pkg, npm.config, function (er, uri, auth) { - if (er) return cb(er) - - npm.registry.get(uri, { auth: auth }, function (er, data) { - var msg = '' - if (er) { - log.error('owner ls', "Couldn't get owner data", pkg) - return cb(er) - } + const spec = npa(pkg) + return packument(spec, opts.concat({fullMetadata: true})).then( + data => { var owners = data.maintainers if (!owners || !owners.length) { - msg = 'admin party!' 
+ output('admin party!') } else { - msg = owners.map(function (o) { - return o.name + ' <' + o.email + '>' - }).join('\n') + output(owners.map(o => `${o.name} <${o.email}>`).join('\n')) } - output(msg) - cb(er, owners) - }) - }) + return owners + }, + err => { + log.error('owner ls', "Couldn't get owner data", pkg) + throw err + } + ) } -function add (user, pkg, cb) { - if (!user) return cb(owner.usage) +function add (user, pkg, opts) { + if (!user) { UsageError() } if (!pkg) { - return readLocalPkg(function (er, pkg) { - if (er) return cb(er) - if (!pkg) return cb(new Error(owner.usage)) - add(user, pkg, cb) + return readLocalPkg().then(pkg => { + if (!pkg) { UsageError() } + return add(user, pkg, opts) }) } - log.verbose('owner add', '%s to %s', user, pkg) - mutate(pkg, user, function (u, owners) { + + const spec = npa(pkg) + return withMutation(spec, user, opts, (u, owners) => { if (!owners) owners = [] for (var i = 0, l = owners.length; i < l; i++) { var o = owners[i] @@ -160,22 +145,23 @@ function add (user, pkg, cb) { } owners.push(u) return owners - }, cb) + }) } -function rm (user, pkg, cb) { +function rm (user, pkg, opts) { + if (!user) { UsageError() } if (!pkg) { - return readLocalPkg(function (er, pkg) { - if (er) return cb(er) - if (!pkg) return cb(new Error(owner.usage)) - rm(user, pkg, cb) + return readLocalPkg().then(pkg => { + if (!pkg) { UsageError() } + return add(user, pkg, opts) }) } - log.verbose('owner rm', '%s from %s', user, pkg) - mutate(pkg, user, function (u, owners) { - var found = false - var m = owners.filter(function (o) { + + const spec = npa(pkg) + return withMutation(spec, user, opts, function (u, owners) { + let found = false + const m = owners.filter(function (o) { var match = (o.name === user) found = found || match return !match @@ -187,92 +173,70 @@ function rm (user, pkg, cb) { } if (!m.length) { - return new Error( + throw new Error( 'Cannot remove all owners of a package. Add someone else first.' 
) } return m - }, cb) + }) } -function mutate (pkg, user, mutation, cb) { - if (user) { - var byUser = '-/user/org.couchdb.user:' + user - mapToRegistry(byUser, npm.config, function (er, uri, auth) { - if (er) return cb(er) - - npm.registry.get(uri, { auth: auth }, mutate_) - }) - } else { - mutate_(null, null) - } +function withMutation (spec, user, opts, mutation) { + return BB.try(() => { + if (user) { + const uri = `/-/user/org.couchdb.user:${encodeURIComponent(user)}` + return npmFetch.json(uri, opts).then(mutate_, err => { + log.error('owner mutate', 'Error getting user data for %s', user) + throw err + }) + } else { + return mutate_(null) + } + }) - function mutate_ (er, u) { - if (!er && user && (!u || u.error)) { - er = new Error( + function mutate_ (u) { + if (user && (!u || u.error)) { + throw new Error( "Couldn't get user data for " + user + ': ' + JSON.stringify(u) ) } - if (er) { - log.error('owner mutate', 'Error getting user data for %s', user) - return cb(er) - } - if (u) u = { name: u.name, email: u.email } - mapToRegistry(pkg, npm.config, function (er, uri, auth) { - if (er) return cb(er) - - npm.registry.get(uri, { auth: auth }, function (er, data) { - if (er) { - log.error('owner mutate', 'Error getting package data for %s', pkg) - return cb(er) - } - - // save the number of maintainers before mutation so that we can figure - // out if maintainers were added or removed - var beforeMutation = data.maintainers.length - - var m = mutation(u, data.maintainers) - if (!m) return cb() // handled - if (m instanceof Error) return cb(m) // error - - data = { - _id: data._id, - _rev: data._rev, - maintainers: m - } - var dataPath = pkg.replace('/', '%2f') + '/-rev/' + data._rev - mapToRegistry(dataPath, npm.config, function (er, uri, auth) { - if (er) return cb(er) - - var params = { - method: 'PUT', - body: data, - auth: auth - } - npm.registry.request(uri, params, function (er, data) { - if (!er && data.error) { - er = new Error('Failed to update package metadata: ' + JSON.stringify(data)) - } - - if (er) { - log.error('owner mutate', 'Failed to update package metadata') - } else if (m.length > beforeMutation) { - output('+ %s (%s)', user, pkg) - } else if (m.length < beforeMutation) { - output('- %s (%s)', user, pkg) - } - - cb(er, data) - }) + return packument(spec, opts.concat({ + fullMetadata: true + })).then(data => { + // save the number of maintainers before mutation so that we can figure + // out if maintainers were added or removed + const beforeMutation = data.maintainers.length + + const m = mutation(u, data.maintainers) + if (!m) return // handled + if (m instanceof Error) throw m // error + + data = { + _id: data._id, + _rev: data._rev, + maintainers: m + } + const dataPath = `/${spec.escapedName}/-rev/${encodeURIComponent(data._rev)}` + return otplease(opts, opts => { + const reqOpts = opts.concat({ + method: 'PUT', + body: data, + spec }) + return npmFetch.json(dataPath, reqOpts) + }).then(data => { + if (data.error) { + throw new Error('Failed to update package metadata: ' + JSON.stringify(data)) + } else if (m.length > beforeMutation) { + output('+ %s (%s)', user, spec.name) + } else if (m.length < beforeMutation) { + output('- %s (%s)', user, spec.name) + } + return data }) }) } } - -function unknown (action, cb) { - cb('Usage: \n' + owner.usage) -} diff --git a/lib/pack.js b/lib/pack.js index 3b3f5b7bbc700..8189cb6cedc0b 100644 --- a/lib/pack.js +++ b/lib/pack.js @@ -4,6 +4,9 @@ // Packs the specified package into a .tgz file, which can then // be installed. 
+// Set this early to avoid issues with circular dependencies. +module.exports = pack + const BB = require('bluebird') const byteSize = require('byte-size') @@ -18,9 +21,9 @@ const lifecycle = BB.promisify(require('./utils/lifecycle')) const log = require('npmlog') const move = require('move-concurrently') const npm = require('./npm') +const npmConfig = require('./config/figgy-config.js') const output = require('./utils/output') const pacote = require('pacote') -const pacoteOpts = require('./config/pacote') const path = require('path') const PassThrough = require('stream').PassThrough const pathIsInside = require('path-is-inside') @@ -37,7 +40,6 @@ pack.usage = 'npm pack [[<@scope>/]...] [--dry-run]' // if it can be installed, it can be packed. pack.completion = install.completion -module.exports = pack function pack (args, silent, cb) { const cwd = process.cwd() if (typeof cb !== 'function') { @@ -88,8 +90,8 @@ function pack_ (pkg, dir) { } function packFromPackage (arg, target, filename) { - const opts = pacoteOpts() - return pacote.tarball.toFile(arg, target, pacoteOpts()) + const opts = npmConfig() + return pacote.tarball.toFile(arg, target, opts) .then(() => cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'unpacking'}, (tmp) => { const tmpTarget = path.join(tmp, filename) return pacote.extract(arg, tmpTarget, opts) diff --git a/lib/ping.js b/lib/ping.js index 13f390397ce18..3023bab00e994 100644 --- a/lib/ping.js +++ b/lib/ping.js @@ -1,5 +1,16 @@ -var npm = require('./npm.js') -var output = require('./utils/output.js') +'use strict' + +const npmConfig = require('./config/figgy-config.js') +const fetch = require('libnpm/fetch') +const figgyPudding = require('figgy-pudding') +const log = require('npmlog') +const npm = require('./npm.js') +const output = require('./utils/output.js') + +const PingConfig = figgyPudding({ + json: {}, + registry: {} +}) module.exports = ping @@ -10,18 +21,27 @@ function ping (args, silent, cb) { cb = silent silent = false } - var registry = npm.config.get('registry') - if (!registry) return cb(new Error('no default registry set')) - var auth = npm.config.getCredentialsByURI(registry) - npm.registry.ping(registry, {auth: auth}, function (er, pong, data, res) { - if (!silent) { - if (er) { - output('Ping error: ' + er) - } else { - output('Ping success: ' + JSON.stringify(pong)) + const opts = PingConfig(npmConfig()) + const registry = opts.registry + log.notice('PING', registry) + const start = Date.now() + return fetch('/-/ping?write=true', opts).then( + res => res.json().catch(() => ({})) + ).then(details => { + if (silent) { + } else { + const time = Date.now() - start + log.notice('PONG', `${time / 1000}ms`) + if (npm.config.get('json')) { + output(JSON.stringify({ + registry, + time, + details + }, null, 2)) + } else if (Object.keys(details).length) { + log.notice('PONG', `${JSON.stringify(details, null, 2)}`) } } - cb(er, er ? 
null : pong, data, res) - }) + }).nodeify(cb) } diff --git a/lib/profile.js b/lib/profile.js index ff01db90f722f..7ce9cb5cce5df 100644 --- a/lib/profile.js +++ b/lib/profile.js @@ -1,18 +1,23 @@ 'use strict' -const profile = require('npm-profile') -const npm = require('./npm.js') + +const BB = require('bluebird') + +const ansistyles = require('ansistyles') +const figgyPudding = require('figgy-pudding') +const inspect = require('util').inspect const log = require('npmlog') +const npm = require('./npm.js') +const npmConfig = require('./config/figgy-config.js') +const otplease = require('./utils/otplease.js') const output = require('./utils/output.js') +const profile = require('libnpm/profile') +const pulseTillDone = require('./utils/pulse-till-done.js') +const qrcodeTerminal = require('qrcode-terminal') +const queryString = require('query-string') const qw = require('qw') -const Table = require('cli-table3') -const ansistyles = require('ansistyles') -const Bluebird = require('bluebird') const readUserInfo = require('./utils/read-user-info.js') -const qrcodeTerminal = require('qrcode-terminal') +const Table = require('cli-table3') const url = require('url') -const queryString = require('query-string') -const pulseTillDone = require('./utils/pulse-till-done.js') -const inspect = require('util').inspect module.exports = profileCmd @@ -48,6 +53,13 @@ function withCb (prom, cb) { prom.then((value) => cb(null, value), cb) } +const ProfileOpts = figgyPudding({ + json: {}, + otp: {}, + parseable: {}, + registry: {} +}) + function profileCmd (args, cb) { if (args.length === 0) return cb(new Error(profileCmd.usage)) log.gauge.show('profile') @@ -75,36 +87,13 @@ function profileCmd (args, cb) { } } -function config () { - const conf = { - json: npm.config.get('json'), - parseable: npm.config.get('parseable'), - registry: npm.config.get('registry'), - otp: npm.config.get('otp') - } - const creds = npm.config.getCredentialsByURI(conf.registry) - if (creds.token) { - conf.auth = {token: creds.token} - } else if (creds.username) { - conf.auth = {basic: {username: creds.username, password: creds.password}} - } else if (creds.auth) { - const auth = Buffer.from(creds.auth, 'base64').toString().split(':', 2) - conf.auth = {basic: {username: auth[0], password: auth[1]}} - } else { - conf.auth = {} - } - - if (conf.otp) conf.auth.otp = conf.otp - return conf -} - const knownProfileKeys = qw` name email ${'two-factor auth'} fullname homepage freenode twitter github created updated` function get (args) { const tfa = 'two-factor auth' - const conf = config() + const conf = ProfileOpts(npmConfig()) return pulseTillDone.withPromise(profile.get(conf)).then((info) => { if (!info.cidr_whitelist) delete info.cidr_whitelist if (conf.json) { @@ -150,7 +139,7 @@ const writableProfileKeys = qw` email password fullname homepage freenode twitter github` function set (args) { - const conf = config() + let conf = ProfileOpts(npmConfig()) const prop = (args[0] || '').toLowerCase().trim() let value = args.length > 1 ? args.slice(1).join(' ') : null if (prop !== 'password' && value === null) { @@ -164,7 +153,7 @@ function set (args) { if (writableProfileKeys.indexOf(prop) === -1) { return Promise.reject(Error(`"${prop}" is not a property we can set. 
Valid properties are: ` + writableProfileKeys.join(', '))) } - return Bluebird.try(() => { + return BB.try(() => { if (prop === 'password') { return readUserInfo.password('Current password: ').then((current) => { return readPasswords().then((newpassword) => { @@ -193,23 +182,18 @@ function set (args) { const newUser = {} writableProfileKeys.forEach((k) => { newUser[k] = user[k] }) newUser[prop] = value - return profile.set(newUser, conf).catch((err) => { - if (err.code !== 'EOTP') throw err - return readUserInfo.otp().then((otp) => { - conf.auth.otp = otp - return profile.set(newUser, conf) + return otplease(conf, conf => profile.set(newUser, conf)) + .then((result) => { + if (conf.json) { + output(JSON.stringify({[prop]: result[prop]}, null, 2)) + } else if (conf.parseable) { + output(prop + '\t' + result[prop]) + } else if (result[prop] != null) { + output('Set', prop, 'to', result[prop]) + } else { + output('Set', prop) + } }) - }).then((result) => { - if (conf.json) { - output(JSON.stringify({[prop]: result[prop]}, null, 2)) - } else if (conf.parseable) { - output(prop + '\t' + result[prop]) - } else if (result[prop] != null) { - output('Set', prop, 'to', result[prop]) - } else { - output('Set', prop) - } - }) })) }) } @@ -225,7 +209,7 @@ function enable2fa (args) { ' auth-only - Require two-factor authentication only when logging in\n' + ' auth-and-writes - Require two-factor authentication when logging in AND when publishing')) } - const conf = config() + const conf = ProfileOpts(npmConfig()) if (conf.json || conf.parseable) { return Promise.reject(new Error( 'Enabling two-factor authentication is an interactive operation and ' + @@ -238,15 +222,18 @@ function enable2fa (args) { } } - return Bluebird.try(() => { + return BB.try(() => { // if they're using legacy auth currently then we have to update them to a // bearer token before continuing. - if (conf.auth.basic) { + const auth = getAuth(conf) + if (auth.basic) { log.info('profile', 'Updating authentication to bearer token') - return profile.login(conf.auth.basic.username, conf.auth.basic.password, conf).then((result) => { + return profile.createToken( + auth.basic.password, false, [], conf + ).then((result) => { if (!result.token) throw new Error('Your registry ' + conf.registry + 'does not seem to support bearer tokens. 
Bearer tokens are required for two-factor authentication') npm.config.setCredentialsByURI(conf.registry, {token: result.token}) - return Bluebird.fromNode((cb) => npm.config.save('user', cb)) + return BB.fromNode((cb) => npm.config.save('user', cb)) }) } }).then(() => { @@ -295,18 +282,36 @@ function enable2fa (args) { }) } +function getAuth (conf) { + const creds = npm.config.getCredentialsByURI(conf.registry) + let auth + if (creds.token) { + auth = {token: creds.token} + } else if (creds.username) { + auth = {basic: {username: creds.username, password: creds.password}} + } else if (creds.auth) { + const basic = Buffer.from(creds.auth, 'base64').toString().split(':', 2) + auth = {basic: {username: basic[0], password: basic[1]}} + } else { + auth = {} + } + + if (conf.otp) auth.otp = conf.otp + return auth +} + function disable2fa (args) { - const conf = config() + let conf = ProfileOpts(npmConfig()) return pulseTillDone.withPromise(profile.get(conf)).then((info) => { if (!info.tfa || info.tfa.pending) { output('Two factor authentication not enabled.') return } return readUserInfo.password().then((password) => { - return Bluebird.try(() => { - if (conf.auth.otp) return + return BB.try(() => { + if (conf.otp) return return readUserInfo.otp('Enter one-time password from your authenticator: ').then((otp) => { - conf.auth.otp = otp + conf = conf.concat({otp}) }) }).then(() => { log.info('profile', 'disabling tfa') diff --git a/lib/publish.js b/lib/publish.js index 25f2134b1b16d..e81fc1a057454 100644 --- a/lib/publish.js +++ b/lib/publish.js @@ -3,20 +3,20 @@ const BB = require('bluebird') const cacache = require('cacache') -const createReadStream = require('graceful-fs').createReadStream -const getPublishConfig = require('./utils/get-publish-config.js') +const figgyPudding = require('figgy-pudding') +const libpub = require('libnpm/publish') +const libunpub = require('libnpm/unpublish') const lifecycle = BB.promisify(require('./utils/lifecycle.js')) const log = require('npmlog') -const mapToRegistry = require('./utils/map-to-registry.js') -const npa = require('npm-package-arg') -const npm = require('./npm.js') +const npa = require('libnpm/parse-arg') +const npmConfig = require('./config/figgy-config.js') const output = require('./utils/output.js') +const otplease = require('./utils/otplease.js') const pack = require('./pack') -const pacote = require('pacote') -const pacoteOpts = require('./config/pacote') +const { tarball, extract } = require('libnpm') const path = require('path') +const readFileAsync = BB.promisify(require('graceful-fs').readFile) const readJson = BB.promisify(require('read-package-json')) -const readUserInfo = require('./utils/read-user-info.js') const semver = require('semver') const statAsync = BB.promisify(require('graceful-fs').stat) @@ -31,6 +31,16 @@ publish.completion = function (opts, cb) { return cb() } +const PublishConfig = figgyPudding({ + dryRun: 'dry-run', + 'dry-run': { default: false }, + force: { default: false }, + json: { default: false }, + Promise: { default: () => Promise }, + tag: { default: 'latest' }, + tmp: {} +}) + module.exports = publish function publish (args, isRetry, cb) { if (typeof cb !== 'function') { @@ -42,15 +52,16 @@ function publish (args, isRetry, cb) { log.verbose('publish', args) - const t = npm.config.get('tag').trim() + const opts = PublishConfig(npmConfig()) + const t = opts.tag.trim() if (semver.validRange(t)) { return cb(new Error('Tag name must not be a valid SemVer range: ' + t)) } - return publish_(args[0]) + return 
publish_(args[0], opts) .then((tarball) => { const silent = log.level === 'silent' - if (!silent && npm.config.get('json')) { + if (!silent && opts.json) { output(JSON.stringify(tarball, null, 2)) } else if (!silent) { output(`+ ${tarball.id}`) @@ -59,7 +70,7 @@ function publish (args, isRetry, cb) { .nodeify(cb) } -function publish_ (arg) { +function publish_ (arg, opts) { return statAsync(arg).then((stat) => { if (stat.isDirectory()) { return stat @@ -69,17 +80,17 @@ function publish_ (arg) { throw err } }).then(() => { - return publishFromDirectory(arg) + return publishFromDirectory(arg, opts) }, (err) => { if (err.code !== 'ENOENT' && err.code !== 'ENOTDIR') { throw err } else { - return publishFromPackage(arg) + return publishFromPackage(arg, opts) } }) } -function publishFromDirectory (arg) { +function publishFromDirectory (arg, opts) { // All this readJson is because any of the given scripts might modify the // package.json in question, so we need to refresh after every step. let contents @@ -90,12 +101,12 @@ function publishFromDirectory (arg) { }).then(() => { return readJson(path.join(arg, 'package.json')) }).then((pkg) => { - return cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'fromDir'}, (tmpDir) => { + return cacache.tmp.withTmp(opts.tmp, {tmpPrefix: 'fromDir'}, (tmpDir) => { const target = path.join(tmpDir, 'package.tgz') return pack.packDirectory(pkg, arg, target, null, true) .tap((c) => { contents = c }) - .then((c) => !npm.config.get('json') && pack.logContents(c)) - .then(() => upload(arg, pkg, false, target)) + .then((c) => !opts.json && pack.logContents(c)) + .then(() => upload(pkg, false, target, opts)) }) }).then(() => { return readJson(path.join(arg, 'package.json')) @@ -107,121 +118,50 @@ function publishFromDirectory (arg) { .then(() => contents) } -function publishFromPackage (arg) { - return cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'fromPackage'}, (tmp) => { +function publishFromPackage (arg, opts) { + return cacache.tmp.withTmp(opts.tmp, {tmpPrefix: 'fromPackage'}, tmp => { const extracted = path.join(tmp, 'package') const target = path.join(tmp, 'package.json') - const opts = pacoteOpts() - return pacote.tarball.toFile(arg, target, opts) - .then(() => pacote.extract(arg, extracted, opts)) + return tarball.toFile(arg, target, opts) + .then(() => extract(arg, extracted, opts)) .then(() => readJson(path.join(extracted, 'package.json'))) .then((pkg) => { return BB.resolve(pack.getContents(pkg, target)) - .tap((c) => !npm.config.get('json') && pack.logContents(c)) - .tap(() => upload(arg, pkg, false, target)) + .tap((c) => !opts.json && pack.logContents(c)) + .tap(() => upload(pkg, false, target, opts)) }) }) } -function upload (arg, pkg, isRetry, cached) { - if (!pkg) { - return BB.reject(new Error('no package.json file found')) - } - if (pkg.private) { - return BB.reject(new Error( - 'This package has been marked as private\n' + - "Remove the 'private' field from the package.json to publish it." 
- )) - } - const mappedConfig = getPublishConfig( - pkg.publishConfig, - npm.config, - npm.registry - ) - const config = mappedConfig.config - const registry = mappedConfig.client - - pkg._npmVersion = npm.version - pkg._nodeVersion = process.versions.node - - delete pkg.modules - - return BB.fromNode((cb) => { - mapToRegistry(pkg.name, config, (err, registryURI, auth, registryBase) => { - if (err) { return cb(err) } - cb(null, [registryURI, auth, registryBase]) - }) - }).spread((registryURI, auth, registryBase) => { - // we just want the base registry URL in this case - log.verbose('publish', 'registryBase', registryBase) - log.silly('publish', 'uploading', cached) - - pkg._npmUser = { - name: auth.username, - email: auth.email - } - - const params = { - metadata: pkg, - body: !npm.config.get('dry-run') && createReadStream(cached), - auth: auth - } - - function closeFile () { - if (!npm.config.get('dry-run')) { - params.body.close() - } - } - - // registry-frontdoor cares about the access level, which is only - // configurable for scoped packages - if (config.get('access')) { - if (!npa(pkg.name).scope && config.get('access') === 'restricted') { - throw new Error("Can't restrict access to unscoped packages.") - } - - params.access = config.get('access') - } - - if (npm.config.get('dry-run')) { - log.verbose('publish', '--dry-run mode enabled. Skipping upload.') - return BB.resolve() - } - - log.showProgress('publish:' + pkg._id) - return BB.fromNode((cb) => { - registry.publish(registryBase, params, cb) - }).catch((err) => { - if ( - err.code === 'EPUBLISHCONFLICT' && - npm.config.get('force') && - !isRetry - ) { - log.warn('publish', 'Forced publish over ' + pkg._id) - return BB.fromNode((cb) => { - npm.commands.unpublish([pkg._id], cb) - }).finally(() => { - // close the file we are trying to upload, we will open it again. - closeFile() - // ignore errors. Use the force. Reach out with your feelings. - return upload(arg, pkg, true, cached).catch(() => { - // but if it fails again, then report the first error. - throw err +function upload (pkg, isRetry, cached, opts) { + if (!opts.dryRun) { + return readFileAsync(cached).then(tarball => { + return otplease(opts, opts => { + return libpub(pkg, tarball, opts) + }).catch(err => { + if ( + err.code === 'EPUBLISHCONFLICT' && + opts.force && + !isRetry + ) { + log.warn('publish', 'Forced publish over ' + pkg._id) + return otplease(opts, opts => libunpub( + npa.resolve(pkg.name, pkg.version), opts + )).finally(() => { + // ignore errors. Use the force. Reach out with your feelings. + return otplease(opts, opts => { + return upload(pkg, true, tarball, opts) + }).catch(() => { + // but if it fails again, then report the first error. + throw err + }) }) - }) - } else { - // close the file we are trying to upload, all attempts to resume will open it again - closeFile() - throw err - } - }) - }).catch((err) => { - if (err.code !== 'EOTP' && !(err.code === 'E401' && /one-time pass/.test(err.message))) throw err - // we prompt on stdout and read answers from stdin, so they need to be ttys. 
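// Editorial sketch (not part of this changeset): the hand-rolled OTP handling
// being removed in this hunk is exactly what the new `otplease(opts, fn)` helper
// factors out. Assuming `opts` is a figgy-pudding config (so it has `.concat()`)
// and reusing the same `readUserInfo` prompt as the old code, a minimal version
// of that retry pattern could look roughly like this; the real
// ./utils/otplease.js may differ in detail.
const readUserInfo = require('./utils/read-user-info.js')

function otpleaseSketch (opts, fn) {
  return Promise.resolve().then(() => fn(opts)).catch(err => {
    const needsOtp = err.code === 'EOTP' ||
      (err.code === 'E401' && /one-time pass/.test(err.message))
    // Prompting happens on stdin/stdout, so both must be TTYs, as the old code notes.
    if (!needsOtp || !process.stdin.isTTY || !process.stdout.isTTY) throw err
    return readUserInfo.otp('Enter one-time password: ')
      .then(otp => fn(opts.concat({ otp })))
  })
}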
- if (!process.stdin.isTTY || !process.stdout.isTTY) throw err - return readUserInfo.otp().then((otp) => { - npm.config.set('otp', otp) - return upload(arg, pkg, isRetry, cached) + } else { + throw err + } + }) }) - }) + } else { + return opts.Promise.resolve(true) + } } diff --git a/lib/repo.js b/lib/repo.js index d5aa81a6a00eb..b930402aedf95 100644 --- a/lib/repo.js +++ b/lib/repo.js @@ -2,10 +2,10 @@ module.exports = repo repo.usage = 'npm repo []' -var openUrl = require('./utils/open-url') -var hostedGitInfo = require('hosted-git-info') -var url_ = require('url') -var fetchPackageMetadata = require('./fetch-package-metadata.js') +const openUrl = require('./utils/open-url') +const hostedGitInfo = require('hosted-git-info') +const url_ = require('url') +const fetchPackageMetadata = require('./fetch-package-metadata.js') repo.completion = function (opts, cb) { // FIXME: there used to be registry completion here, but it stopped making @@ -14,7 +14,7 @@ repo.completion = function (opts, cb) { } function repo (args, cb) { - var n = args.length ? args[0] : '.' + const n = args.length ? args[0] : '.' fetchPackageMetadata(n, '.', {fullMetadata: true}, function (er, d) { if (er) return cb(er) getUrlAndOpen(d, cb) @@ -22,12 +22,12 @@ function repo (args, cb) { } function getUrlAndOpen (d, cb) { - var r = d.repository + const r = d.repository if (!r) return cb(new Error('no repository')) // XXX remove this when npm@v1.3.10 from node 0.10 is deprecated // from https://github.com/npm/npm-www/issues/418 - var info = hostedGitInfo.fromUrl(r.url) - var url = info ? info.browse() : unknownHostedUrl(r.url) + const info = hostedGitInfo.fromUrl(r.url) + const url = info ? info.browse() : unknownHostedUrl(r.url) if (!url) return cb(new Error('no repository: could not get url')) @@ -36,12 +36,12 @@ function getUrlAndOpen (d, cb) { function unknownHostedUrl (url) { try { - var idx = url.indexOf('@') + const idx = url.indexOf('@') if (idx !== -1) { url = url.slice(idx + 1).replace(/:([^\d]+)/, '/$1') } url = url_.parse(url) - var protocol = url.protocol === 'https:' + const protocol = url.protocol === 'https:' ? 
'https:' : 'http:' return protocol + '//' + (url.host || '') + diff --git a/lib/root.js b/lib/root.js index 82a804aff5391..97d76c120f807 100644 --- a/lib/root.js +++ b/lib/root.js @@ -1,7 +1,7 @@ module.exports = root -var npm = require('./npm.js') -var output = require('./utils/output.js') +const npm = require('./npm.js') +const output = require('./utils/output.js') root.usage = 'npm root [-g]' diff --git a/lib/run-script.js b/lib/run-script.js index 3302576311719..476591c051e73 100644 --- a/lib/run-script.js +++ b/lib/run-script.js @@ -136,7 +136,7 @@ function run (pkg, wd, cmd, args, cb) { 'prestart', 'start', 'poststart' ] } else { - if (!pkg.scripts[cmd]) { + if (pkg.scripts[cmd] == null) { if (cmd === 'test') { pkg.scripts.test = 'echo \'Error: no test specified\'' } else if (cmd === 'env') { diff --git a/lib/search.js b/lib/search.js index 3987be135c9ae..3c59f8b43d15b 100644 --- a/lib/search.js +++ b/lib/search.js @@ -2,14 +2,16 @@ module.exports = exports = search -var npm = require('./npm.js') -var allPackageSearch = require('./search/all-package-search') -var esearch = require('./search/esearch.js') -var formatPackageStream = require('./search/format-package-stream.js') -var usage = require('./utils/usage') -var output = require('./utils/output.js') -var log = require('npmlog') -var ms = require('mississippi') +const npm = require('./npm.js') +const allPackageSearch = require('./search/all-package-search') +const figgyPudding = require('figgy-pudding') +const formatPackageStream = require('./search/format-package-stream.js') +const libSearch = require('libnpm/search') +const log = require('npmlog') +const ms = require('mississippi') +const npmConfig = require('./config/figgy-config.js') +const output = require('./utils/output.js') +const usage = require('./utils/usage') search.usage = usage( 'search', @@ -20,46 +22,50 @@ search.completion = function (opts, cb) { cb(null, []) } +const SearchOpts = figgyPudding({ + description: {}, + exclude: {}, + include: {}, + limit: {}, + log: {}, + staleness: {}, + unicode: {} +}) + function search (args, cb) { - var searchOpts = { + const opts = SearchOpts(npmConfig()).concat({ description: npm.config.get('description'), exclude: prepareExcludes(npm.config.get('searchexclude')), include: prepareIncludes(args, npm.config.get('searchopts')), - limit: npm.config.get('searchlimit'), + limit: npm.config.get('searchlimit') || 20, log: log, staleness: npm.config.get('searchstaleness'), unicode: npm.config.get('unicode') - } - - if (searchOpts.include.length === 0) { + }) + if (opts.include.length === 0) { return cb(new Error('search must be called with arguments')) } // Used later to figure out whether we had any packages go out - var anyOutput = false + let anyOutput = false - var entriesStream = ms.through.obj() + const entriesStream = ms.through.obj() - var esearchWritten = false - esearch(searchOpts).on('data', function (pkg) { + let esearchWritten = false + libSearch.stream(opts.include, opts).on('data', pkg => { entriesStream.write(pkg) !esearchWritten && (esearchWritten = true) - }).on('error', function (e) { + }).on('error', err => { if (esearchWritten) { // If esearch errored after already starting output, we can't fall back. - return entriesStream.emit('error', e) + return entriesStream.emit('error', err) } log.warn('search', 'fast search endpoint errored. 
Using old search.') - allPackageSearch(searchOpts).on('data', function (pkg) { - entriesStream.write(pkg) - }).on('error', function (e) { - entriesStream.emit('error', e) - }).on('end', function () { - entriesStream.end() - }) - }).on('end', function () { - entriesStream.end() - }) + allPackageSearch(opts) + .on('data', pkg => entriesStream.write(pkg)) + .on('error', err => entriesStream.emit('error', err)) + .on('end', () => entriesStream.end()) + }).on('end', () => entriesStream.end()) // Grab a configured output stream that will spit out packages in the // desired format. @@ -71,14 +77,14 @@ function search (args, cb) { parseable: npm.config.get('parseable'), color: npm.color }) - outputStream.on('data', function (chunk) { + outputStream.on('data', chunk => { if (!anyOutput) { anyOutput = true } output(chunk.toString('utf8')) }) log.silly('search', 'searching packages') - ms.pipe(entriesStream, outputStream, function (er) { - if (er) return cb(er) + ms.pipe(entriesStream, outputStream, err => { + if (err) return cb(err) if (!anyOutput && !npm.config.get('json') && !npm.config.get('parseable')) { output('No matches found for ' + (args.map(JSON.stringify).join(' '))) } diff --git a/lib/search/all-package-metadata.js b/lib/search/all-package-metadata.js index 5a27bdbcee658..388b4f61f0496 100644 --- a/lib/search/all-package-metadata.js +++ b/lib/search/all-package-metadata.js @@ -1,21 +1,28 @@ 'use strict' -var fs = require('graceful-fs') -var path = require('path') -var mkdir = require('mkdirp') -var chownr = require('chownr') -var npm = require('../npm.js') -var log = require('npmlog') -var cacheFile = require('npm-cache-filename') -var correctMkdir = require('../utils/correct-mkdir.js') -var mapToRegistry = require('../utils/map-to-registry.js') -var jsonstream = require('JSONStream') -var writeStreamAtomic = require('fs-write-stream-atomic') -var ms = require('mississippi') -var sortedUnionStream = require('sorted-union-stream') -var once = require('once') -var gunzip = require('../utils/gunzip-maybe') +const BB = require('bluebird') +const cacheFile = require('npm-cache-filename') +const chownr = BB.promisify(require('chownr')) +const correctMkdir = BB.promisify(require('../utils/correct-mkdir.js')) +const figgyPudding = require('figgy-pudding') +const fs = require('graceful-fs') +const JSONStream = require('JSONStream') +const log = require('npmlog') +const mkdir = BB.promisify(require('gentle-fs').mkdir) +const ms = require('mississippi') +const npmFetch = require('libnpm/fetch') +const path = require('path') +const sortedUnionStream = require('sorted-union-stream') +const url = require('url') +const writeStreamAtomic = require('fs-write-stream-atomic') + +const statAsync = BB.promisify(fs.stat) + +const APMOpts = figgyPudding({ + cache: {}, + registry: {} +}) // Returns a sorted stream of all package metadata. Internally, takes care of // maintaining its metadata cache and making partial or full remote requests, // according to staleness, validity, etc. @@ -27,63 +34,70 @@ var gunzip = require('../utils/gunzip-maybe') // 4. 
It must include all entries that exist in the metadata endpoint as of // the value in `_updated` module.exports = allPackageMetadata -function allPackageMetadata (staleness) { - var stream = ms.through.obj() - - mapToRegistry('-/all', npm.config, function (er, uri, auth) { - if (er) return stream.emit('error', er) +function allPackageMetadata (opts) { + const staleness = opts.staleness + const stream = ms.through.obj() - var cacheBase = cacheFile(npm.config.get('cache'))(uri) - var cachePath = path.join(cacheBase, '.cache.json') - - createEntryStream(cachePath, uri, auth, staleness, function (err, entryStream, latest, newEntries) { - if (err) return stream.emit('error', err) - log.silly('all-package-metadata', 'entry stream created') - if (entryStream && newEntries) { - createCacheWriteStream(cachePath, latest, function (err, writeStream) { - if (err) return stream.emit('error', err) - log.silly('all-package-metadata', 'output stream created') - ms.pipeline.obj(entryStream, writeStream, stream) - }) - } else if (entryStream) { - ms.pipeline.obj(entryStream, stream) - } else { - stream.emit('error', new Error('No search sources available')) - } - }) - }) + opts = APMOpts(opts) + const cacheBase = cacheFile(path.resolve(path.dirname(opts.cache)))(url.resolve(opts.registry, '/-/all')) + const cachePath = path.join(cacheBase, '.cache.json') + createEntryStream( + cachePath, staleness, opts + ).then(({entryStream, latest, newEntries}) => { + log.silly('all-package-metadata', 'entry stream created') + if (entryStream && newEntries) { + return createCacheWriteStream(cachePath, latest, opts).then(writer => { + log.silly('all-package-metadata', 'output stream created') + ms.pipeline.obj(entryStream, writer, stream) + }) + } else if (entryStream) { + ms.pipeline.obj(entryStream, stream) + } else { + stream.emit('error', new Error('No search sources available')) + } + }).catch(err => stream.emit('error', err)) return stream } // Creates a stream of the latest available package metadata. // Metadata will come from a combination of the local cache and remote data. module.exports._createEntryStream = createEntryStream -function createEntryStream (cachePath, uri, auth, staleness, cb) { - createCacheEntryStream(cachePath, function (err, cacheStream, cacheLatest) { +function createEntryStream (cachePath, staleness, opts) { + return createCacheEntryStream( + cachePath, opts + ).catch(err => { + log.warn('', 'Failed to read search cache. Rebuilding') + log.silly('all-package-metadata', 'cache read error: ', err) + return {} + }).then(({ + updateStream: cacheStream, + updatedLatest: cacheLatest + }) => { cacheLatest = cacheLatest || 0 - if (err) { - log.warn('', 'Failed to read search cache. 
Rebuilding') - log.silly('all-package-metadata', 'cache read error: ', err) - } - createEntryUpdateStream(uri, auth, staleness, cacheLatest, function (err, updateStream, updatedLatest) { + return createEntryUpdateStream(staleness, cacheLatest, opts).catch(err => { + log.warn('', 'Search data request failed, search might be stale') + log.silly('all-package-metadata', 'update request error: ', err) + return {} + }).then(({updateStream, updatedLatest}) => { updatedLatest = updatedLatest || 0 - var latest = updatedLatest || cacheLatest + const latest = updatedLatest || cacheLatest if (!cacheStream && !updateStream) { - return cb(new Error('No search sources available')) - } - if (err) { - log.warn('', 'Search data request failed, search might be stale') - log.silly('all-package-metadata', 'update request error: ', err) + throw new Error('No search sources available') } if (cacheStream && updateStream) { // Deduped, unioned, sorted stream from the combination of both. - cb(null, - createMergedStream(cacheStream, updateStream), + return { + entryStream: createMergedStream(cacheStream, updateStream), latest, - !!updatedLatest) + newEntries: !!updatedLatest + } } else { // Either one works if one or the other failed - cb(null, cacheStream || updateStream, latest, !!updatedLatest) + return { + entryStream: cacheStream || updateStream, + latest, + newEntries: !!updatedLatest + } } }) }) @@ -96,66 +110,51 @@ function createEntryStream (cachePath, uri, auth, staleness, cb) { module.exports._createMergedStream = createMergedStream function createMergedStream (a, b) { linkStreams(a, b) - return sortedUnionStream(b, a, function (pkg) { return pkg.name }) + return sortedUnionStream(b, a, ({name}) => name) } // Reads the local index and returns a stream that spits out package data. module.exports._createCacheEntryStream = createCacheEntryStream -function createCacheEntryStream (cacheFile, cb) { +function createCacheEntryStream (cacheFile, opts) { log.verbose('all-package-metadata', 'creating entry stream from local cache') log.verbose('all-package-metadata', cacheFile) - fs.stat(cacheFile, function (err, stat) { - if (err) return cb(err) + return statAsync(cacheFile).then(stat => { // TODO - This isn't very helpful if `cacheFile` is empty or just `{}` - var entryStream = ms.pipeline.obj( + const entryStream = ms.pipeline.obj( fs.createReadStream(cacheFile), - jsonstream.parse('*'), + JSONStream.parse('*'), // I believe this passthrough is necessary cause `jsonstream` returns // weird custom streams that behave funny sometimes. ms.through.obj() ) - extractUpdated(entryStream, 'cached-entry-stream', cb) + return extractUpdated(entryStream, 'cached-entry-stream', opts) }) } // Stream of entry updates from the server. If `latest` is `0`, streams the // entire metadata object from the registry. module.exports._createEntryUpdateStream = createEntryUpdateStream -function createEntryUpdateStream (all, auth, staleness, latest, cb) { +function createEntryUpdateStream (staleness, latest, opts) { log.verbose('all-package-metadata', 'creating remote entry stream') - var params = { - timeout: 600, - follow: true, - staleOk: true, - auth: auth, - streaming: true - } - var partialUpdate = false + let partialUpdate = false + let uri = '/-/all' if (latest && (Date.now() - latest < (staleness * 1000))) { // Skip the request altogether if our `latest` isn't stale. 
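// (Illustrative note, not part of the diff.) `staleness` is expressed in seconds
// while `latest` is a millisecond timestamp, hence the `* 1000` in the check above.
// For example, with staleness = 900 and a cache written 10 minutes ago,
// Date.now() - latest is about 600000 ms, which is below 900 * 1000 = 900000,
// so the cached index is considered fresh and no request is made.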
log.verbose('all-package-metadata', 'Local data up to date, skipping update') - return cb(null) + return BB.resolve({}) } else if (latest === 0) { log.warn('', 'Building the local index for the first time, please be patient') log.verbose('all-package-metadata', 'No cached data: requesting full metadata db') } else { log.verbose('all-package-metadata', 'Cached data present with timestamp:', latest, 'requesting partial index update') - all += '/since?stale=update_after&startkey=' + latest + uri += '/since?stale=update_after&startkey=' + latest partialUpdate = true } - npm.registry.request(all, params, function (er, res) { - if (er) return cb(er) + return npmFetch(uri, opts).then(res => { log.silly('all-package-metadata', 'request stream opened, code:', res.statusCode) - // NOTE - The stream returned by `request` seems to be very persnickety - // and this is almost a magic incantation to get it to work. - // Modify how `res` is used here at your own risk. - var entryStream = ms.pipeline.obj( - res, - ms.through(function (chunk, enc, cb) { - cb(null, chunk) - }), - gunzip(), - jsonstream.parse('*', function (pkg, key) { + let entryStream = ms.pipeline.obj( + res.body, + JSONStream.parse('*', (pkg, key) => { if (key[0] === '_updated' || key[0][0] !== '_') { return pkg } @@ -164,9 +163,12 @@ function createEntryUpdateStream (all, auth, staleness, latest, cb) { if (partialUpdate) { // The `/all/since` endpoint doesn't return `_updated`, so we // just use the request's own timestamp. - cb(null, entryStream, Date.parse(res.headers.date)) + return { + updateStream: entryStream, + updatedLatest: Date.parse(res.headers.get('date')) + } } else { - extractUpdated(entryStream, 'entry-update-stream', cb) + return extractUpdated(entryStream, 'entry-update-stream', opts) } }) } @@ -175,36 +177,37 @@ function createEntryUpdateStream (all, auth, staleness, latest, cb) { // first returned entries. This is the "latest" unix timestamp for the metadata // in question. This code does a bit of juggling with the data streams // so that we can pretend that field doesn't exist, but still extract `latest` -function extractUpdated (entryStream, label, cb) { - cb = once(cb) +function extractUpdated (entryStream, label, opts) { log.silly('all-package-metadata', 'extracting latest') - function nope (msg) { - return function () { - log.warn('all-package-metadata', label, msg) - entryStream.removeAllListeners() - entryStream.destroy() - cb(new Error(msg)) - } - } - var onErr = nope('Failed to read stream') - var onEnd = nope('Empty or invalid stream') - entryStream.on('error', onErr) - entryStream.on('end', onEnd) - entryStream.once('data', function (latest) { - log.silly('all-package-metadata', 'got first stream entry for', label, latest) - entryStream.removeListener('error', onErr) - entryStream.removeListener('end', onEnd) - // Because `.once()` unpauses the stream, we re-pause it after the first - // entry so we don't vomit entries into the void. - entryStream.pause() - if (typeof latest === 'number') { - // The extra pipeline is to return a stream that will implicitly unpause - // after having an `.on('data')` listener attached, since using this - // `data` event broke its initial state. 
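// (Illustrative note, not part of the diff.) Both the old callback version and
// the new promise-based version below rely on the same trick: after the first
// 'data' event has been consumed to read `_updated`, the paused source is piped
// into a fresh passthrough so callers get a stream that resumes flowing once
// they attach their own listeners, e.g.:
//
//   const resumed = entryStream.pipe(ms.through.obj())
//   resumed.on('data', pkg => { /* package entries flow from here */ })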
- cb(null, ms.pipeline.obj(entryStream, ms.through.obj()), latest) - } else { - cb(new Error('expected first entry to be _updated')) + return new BB((resolve, reject) => { + function nope (msg) { + return function () { + log.warn('all-package-metadata', label, msg) + entryStream.removeAllListeners() + entryStream.destroy() + reject(new Error(msg)) + } } + const onErr = nope('Failed to read stream') + const onEnd = nope('Empty or invalid stream') + entryStream.on('error', onErr) + entryStream.on('end', onEnd) + entryStream.once('data', latest => { + log.silly('all-package-metadata', 'got first stream entry for', label, latest) + entryStream.removeListener('error', onErr) + entryStream.removeListener('end', onEnd) + if (typeof latest === 'number') { + // The extra pipeline is to return a stream that will implicitly unpause + // after having an `.on('data')` listener attached, since using this + // `data` event broke its initial state. + resolve({ + updateStream: entryStream.pipe(ms.through.obj()), + updatedLatest: latest + }) + } else { + reject(new Error('expected first entry to be _updated')) + } + }) }) } @@ -213,44 +216,55 @@ function extractUpdated (entryStream, label, cb) { // The stream is also passthrough, so entries going through it will also // be output from it. module.exports._createCacheWriteStream = createCacheWriteStream -function createCacheWriteStream (cacheFile, latest, cb) { - _ensureCacheDirExists(cacheFile, function (err) { - if (err) return cb(err) +function createCacheWriteStream (cacheFile, latest, opts) { + return _ensureCacheDirExists(cacheFile, opts).then(({uid, gid}) => { log.silly('all-package-metadata', 'creating output stream') - var outStream = _createCacheOutStream() - var cacheFileStream = writeStreamAtomic(cacheFile) - var inputStream = _createCacheInStream(cacheFileStream, outStream, latest) + const outStream = _createCacheOutStream() + const cacheFileStream = writeStreamAtomic(cacheFile) + const inputStream = _createCacheInStream( + cacheFileStream, outStream, latest + ) // Glue together the various streams so they fail together. 
// `cacheFileStream` errors are already handled by the `inputStream` // pipeline - var errEmitted = false - linkStreams(inputStream, outStream, function () { errEmitted = true }) + let errEmitted = false + linkStreams(inputStream, outStream, () => { errEmitted = true }) - cacheFileStream.on('close', function () { !errEmitted && outStream.end() }) + cacheFileStream.on('close', () => { + if (!errEmitted) { + if (typeof uid === 'number' && + typeof gid === 'number' && + process.getuid && + process.getgid && + (process.getuid() !== uid || process.getgid() !== gid)) { + chownr.sync(cacheFile, uid, gid) + } + outStream.end() + } + }) - cb(null, ms.duplex.obj(inputStream, outStream)) + return ms.duplex.obj(inputStream, outStream) }) } -function _ensureCacheDirExists (cacheFile, cb) { +// return the {uid,gid} that the cache should have +function _ensureCacheDirExists (cacheFile, opts) { var cacheBase = path.dirname(cacheFile) log.silly('all-package-metadata', 'making sure cache dir exists at', cacheBase) - correctMkdir(npm.cache, function (er, st) { - if (er) return cb(er) - mkdir(cacheBase, function (er, made) { - if (er) return cb(er) - chownr(made || cacheBase, st.uid, st.gid, cb) - }) + return correctMkdir(opts.cache).then(st => { + return mkdir(cacheBase).then(made => { + return chownr(made || cacheBase, st.uid, st.gid) + }).then(() => ({ uid: st.uid, gid: st.gid })) }) } function _createCacheOutStream () { + // NOTE: this looks goofy, but it's necessary in order to get + // JSONStream to play nice with the rest of everything. return ms.pipeline.obj( - // These two passthrough `through` streams compensate for some - // odd behavior with `jsonstream`. ms.through(), - jsonstream.parse('*', function (obj, key) { + JSONStream.parse('*', (obj, key) => { // This stream happens to get _updated passed through it, for // implementation reasons. We make sure to filter it out cause // the fact that it comes t @@ -263,9 +277,9 @@ function _createCacheOutStream () { } function _createCacheInStream (writer, outStream, latest) { - var updatedWritten = false - var inStream = ms.pipeline.obj( - ms.through.obj(function (pkg, enc, cb) { + let updatedWritten = false + const inStream = ms.pipeline.obj( + ms.through.obj((pkg, enc, cb) => { if (!updatedWritten && typeof pkg === 'number') { // This is the `_updated` value getting sent through. updatedWritten = true @@ -277,13 +291,11 @@ function _createCacheInStream (writer, outStream, latest) { cb(null, [pkg.name, pkg]) } }), - jsonstream.stringifyObject('{', ',', '}'), - ms.through(function (chunk, enc, cb) { + JSONStream.stringifyObject('{', ',', '}'), + ms.through((chunk, enc, cb) => { // This tees off the buffer data to `outStream`, and then continues // the pipeline as usual - outStream.write(chunk, enc, function () { - cb(null, chunk) - }) + outStream.write(chunk, enc, () => cb(null, chunk)) }), // And finally, we write to the cache file. writer @@ -300,14 +312,14 @@ function linkStreams (a, b, cb) { if (err !== lastError) { lastError = err b.emit('error', err) - cb(err) + cb && cb(err) } }) b.on('error', function (err) { if (err !== lastError) { lastError = err a.emit('error', err) - cb(err) + cb && cb(err) } }) } diff --git a/lib/search/all-package-search.js b/lib/search/all-package-search.js index 7a893d517b82c..fef343bcbc3ba 100644 --- a/lib/search/all-package-search.js +++ b/lib/search/all-package-search.js @@ -8,7 +8,7 @@ function allPackageSearch (opts) { // Get a stream with *all* the packages. 
This takes care of dealing // with the local cache as well, but that's an internal detail. - var allEntriesStream = allPackageMetadata(opts.staleness) + var allEntriesStream = allPackageMetadata(opts) // Grab a stream that filters those packages according to given params. var filterStream = streamFilter(function (pkg) { diff --git a/lib/search/esearch.js b/lib/search/esearch.js deleted file mode 100644 index f4beb7ade66b1..0000000000000 --- a/lib/search/esearch.js +++ /dev/null @@ -1,64 +0,0 @@ -'use strict' - -var npm = require('../npm.js') -var log = require('npmlog') -var mapToRegistry = require('../utils/map-to-registry.js') -var jsonstream = require('JSONStream') -var ms = require('mississippi') -var gunzip = require('../utils/gunzip-maybe') - -module.exports = esearch - -function esearch (opts) { - var stream = ms.through.obj() - - mapToRegistry('-/v1/search', npm.config, function (er, uri, auth) { - if (er) return stream.emit('error', er) - createResultStream(uri, auth, opts, function (err, resultStream) { - if (err) return stream.emit('error', err) - ms.pipeline.obj(resultStream, stream) - }) - }) - return stream -} - -function createResultStream (uri, auth, opts, cb) { - log.verbose('esearch', 'creating remote entry stream') - var params = { - timeout: 600, - follow: true, - staleOk: true, - auth: auth, - streaming: true - } - var q = buildQuery(opts) - npm.registry.request(uri + '?text=' + encodeURIComponent(q) + '&size=' + opts.limit, params, function (err, res) { - if (err) return cb(err) - log.silly('esearch', 'request stream opened, code:', res.statusCode) - // NOTE - The stream returned by `request` seems to be very persnickety - // and this is almost a magic incantation to get it to work. - // Modify how `res` is used here at your own risk. - var entryStream = ms.pipeline.obj( - res, - ms.through(function (chunk, enc, cb) { - cb(null, chunk) - }), - gunzip(), - jsonstream.parse('objects.*.package', function (data) { - return { - name: data.name, - description: data.description, - maintainers: data.maintainers, - keywords: data.keywords, - version: data.version, - date: data.date ? 
new Date(data.date) : null - } - }) - ) - return cb(null, entryStream) - }) -} - -function buildQuery (opts) { - return opts.include.join(' ') -} diff --git a/lib/shrinkwrap.js b/lib/shrinkwrap.js index 90a4426523cab..5428e7255b81d 100644 --- a/lib/shrinkwrap.js +++ b/lib/shrinkwrap.js @@ -25,6 +25,13 @@ const writeFileAtomic = require('write-file-atomic') const unixFormatPath = require('./utils/unix-format-path.js') const isRegistry = require('./utils/is-registry.js') +const { chown } = require('fs') +const inferOwner = require('infer-owner') +const selfOwner = { + uid: process.getuid && process.getuid(), + gid: process.getgid && process.getgid() +} + const PKGLOCK = 'package-lock.json' const SHRINKWRAP = 'npm-shrinkwrap.json' const PKGLOCK_VERSION = npm.lockfileVersion @@ -110,17 +117,18 @@ function shrinkwrapDeps (deps, top, tree, seen) { var childIsOnlyDev = isOnlyDev(child) var pkginfo = deps[moduleName(child)] = {} var requested = getRequested(child) || child.package._requested || {} + var linked = child.isLink || child.isInLink pkginfo.version = childVersion(top, child, requested) if (requested.type === 'git' && child.package._from) { pkginfo.from = child.package._from } - if (child.fromBundle || child.isInLink) { + if (child.fromBundle && !linked) { pkginfo.bundled = true } else { if (isRegistry(requested)) { pkginfo.resolved = child.package._resolved } - // no integrity for git deps as integirty hashes are based on the + // no integrity for git deps as integrity hashes are based on the // tarball and we can't (yet) create consistent tarballs from a stable // source. if (requested.type !== 'git') { @@ -139,6 +147,7 @@ function shrinkwrapDeps (deps, top, tree, seen) { pkginfo.requires[moduleName(required)] = childRequested(top, required, requested) }) } + // iterate into children on non-links and links contained within the top level package if (child.children.length) { pkginfo.dependencies = {} shrinkwrapDeps(pkginfo.dependencies, top, child, seen) @@ -159,6 +168,8 @@ function childVersion (top, child, req) { return 'file:' + unixFormatPath(path.relative(top.path, child.package._resolved || req.fetchSpec)) } else if (!isRegistry(req) && !child.fromBundle) { return child.package._resolved || req.saveSpec || req.rawSpec + } else if (req.type === 'alias') { + return `npm:${child.package.name}@${child.package.version}` } else { return child.package.version } @@ -167,6 +178,8 @@ function childVersion (top, child, req) { function childRequested (top, child, requested) { if (requested.type === 'directory' || requested.type === 'file') { return 'file:' + unixFormatPath(path.relative(top.path, child.package._resolved || requested.fetchSpec)) + } else if (requested.type === 'git' && child.package._from) { + return child.package._from } else if (!isRegistry(requested) && !child.fromBundle) { return child.package._resolved || requested.saveSpec || requested.rawSpec } else if (requested.type === 'tag') { @@ -211,13 +224,19 @@ function save (dir, pkginfo, opts, cb) { log.verbose('shrinkwrap', `skipping write for ${path.basename(info.path)} because there were no changes.`) cb(null, pkginfo) } else { - writeFileAtomic(info.path, swdata, (err) => { - if (err) return cb(err) - if (opts.silent) return cb(null, pkginfo) - if (!shrinkwrap && !lockfile) { - log.notice('', `created a lockfile as ${path.basename(info.path)}. 
You should commit this file.`) - } - cb(null, pkginfo) + inferOwner(info.path).then(owner => { + writeFileAtomic(info.path, swdata, (err) => { + if (err) return cb(err) + if (opts.silent) return cb(null, pkginfo) + if (!shrinkwrap && !lockfile) { + log.notice('', `created a lockfile as ${path.basename(info.path)}. You should commit this file.`) + } + if (selfOwner.uid === 0 && (selfOwner.uid !== owner.uid || selfOwner.gid !== owner.gid)) { + chown(info.path, owner.uid, owner.gid, er => cb(er, pkginfo)) + } else { + cb(null, pkginfo) + } + }) }) } } @@ -263,11 +282,15 @@ function checkPackageFile (dir, name) { return readFile( file, 'utf8' ).then((data) => { + const format = npm.config.get('format-package-lock') !== false + const indent = format ? detectIndent(data).indent : 0 + const newline = format ? detectNewline(data) : 0 + return { path: file, raw: data, - indent: detectIndent(data).indent, - newline: detectNewline(data) + indent, + newline } }).catch({code: 'ENOENT'}, () => {}) } diff --git a/lib/star.js b/lib/star.js index f19cb4b07bebb..44a762b15c0c0 100644 --- a/lib/star.js +++ b/lib/star.js @@ -1,11 +1,20 @@ -module.exports = star +'use strict' + +const BB = require('bluebird') + +const fetch = require('libnpm/fetch') +const figgyPudding = require('figgy-pudding') +const log = require('npmlog') +const npa = require('libnpm/parse-arg') +const npm = require('./npm.js') +const npmConfig = require('./config/figgy-config.js') +const output = require('./utils/output.js') +const usage = require('./utils/usage.js') +const whoami = require('./whoami.js') -var npm = require('./npm.js') -var log = require('npmlog') -var asyncMap = require('slide').asyncMap -var mapToRegistry = require('./utils/map-to-registry.js') -var usage = require('./utils/usage') -var output = require('./utils/output.js') +const StarConfig = figgyPudding({ + 'unicode': {} +}) star.usage = usage( 'star', @@ -19,27 +28,50 @@ star.completion = function (opts, cb) { cb() } +module.exports = star function star (args, cb) { - if (!args.length) return cb(star.usage) - var s = npm.config.get('unicode') ? '\u2605 ' : '(*)' - var u = npm.config.get('unicode') ? '\u2606 ' : '( )' - var using = !(npm.command.match(/^un/)) - if (!using) s = u - asyncMap(args, function (pkg, cb) { - mapToRegistry(pkg, npm.config, function (er, uri, auth) { - if (er) return cb(er) + const opts = StarConfig(npmConfig()) + return BB.try(() => { + if (!args.length) throw new Error(star.usage) + let s = opts.unicode ? '\u2605 ' : '(*)' + const u = opts.unicode ? 
'\u2606 ' : '( )' + const using = !(npm.command.match(/^un/)) + if (!using) s = u + return BB.map(args.map(npa), pkg => { + return BB.all([ + whoami([pkg], true, () => {}), + fetch.json(pkg.escapedName, opts.concat({ + spec: pkg, + query: {write: true}, + 'prefer-online': true + })) + ]).then(([username, fullData]) => { + if (!username) { throw new Error('You need to be logged in!') } + const body = { + _id: fullData._id, + _rev: fullData._rev, + users: fullData.users || {} + } - var params = { - starred: using, - auth: auth - } - npm.registry.star(uri, params, function (er, data, raw, req) { - if (!er) { - output(s + ' ' + pkg) - log.verbose('star', data) + if (using) { + log.info('star', 'starring', body._id) + body.users[username] = true + log.verbose('star', 'starring', body) + } else { + delete body.users[username] + log.info('star', 'unstarring', body._id) + log.verbose('star', 'unstarring', body) } - cb(er, data, raw, req) + return fetch.json(pkg.escapedName, opts.concat({ + spec: pkg, + method: 'PUT', + body + })) + }).then(data => { + output(s + ' ' + pkg.name) + log.verbose('star', data) + return data }) }) - }, cb) + }).nodeify(cb) } diff --git a/lib/stars.js b/lib/stars.js index 4771079356a17..8b28baffd8c9a 100644 --- a/lib/stars.js +++ b/lib/stars.js @@ -1,47 +1,37 @@ -module.exports = stars - -stars.usage = 'npm stars []' - -var npm = require('./npm.js') -var log = require('npmlog') -var mapToRegistry = require('./utils/map-to-registry.js') -var output = require('./utils/output.js') +'use strict' -function stars (args, cb) { - npm.commands.whoami([], true, function (er, username) { - var name = args.length === 1 ? args[0] : username +const BB = require('bluebird') - if (er) { - if (er.code === 'ENEEDAUTH' && !name) { - var needAuth = new Error("'npm stars' on your own user account requires auth") - needAuth.code = 'ENEEDAUTH' - return cb(needAuth) - } - - if (er.code !== 'ENEEDAUTH') return cb(er) - } +const npmConfig = require('./config/figgy-config.js') +const fetch = require('libnpm/fetch') +const log = require('npmlog') +const output = require('./utils/output.js') +const whoami = require('./whoami.js') - mapToRegistry('', npm.config, function (er, uri, auth) { - if (er) return cb(er) +stars.usage = 'npm stars []' - var params = { - username: name, - auth: auth +module.exports = stars +function stars ([user], cb) { + const opts = npmConfig() + return BB.try(() => { + return (user ? BB.resolve(user) : whoami([], true, () => {})).then(usr => { + return fetch.json('/-/_view/starredByUser', opts.concat({ + query: {key: `"${usr}"`} // WHY. WHY THE ""?! 
+ })) + }).then(data => data.rows).then(stars => { + if (stars.length === 0) { + log.warn('stars', 'user has not starred any packages.') + } else { + stars.forEach(s => output(s.value)) } - npm.registry.stars(uri, params, showstars) }) - }) - - function showstars (er, data) { - if (er) return cb(er) - - if (data.rows.length === 0) { - log.warn('stars', 'user has not starred any packages.') - } else { - data.rows.forEach(function (a) { - output(a.value) + }).catch(err => { + if (err.code === 'ENEEDAUTH') { + throw Object.assign(new Error("'npm stars' on your own user account requires auth"), { + code: 'ENEEDAUTH' }) + } else { + throw err } - cb() - } + }).nodeify(cb) } diff --git a/lib/team.js b/lib/team.js index 2d9e61cd4384b..2b56e3b14f95b 100644 --- a/lib/team.js +++ b/lib/team.js @@ -1,19 +1,37 @@ /* eslint-disable standard/no-callback-literal */ -var mapToRegistry = require('./utils/map-to-registry.js') -var npm = require('./npm') -var output = require('./utils/output.js') + +const columns = require('cli-columns') +const figgyPudding = require('figgy-pudding') +const libteam = require('libnpm/team') +const npmConfig = require('./config/figgy-config.js') +const output = require('./utils/output.js') +const otplease = require('./utils/otplease.js') +const usage = require('./utils/usage') module.exports = team team.subcommands = ['create', 'destroy', 'add', 'rm', 'ls', 'edit'] -team.usage = +team.usage = usage( + 'team', 'npm team create \n' + 'npm team destroy \n' + 'npm team add \n' + 'npm team rm \n' + 'npm team ls |\n' + 'npm team edit ' +) + +const TeamConfig = figgyPudding({ + json: {}, + loglevel: {}, + parseable: {}, + silent: {} +}) + +function UsageError () { + throw Object.assign(new Error(team.usage), {code: 'EUSAGE'}) +} team.completion = function (opts, cb) { var argv = opts.conf.argv.remain @@ -33,24 +51,121 @@ team.completion = function (opts, cb) { } } -function team (args, cb) { +function team ([cmd, entity = '', user = ''], cb) { // Entities are in the format : - var cmd = args.shift() - var entity = (args.shift() || '').split(':') - return mapToRegistry('/', npm.config, function (err, uri, auth) { - if (err) { return cb(err) } - try { - return npm.registry.team(cmd, uri, { - auth: auth, - scope: entity[0].replace(/^@/, ''), // '@' prefix on scope is optional. - team: entity[1], - user: args.shift() - }, function (err, data) { - !err && data && output(JSON.stringify(data, undefined, 2)) - cb(err, data) - }) - } catch (e) { - cb(e.message + '\n\nUsage:\n' + team.usage) + otplease(npmConfig(), opts => { + opts = TeamConfig(opts).concat({description: null}) + entity = entity.replace(/^@/, '') + switch (cmd) { + case 'create': return teamCreate(entity, opts) + case 'destroy': return teamDestroy(entity, opts) + case 'add': return teamAdd(entity, user, opts) + case 'rm': return teamRm(entity, user, opts) + case 'ls': { + const match = entity.match(/[^:]+:.+/) + if (match) { + return teamListUsers(entity, opts) + } else { + return teamListTeams(entity, opts) + } + } + case 'edit': + throw new Error('`npm team edit` is not implemented yet.') + default: + UsageError() + } + }).then( + data => cb(null, data), + err => err.code === 'EUSAGE' ? 
cb(err.message) : cb(err) + ) +} + +function teamCreate (entity, opts) { + return libteam.create(entity, opts).then(() => { + if (opts.json) { + output(JSON.stringify({ + created: true, + team: entity + })) + } else if (opts.parseable) { + output(`${entity}\tcreated`) + } else if (!opts.silent && opts.loglevel !== 'silent') { + output(`+@${entity}`) + } + }) +} + +function teamDestroy (entity, opts) { + return libteam.destroy(entity, opts).then(() => { + if (opts.json) { + output(JSON.stringify({ + deleted: true, + team: entity + })) + } else if (opts.parseable) { + output(`${entity}\tdeleted`) + } else if (!opts.silent && opts.loglevel !== 'silent') { + output(`-@${entity}`) + } + }) +} + +function teamAdd (entity, user, opts) { + return libteam.add(user, entity, opts).then(() => { + if (opts.json) { + output(JSON.stringify({ + added: true, + team: entity, + user + })) + } else if (opts.parseable) { + output(`${user}\t${entity}\tadded`) + } else if (!opts.silent && opts.loglevel !== 'silent') { + output(`${user} added to @${entity}`) + } + }) +} + +function teamRm (entity, user, opts) { + return libteam.rm(user, entity, opts).then(() => { + if (opts.json) { + output(JSON.stringify({ + removed: true, + team: entity, + user + })) + } else if (opts.parseable) { + output(`${user}\t${entity}\tremoved`) + } else if (!opts.silent && opts.loglevel !== 'silent') { + output(`${user} removed from @${entity}`) + } + }) +} + +function teamListUsers (entity, opts) { + return libteam.lsUsers(entity, opts).then(users => { + users = users.sort() + if (opts.json) { + output(JSON.stringify(users, null, 2)) + } else if (opts.parseable) { + output(users.join('\n')) + } else if (!opts.silent && opts.loglevel !== 'silent') { + output(`\n@${entity} has ${users.length} user${users.length === 1 ? '' : 's'}:\n`) + output(columns(users, {padding: 1})) + } + }) +} + +function teamListTeams (entity, opts) { + return libteam.lsTeams(entity, opts).then(teams => { + teams = teams.sort() + if (opts.json) { + output(JSON.stringify(teams, null, 2)) + } else if (opts.parseable) { + output(teams.join('\n')) + } else if (!opts.silent && opts.loglevel !== 'silent') { + output(`\n@${entity} has ${teams.length} team${teams.length === 1 ? 
'' : 's'}:\n`) + output(columns(teams.map(t => `@${t}`), {padding: 1})) } }) } diff --git a/lib/token.js b/lib/token.js index d442d37eb806b..96a05e45664f4 100644 --- a/lib/token.js +++ b/lib/token.js @@ -1,7 +1,11 @@ 'use strict' -const profile = require('npm-profile') + +const profile = require('libnpm/profile') const npm = require('./npm.js') +const figgyPudding = require('figgy-pudding') +const npmConfig = require('./config/figgy-config.js') const output = require('./utils/output.js') +const otplease = require('./utils/otplease.js') const Table = require('cli-table3') const Bluebird = require('bluebird') const isCidrV4 = require('is-cidr').v4 @@ -76,23 +80,44 @@ function generateTokenIds (tokens, minLength) { return byId } +const TokenConfig = figgyPudding({ + auth: {}, + registry: {}, + otp: {}, + cidr: {}, + 'read-only': {}, + json: {}, + parseable: {} +}) + function config () { - const conf = { - json: npm.config.get('json'), - parseable: npm.config.get('parseable'), - registry: npm.config.get('registry'), - otp: npm.config.get('otp') - } + let conf = TokenConfig(npmConfig()) const creds = npm.config.getCredentialsByURI(conf.registry) if (creds.token) { - conf.auth = {token: creds.token} + conf = conf.concat({ + auth: { token: creds.token } + }) } else if (creds.username) { - conf.auth = {basic: {username: creds.username, password: creds.password}} + conf = conf.concat({ + auth: { + basic: { + username: creds.username, + password: creds.password + } + } + }) } else if (creds.auth) { const auth = Buffer.from(creds.auth, 'base64').toString().split(':', 2) - conf.auth = {basic: {username: auth[0], password: auth[1]}} + conf = conf.concat({ + auth: { + basic: { + username: auth[0], + password: auth[1] + } + } + }) } else { - conf.auth = {} + conf = conf.concat({ auth: {} }) } if (conf.otp) conf.auth.otp = conf.otp return conf @@ -161,13 +186,8 @@ function rm (args) { } }) return Bluebird.map(toRemove, (key) => { - return profile.removeToken(key, conf).catch((ex) => { - if (ex.code !== 'EOTP') throw ex - log.info('token', 'failed because revoking this token requires OTP') - return readUserInfo.otp().then((otp) => { - conf.auth.otp = otp - return profile.removeToken(key, conf) - }) + return otplease(conf, conf => { + return profile.removeToken(key, conf) }) }) })).then(() => { @@ -183,21 +203,15 @@ function rm (args) { function create (args) { const conf = config() - const cidr = npm.config.get('cidr') - const readonly = npm.config.get('read-only') + const cidr = conf.cidr + const readonly = conf['read-only'] const validCIDR = validateCIDRList(cidr) return readUserInfo.password().then((password) => { log.info('token', 'creating') - return profile.createToken(password, readonly, validCIDR, conf).catch((ex) => { - if (ex.code !== 'EOTP') throw ex - log.info('token', 'failed because it requires OTP') - return readUserInfo.otp().then((otp) => { - conf.auth.otp = otp - log.info('token', 'creating with OTP') - return pulseTillDone.withPromise(profile.createToken(password, readonly, validCIDR, conf)) - }) - }) + return pulseTillDone.withPromise(otplease(conf, conf => { + return profile.createToken(password, readonly, validCIDR, conf) + })) }).then((result) => { delete result.key delete result.updated diff --git a/lib/unbuild.js b/lib/unbuild.js index d527778e92b07..3e8d3e4f1f3ed 100644 --- a/lib/unbuild.js +++ b/lib/unbuild.js @@ -2,17 +2,17 @@ module.exports = unbuild module.exports.rmStuff = rmStuff unbuild.usage = 'npm unbuild \n(this is plumbing)' -var readJson = require('read-package-json') 
-var gentlyRm = require('./utils/gently-rm.js') -var npm = require('./npm.js') -var path = require('path') -var isInside = require('path-is-inside') -var lifecycle = require('./utils/lifecycle.js') -var asyncMap = require('slide').asyncMap -var chain = require('slide').chain -var log = require('npmlog') -var build = require('./build.js') -var output = require('./utils/output.js') +const readJson = require('read-package-json') +const gentlyRm = require('./utils/gently-rm.js') +const npm = require('./npm.js') +const path = require('path') +const isInside = require('path-is-inside') +const lifecycle = require('./utils/lifecycle.js') +const asyncMap = require('slide').asyncMap +const chain = require('slide').chain +const log = require('npmlog') +const build = require('./build.js') +const output = require('./utils/output.js') // args is a list of folders. // remove any bins/etc, and then delete the folder. @@ -30,7 +30,7 @@ function unbuild_ (silent) { cb_(er, path.relative(npm.root, folder)) } folder = path.resolve(folder) - var base = isInside(folder, npm.prefix) ? npm.prefix : folder + const base = isInside(folder, npm.prefix) ? npm.prefix : folder delete build._didBuild[folder] log.verbose('unbuild', folder.substr(npm.prefix.length + 1)) readJson(path.resolve(folder, 'package.json'), function (er, pkg) { @@ -58,11 +58,13 @@ function rmStuff (pkg, folder, cb) { // if it's global, and folder is in {prefix}/node_modules, // then bins are in {prefix}/bin // otherwise, then bins are in folder/../.bin - var parent = pkg.name[0] === '@' ? path.dirname(path.dirname(folder)) : path.dirname(folder) - var gnm = npm.dir + const dir = path.dirname(folder) + const scope = path.basename(dir) + const parent = scope.charAt(0) === '@' ? path.dirname(dir) : dir + const gnm = npm.dir // gnm might be an absolute path, parent might be relative // this checks they're the same directory regardless - var top = path.relative(gnm, parent) === '' + const top = path.relative(gnm, parent) === '' log.verbose('unbuild rmStuff', pkg._id, 'from', gnm) if (!top) log.verbose('unbuild rmStuff', 'in', parent) @@ -73,11 +75,14 @@ function rmStuff (pkg, folder, cb) { function rmBins (pkg, folder, parent, top, cb) { if (!pkg.bin) return cb() - var binRoot = top ? npm.bin : path.resolve(parent, '.bin') + const binRoot = top ? 
npm.bin : path.resolve(parent, '.bin') asyncMap(Object.keys(pkg.bin), function (b, cb) { if (process.platform === 'win32') { - chain([ [gentlyRm, path.resolve(binRoot, b) + '.cmd', true, folder], - [gentlyRm, path.resolve(binRoot, b), true, folder] ], cb) + chain([ + [gentlyRm, path.resolve(binRoot, b) + '.ps1', true, folder], + [gentlyRm, path.resolve(binRoot, b) + '.cmd', true, folder], + [gentlyRm, path.resolve(binRoot, b), true, folder] + ], cb) } else { gentlyRm(path.resolve(binRoot, b), true, folder, cb) } @@ -96,7 +101,7 @@ function rmMans (pkg, folder, parent, top, cb) { !npm.config.get('global')) { return cb() } - var manRoot = path.resolve(npm.config.get('prefix'), 'share', 'man') + const manRoot = path.resolve(npm.config.get('prefix'), 'share', 'man') log.verbose('rmMans', 'man files are', pkg.man, 'in', manRoot) asyncMap(pkg.man, function (man, cb) { if (Array.isArray(man)) { @@ -107,7 +112,7 @@ function rmMans (pkg, folder, parent, top, cb) { function rmMan (man) { log.silly('rmMan', 'preparing to remove', man) - var parseMan = man.match(/(.*\.([0-9]+)(\.gz)?)$/) + const parseMan = man.match(/(.*\.([0-9]+)(\.gz)?)$/) if (!parseMan) { log.error( 'rmMan', man, 'is not a valid name for a man file.', @@ -117,11 +122,11 @@ function rmMans (pkg, folder, parent, top, cb) { return cb() } - var stem = parseMan[1] - var sxn = parseMan[2] - var gz = parseMan[3] || '' - var bn = path.basename(stem) - var manDest = path.join( + const stem = parseMan[1] + const sxn = parseMan[2] + const gz = parseMan[3] || '' + const bn = path.basename(stem) + const manDest = path.join( manRoot, 'man' + sxn, (bn.indexOf(pkg.name) === 0 ? bn : pkg.name + '-' + bn) + '.' + sxn + gz diff --git a/lib/unpublish.js b/lib/unpublish.js index c2e9edd8006f5..59d7d1b5887d2 100644 --- a/lib/unpublish.js +++ b/lib/unpublish.js @@ -1,119 +1,114 @@ /* eslint-disable standard/no-callback-literal */ +'use strict' module.exports = unpublish -var log = require('npmlog') -var npm = require('./npm.js') -var readJson = require('read-package-json') -var path = require('path') -var mapToRegistry = require('./utils/map-to-registry.js') -var npa = require('npm-package-arg') -var getPublishConfig = require('./utils/get-publish-config.js') -var output = require('./utils/output.js') - -unpublish.usage = 'npm unpublish [<@scope>/][@]' - -unpublish.completion = function (opts, cb) { - if (opts.conf.argv.remain.length >= 3) return cb() - npm.commands.whoami([], true, function (er, username) { - if (er) return cb() - - var un = encodeURIComponent(username) - if (!un) return cb() - var byUser = '-/by-user/' + un - mapToRegistry(byUser, npm.config, function (er, uri, auth) { - if (er) return cb(er) - - npm.registry.get(uri, { auth: auth }, function (er, pkgs) { - // do a bit of filtering at this point, so that we don't need - // to fetch versions for more than one thing, but also don't - // accidentally a whole project. 
- pkgs = pkgs[un] - if (!pkgs || !pkgs.length) return cb() - var pp = npa(opts.partialWord).name - pkgs = pkgs.filter(function (p) { - return p.indexOf(pp) === 0 - }) - if (pkgs.length > 1) return cb(null, pkgs) - mapToRegistry(pkgs[0], npm.config, function (er, uri, auth) { - if (er) return cb(er) +const BB = require('bluebird') + +const figgyPudding = require('figgy-pudding') +const libaccess = require('libnpm/access') +const libunpub = require('libnpm/unpublish') +const log = require('npmlog') +const npa = require('npm-package-arg') +const npm = require('./npm.js') +const npmConfig = require('./config/figgy-config.js') +const npmFetch = require('npm-registry-fetch') +const otplease = require('./utils/otplease.js') +const output = require('./utils/output.js') +const path = require('path') +const readJson = BB.promisify(require('read-package-json')) +const usage = require('./utils/usage.js') +const whoami = BB.promisify(require('./whoami.js')) + +unpublish.usage = usage( + 'unpublish', + '\nnpm unpublish [<@scope>/]@' + + '\nnpm unpublish [<@scope>/] --force' +) + +function UsageError () { + throw Object.assign(new Error(`Usage: ${unpublish.usage}`), { + code: 'EUSAGE' + }) +} - npm.registry.get(uri, { auth: auth }, function (er, d) { - if (er) return cb(er) - var vers = Object.keys(d.versions) - if (!vers.length) return cb(null, pkgs) - return cb(null, vers.map(function (v) { - return pkgs[0] + '@' + v - })) - }) - }) +const UnpublishConfig = figgyPudding({ + force: {}, + loglevel: {}, + silent: {} +}) + +unpublish.completion = function (cliOpts, cb) { + if (cliOpts.conf.argv.remain.length >= 3) return cb() + + whoami([], true).then(username => { + if (!username) { return [] } + const opts = UnpublishConfig(npmConfig()) + return libaccess.lsPackages(username, opts).then(access => { + // do a bit of filtering at this point, so that we don't need + // to fetch versions for more than one thing, but also don't + // accidentally a whole project. + let pkgs = Object.keys(access) + if (!cliOpts.partialWord || !pkgs.length) { return pkgs } + const pp = npa(cliOpts.partialWord).name + pkgs = pkgs.filter(p => !p.indexOf(pp)) + if (pkgs.length > 1) return pkgs + return npmFetch.json(npa(pkgs[0]).escapedName, opts).then(doc => { + const vers = Object.keys(doc.versions) + if (!vers.length) { + return pkgs + } else { + return vers.map(v => `${pkgs[0]}@${v}`) + } }) }) - }) + }).nodeify(cb) } function unpublish (args, cb) { if (args.length > 1) return cb(unpublish.usage) - var thing = args.length ? npa(args[0]) : {} - var project = thing.name - var version = thing.rawSpec - - log.silly('unpublish', 'args[0]', args[0]) - log.silly('unpublish', 'thing', thing) - if (!version && !npm.config.get('force')) { - return cb( - 'Refusing to delete entire project.\n' + - 'Run with --force to do this.\n' + - unpublish.usage - ) - } - - if (!project || path.resolve(project) === npm.localPrefix) { - // if there's a package.json in the current folder, then - // read the package name and version out of that. 
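One line in the new unpublish completion above is easy to misread: the filter pkgs.filter(p => !p.indexOf(pp)) keeps only the names that start with the partial word, because an indexOf result of 0 is falsy. A tiny illustration with made-up names:

const pp = 'foo'
const pkgs = ['foobar', 'foo-cli', 'barfoo']
// !p.indexOf(pp) is true only when indexOf(pp) === 0, i.e. a prefix match
console.log(pkgs.filter(p => !p.indexOf(pp))) // => [ 'foobar', 'foo-cli' ]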
- var cwdJson = path.join(npm.localPrefix, 'package.json') - return readJson(cwdJson, function (er, data) { - if (er && er.code !== 'ENOENT' && er.code !== 'ENOTDIR') return cb(er) - if (er) return cb('Usage:\n' + unpublish.usage) - log.verbose('unpublish', data) - gotProject(data.name, data.version, data.publishConfig, cb) - }) - } - return gotProject(project, version, cb) -} - -function gotProject (project, version, publishConfig, cb_) { - if (typeof cb_ !== 'function') { - cb_ = publishConfig - publishConfig = null - } - - function cb (er) { - if (er) return cb_(er) - output('- ' + project + (version ? '@' + version : '')) - cb_() - } - - var mappedConfig = getPublishConfig(publishConfig, npm.config, npm.registry) - var config = mappedConfig.config - var registry = mappedConfig.client - - // remove from the cache first - // npm.commands.cache(['clean', project, version], function (er) { - // if (er) { - // log.error('unpublish', 'Failed to clean cache') - // return cb(er) - // } - - mapToRegistry(project, config, function (er, uri, auth) { - if (er) return cb(er) - - var params = { - version: version, - auth: auth + const spec = args.length && npa(args[0]) + const opts = UnpublishConfig(npmConfig()) + const version = spec.rawSpec + BB.try(() => { + log.silly('unpublish', 'args[0]', args[0]) + log.silly('unpublish', 'spec', spec) + if (!version && !opts.force) { + throw Object.assign(new Error( + 'Refusing to delete entire project.\n' + + 'Run with --force to do this.\n' + + unpublish.usage + ), { code: 'EUSAGE' }) } - registry.unpublish(uri, params, cb) - }) - // }) + if (!spec || path.resolve(spec.name) === npm.localPrefix) { + // if there's a package.json in the current folder, then + // read the package name and version out of that. + const cwdJson = path.join(npm.localPrefix, 'package.json') + return readJson(cwdJson).then(data => { + log.verbose('unpublish', data) + return otplease(opts, opts => { + return libunpub(npa.resolve(data.name, data.version), opts.concat(data.publishConfig)) + }) + }, err => { + if (err && err.code !== 'ENOENT' && err.code !== 'ENOTDIR') { + throw err + } else { + UsageError() + } + }) + } else { + return otplease(opts, opts => libunpub(spec, opts)) + } + }).then( + ret => { + if (!opts.silent && opts.loglevel !== 'silent') { + output(`- ${spec.name}${ + spec.type === 'version' ? `@${spec.rawSpec}` : '' + }`) + } + cb(null, ret) + }, + err => err.code === 'EUSAGE' ? 
cb(err.message) : cb(err) + ) } diff --git a/lib/update.js b/lib/update.js index 9b1345f9dfbfb..fdb934fac6730 100644 --- a/lib/update.js +++ b/lib/update.js @@ -46,7 +46,7 @@ function update_ (args) { "because it's currently at the maximum version that matches its specified semver range" ) } - return ww.current !== ww.wanted && ww.latest !== 'linked' + return ww.current !== ww.wanted }) if (wanted.length === 0) return diff --git a/lib/utils/cache-file.js b/lib/utils/cache-file.js new file mode 100644 index 0000000000000..7b3136b2e2db9 --- /dev/null +++ b/lib/utils/cache-file.js @@ -0,0 +1,70 @@ +// XXX use infer-owner or gentle-fs.mkdir here +const npm = require('../npm.js') +const path = require('path') +const chownr = require('chownr') +const writeFileAtomic = require('write-file-atomic') +const mkdirp = require('mkdirp') +const fs = require('graceful-fs') + +let cache = null +let cacheUid = null +let cacheGid = null +let needChown = typeof process.getuid === 'function' + +const getCacheOwner = () => { + let st + try { + st = fs.lstatSync(cache) + } catch (er) { + if (er.code !== 'ENOENT') { + throw er + } + st = fs.lstatSync(path.dirname(cache)) + } + + cacheUid = st.uid + cacheGid = st.gid + + needChown = st.uid !== process.getuid() || + st.gid !== process.getgid() +} + +const writeOrAppend = (method, file, data) => { + if (!cache) { + cache = npm.config.get('cache') + } + + // redundant if already absolute, but prevents non-absolute files + // from being written as if they're part of the cache. + file = path.resolve(cache, file) + + if (cacheUid === null && needChown) { + getCacheOwner() + } + + const dir = path.dirname(file) + const firstMade = mkdirp.sync(dir) + + if (!needChown) { + return method(file, data) + } + + let methodThrew = true + try { + method(file, data) + methodThrew = false + } finally { + // always try to leave it in the right ownership state, even on failure + // let the method error fail it instead of the chownr error, though + if (!methodThrew) { + chownr.sync(firstMade || file, cacheUid, cacheGid) + } else { + try { + chownr.sync(firstMade || file, cacheUid, cacheGid) + } catch (_) {} + } + } +} + +exports.append = (file, data) => writeOrAppend(fs.appendFileSync, file, data) +exports.write = (file, data) => writeOrAppend(writeFileAtomic.sync, file, data) diff --git a/lib/utils/correct-mkdir.js b/lib/utils/correct-mkdir.js index 68c4a4ad79ae2..57368e946e37f 100644 --- a/lib/utils/correct-mkdir.js +++ b/lib/utils/correct-mkdir.js @@ -1,123 +1,37 @@ -var chownr = require('chownr') -var dezalgo = require('dezalgo') -var fs = require('graceful-fs') -var inflight = require('inflight') -var log = require('npmlog') -var mkdirp = require('mkdirp') +// XXX this can probably be replaced with gentle-fs.mkdir everywhere it's used +const chownr = require('chownr') +const inflight = require('inflight') +const log = require('npmlog') +const mkdirp = require('mkdirp') +const inferOwner = require('infer-owner') + +// retain ownership of the parent dir +// this matches behavior in cacache to infer the cache ownership +// based on the ownership of the cache folder or it is parent. 
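The new lib/utils/cache-file.js above exists so that anything npm writes under the cache (the debug log, _timing.json, the metrics file) keeps the cache directory's owner even when the CLI is run with sudo; error-handler.js and metrics.js below are switched over to it. A rough usage sketch, with illustrative file names that are not part of this diff:

const cacheFile = require('./utils/cache-file.js')

// Both helpers resolve the path against npm.config.get('cache'), mkdirp the
// parent directory, do the write/append, and then chownr whatever was newly
// created back to the cache folder's uid/gid when that differs from the
// current process (i.e. when running as root).
cacheFile.append('_timing.json', JSON.stringify({ example: true }) + '\n')
cacheFile.write('_logs/example-debug.log', 'log output\n')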
-// memoize the directories created by this step -var stats = {} -var effectiveOwner module.exports = function correctMkdir (path, cb) { - cb = dezalgo(cb) - cb = inflight('correctMkdir:' + path, cb) + cb = inflight('correctMkdir: ' + path, cb) if (!cb) { return log.verbose('correctMkdir', path, 'correctMkdir already in flight; waiting') } else { log.verbose('correctMkdir', path, 'correctMkdir not in flight; initializing') } - if (stats[path]) return cb(null, stats[path]) - - fs.stat(path, function (er, st) { - if (er) return makeDirectory(path, cb) - - if (!st.isDirectory()) { - log.error('correctMkdir', 'invalid dir %s', path) - return cb(er) - } - - var ownerStats = calculateOwner() - // there's always a chance the permissions could have been frobbed, so fix - if (st.uid !== ownerStats.uid) { - stats[path] = ownerStats - setPermissions(path, ownerStats, cb) - } else { - stats[path] = st - cb(null, stats[path]) - } - }) -} - -function calculateOwner () { - if (!effectiveOwner) { - effectiveOwner = { uid: 0, gid: 0 } - - // Pretty much only on windows - if (!process.getuid) { - return effectiveOwner - } - - effectiveOwner.uid = +process.getuid() - effectiveOwner.gid = +process.getgid() - - if (effectiveOwner.uid === 0) { - if (process.env.SUDO_UID) effectiveOwner.uid = +process.env.SUDO_UID - if (process.env.SUDO_GID) effectiveOwner.gid = +process.env.SUDO_GID - } - } - - return effectiveOwner -} - -function makeDirectory (path, cb) { - cb = inflight('makeDirectory:' + path, cb) - if (!cb) { - return log.verbose('makeDirectory', path, 'creation already in flight; waiting') - } else { - log.verbose('makeDirectory', path, 'creation not in flight; initializing') - } - - var owner = calculateOwner() - if (!process.getuid) { - return mkdirp(path, function (er) { - log.verbose('makeCacheDir', 'UID & GID are irrelevant on', process.platform) - - stats[path] = owner - return cb(er, stats[path]) - }) + log.verbose('makeCacheDir', 'UID & GID are irrelevant on', process.platform) + return mkdirp(path, (er, made) => cb(er, { uid: 0, gid: 0 })) } - if (owner.uid !== 0 || !process.env.HOME) { - log.silly( - 'makeDirectory', path, - 'uid:', owner.uid, - 'gid:', owner.gid - ) - stats[path] = owner - mkdirp(path, afterMkdir) - } else { - fs.stat(process.env.HOME, function (er, st) { + inferOwner(path).then(owner => { + mkdirp(path, (er, made) => { if (er) { - log.error('makeDirectory', 'homeless?') + log.error('correctMkdir', 'failed to make directory %s', path) return cb(er) } - - log.silly( - 'makeDirectory', path, - 'uid:', st.uid, - 'gid:', st.gid - ) - stats[path] = st - mkdirp(path, afterMkdir) + chownr(made || path, owner.uid, owner.gid, (er) => cb(er, owner)) }) - } - - function afterMkdir (er, made) { - if (er || !stats[path] || isNaN(stats[path].uid) || isNaN(stats[path].gid)) { - return cb(er, stats[path]) - } - - if (!made) return cb(er, stats[path]) - - setPermissions(made, stats[path], cb) - } -} - -function setPermissions (path, st, cb) { - chownr(path, st.uid, st.gid, function (er) { - if (er && er.code === 'ENOENT') return cb(null, st) - return cb(er, st) + }, er => { + log.error('correctMkdir', 'failed to infer path ownership %s', path) + return cb(er) }) } diff --git a/lib/utils/error-handler.js b/lib/utils/error-handler.js index c6481abf6737d..3e6f176ef8206 100644 --- a/lib/utils/error-handler.js +++ b/lib/utils/error-handler.js @@ -11,11 +11,10 @@ var wroteLogFile = false var exitCode = 0 var rollbacks = npm.rollbacks var chain = require('slide').chain -var writeFileAtomic = 
require('write-file-atomic') var errorMessage = require('./error-message.js') var stopMetrics = require('./metrics.js').stop -var mkdirp = require('mkdirp') -var fs = require('graceful-fs') + +const cacheFile = require('./cache-file.js') var logFileName function getLogFile () { @@ -37,10 +36,10 @@ process.on('timing', function (name, value) { process.on('exit', function (code) { process.emit('timeEnd', 'npm') log.disableProgress() - if (npm.config.loaded && npm.config.get('timing')) { + if (npm.config && npm.config.loaded && npm.config.get('timing')) { try { timings.logfile = getLogFile() - fs.appendFileSync(path.join(npm.config.get('cache'), '_timing.json'), JSON.stringify(timings) + '\n') + cacheFile.append('_timing.json', JSON.stringify(timings) + '\n') } catch (_) { // ignore } @@ -65,7 +64,7 @@ process.on('exit', function (code) { log.verbose('code', code) } } - if (npm.config.loaded && npm.config.get('timing') && !wroteLogFile) writeLogFile() + if (npm.config && npm.config.loaded && npm.config.get('timing') && !wroteLogFile) writeLogFile() if (wroteLogFile) { // just a line break if (log.levels[log.level] <= log.levels.error) console.error('') @@ -80,7 +79,7 @@ process.on('exit', function (code) { wroteLogFile = false } - var doExit = npm.config.loaded && npm.config.get('_exit') + var doExit = npm.config && npm.config.loaded && npm.config.get('_exit') if (doExit) { // actually exit. if (exitCode === 0 && !itWorked) { @@ -95,7 +94,7 @@ process.on('exit', function (code) { function exit (code, noLog) { exitCode = exitCode || process.exitCode || code - var doExit = npm.config.loaded ? npm.config.get('_exit') : true + var doExit = npm.config && npm.config.loaded ? npm.config.get('_exit') : true log.verbose('exit', [code, doExit]) if (log.level === 'silent') noLog = true @@ -188,11 +187,12 @@ function errorHandler (er) { log.verbose('npm ', 'v' + npm.version) ;[ + 'code', + 'syscall', 'file', 'path', - 'code', - 'errno', - 'syscall' + 'dest', + 'errno' ].forEach(function (k) { var v = er[k] if (v) log.error(k, v) @@ -202,7 +202,7 @@ function errorHandler (er) { msg.summary.concat(msg.detail).forEach(function (errline) { log.error.apply(log, errline) }) - if (npm.config.get('json')) { + if (npm.config && npm.config.get('json')) { var error = { error: { code: er.code, @@ -228,7 +228,6 @@ function writeLogFile () { var os = require('os') try { - mkdirp.sync(path.resolve(npm.config.get('cache'), '_logs')) var logOutput = '' log.record.forEach(function (m) { var pref = [m.id, m.level] @@ -241,7 +240,7 @@ function writeLogFile () { logOutput += line + os.EOL }) }) - writeFileAtomic.sync(getLogFile(), logOutput) + cacheFile.write(getLogFile(), logOutput) // truncate once it's been written. 
log.record.length = 0 diff --git a/lib/utils/error-message.js b/lib/utils/error-message.js index 6e148981833d3..3faa78f308914 100644 --- a/lib/utils/error-message.js +++ b/lib/utils/error-message.js @@ -2,6 +2,7 @@ var npm = require('../npm.js') var util = require('util') var nameValidator = require('validate-npm-package-name') +var npmlog = require('npmlog') module.exports = errorMessage @@ -33,18 +34,56 @@ function errorMessage (er) { case 'EACCES': case 'EPERM': - short.push(['', er]) + const isCachePath = typeof er.path === 'string' && + npm.config && er.path.startsWith(npm.config.get('cache')) + const isCacheDest = typeof er.dest === 'string' && + npm.config && er.dest.startsWith(npm.config.get('cache')) + + const isWindows = process.platform === 'win32' + + if (!isWindows && (isCachePath || isCacheDest)) { + // user probably doesn't need this, but still add it to the debug log + npmlog.verbose(er.stack) + short.push([ + '', + [ + '', + 'Your cache folder contains root-owned files, due to a bug in', + 'previous versions of npm which has since been addressed.', + '', + 'To permanently fix this problem, please run:', + ` sudo chown -R ${process.getuid()}:${process.getgid()} ${JSON.stringify(npm.config.get('cache'))}` + ].join('\n') + ]) + } else { + short.push(['', er]) + detail.push([ + '', + [ + '\nThe operation was rejected by your operating system.', + (process.platform === 'win32' + ? 'It\'s possible that the file was already in use (by a text editor or antivirus),\n' + + 'or that you lack permissions to access it.' + : 'It is likely you do not have the permissions to access this file as the current user'), + '\nIf you believe this might be a permissions issue, please double-check the', + 'permissions of the file and its containing directories, or try running', + 'the command again as root/Administrator.' + ].join('\n')]) + } + break + + case 'EUIDLOOKUP': + short.push(['lifecycle', er.message]) detail.push([ '', [ - '\nThe operation was rejected by your operating system.', - (process.platform === 'win32' - ? 'It\'s possible that the file was already in use (by a text editor or antivirus),\nor that you lack permissions to access it.' - : 'It is likely you do not have the permissions to access this file as the current user'), - '\nIf you believe this might be a permissions issue, please double-check the', - 'permissions of the file and its containing directories, or try running', - 'the command again as root/Administrator (though this is not recommended).' - ].join('\n')]) + '', + 'Failed to look up the user/group for running scripts.', + '', + 'Try again with a different --user or --group settings, or', + 'run with --unsafe-perm to execute scripts as root.' + ].join('\n') + ]) break case 'ELIFECYCLE': @@ -103,8 +142,7 @@ function errorMessage (er) { case 'EOTP': case 'E401': - // the E401 message checking is a hack till we replace npm-registry-client with something - // OTP aware. 
+ // E401 is for places where we accidentally neglect OTP stuff if (er.code === 'EOTP' || /one-time pass/.test(er.message)) { short.push(['', 'This operation requires a one-time password from your authenticator.']) detail.push([ @@ -155,10 +193,12 @@ function errorMessage (er) { var msg = er.message.replace(/^404\s+/, '') short.push(['404', msg]) if (er.pkgid && er.pkgid !== '-') { + var pkg = er.pkgid.replace(/(?!^)@.*$/, '') + detail.push(['404', '']) detail.push(['404', '', "'" + er.pkgid + "' is not in the npm registry."]) - var valResult = nameValidator(er.pkgid) + var valResult = nameValidator(pkg) if (valResult.validForNewPackages) { detail.push(['404', 'You should bug the author to publish it (or use the name yourself!)']) @@ -240,8 +280,9 @@ function errorMessage (er) { case 'EEXIST': short.push(['', er.message]) - short.push(['', 'File exists: ' + er.path]) - detail.push(['', 'Move it away, and try again.']) + short.push(['', 'File exists: ' + (er.dest || er.path)]) + detail.push(['', 'Remove the existing file and try again, or run npm']) + detail.push(['', 'with --force to overwrite files recklessly.']) break case 'ENEEDAUTH': @@ -287,6 +328,18 @@ function errorMessage (er) { detail.push(['notarget', msg.join('\n')]) break + case 'E403': + short.push(['403', er.message]) + msg = [ + 'In most cases, you or one of your dependencies are requesting', + 'a package version that is forbidden by your security policy.' + ] + if (er.parent) { + msg.push("\nIt was specified as a dependency of '" + er.parent + "'\n") + } + detail.push(['403', msg.join('\n')]) + break + case 'ENOTSUP': if (er.required) { short.push(['notsup', er.message]) diff --git a/lib/utils/escape-arg.js b/lib/utils/escape-arg.js index d12ee5edf5820..114abaadaa090 100644 --- a/lib/utils/escape-arg.js +++ b/lib/utils/escape-arg.js @@ -1,6 +1,6 @@ 'use strict' var path = require('path') -var isWindowsShell = require('./is-windows-shell.js') +var isWindows = require('./is-windows.js') /* Escape the name of an executable suitable for passing to the system shell. @@ -15,7 +15,7 @@ any single quotes in the filename. module.exports = escapify function escapify (str) { - if (isWindowsShell) { + if (isWindows) { return '"' + path.normalize(str) + '"' } else { if (/[^-_.~/\w]/.test(str)) { diff --git a/lib/utils/escape-exec-path.js b/lib/utils/escape-exec-path.js index bf94886efa331..42b64934867dd 100644 --- a/lib/utils/escape-exec-path.js +++ b/lib/utils/escape-exec-path.js @@ -1,6 +1,6 @@ 'use strict' var path = require('path') -var isWindowsShell = require('./is-windows-shell.js') +var isWindows = require('./is-windows.js') /* Escape the name of an executable suitable for passing to the system shell. 
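Back in the lib/utils/error-message.js hunk above, the 404 branch now validates the bare package name rather than the full pkgid: er.pkgid.replace(/(?!^)@.*$/, '') drops a trailing @version or @range while leaving a leading scope intact. For example, with illustrative specs:

const stripSpec = id => id.replace(/(?!^)@.*$/, '')
stripSpec('lodash@1.2.3')       // => 'lodash'
stripSpec('@scope/pkg@^2.0.0')  // => '@scope/pkg'
stripSpec('@scope/pkg')         // => '@scope/pkg' (unchanged)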
@@ -20,7 +20,7 @@ function windowsQuotes (str) { } function escapify (str) { - if (isWindowsShell) { + if (isWindows) { return path.normalize(str).split(/\\/).map(windowsQuotes).join('\\') } else if (/[^-_.~/\w]/.test(str)) { return "'" + str.replace(/'/g, "'\"'\"'") + "'" diff --git a/lib/utils/funding.js b/lib/utils/funding.js new file mode 100644 index 0000000000000..5375639109ee2 --- /dev/null +++ b/lib/utils/funding.js @@ -0,0 +1,183 @@ +'use strict' + +const URL = require('url').URL + +exports.getFundingInfo = getFundingInfo +exports.retrieveFunding = retrieveFunding +exports.validFundingField = validFundingField + +const flatCacheSymbol = Symbol('npm flat cache') +exports.flatCacheSymbol = flatCacheSymbol + +// supports object funding and string shorthand, or an array of these +// if original was an array, returns an array; else returns the lone item +function retrieveFunding (funding) { + const sources = [].concat(funding || []).map(item => ( + typeof item === 'string' + ? { url: item } + : item + )) + return Array.isArray(funding) ? sources : sources[0] +} + +// Is the value of a `funding` property of a `package.json` +// a valid type+url for `npm fund` to display? +function validFundingField (funding) { + if (!funding) return false + + if (Array.isArray(funding)) { + return funding.every(f => !Array.isArray(f) && validFundingField(f)) + } + + try { + var parsed = new URL(funding.url || funding) + } catch (error) { + return false + } + + if ( + parsed.protocol !== 'https:' && + parsed.protocol !== 'http:' + ) return false + + return Boolean(parsed.host) +} + +const empty = () => Object.create(null) + +function getFundingInfo (idealTree, opts) { + let packageWithFundingCount = 0 + const flat = empty() + const seen = new Set() + const { countOnly } = opts || {} + const _trailingDependencies = Symbol('trailingDependencies') + + function tracked (name, version) { + const key = String(name) + String(version) + if (seen.has(key)) { + return true + } + seen.add(key) + } + + function retrieveDependencies (dependencies) { + const trailing = dependencies[_trailingDependencies] + + if (trailing) { + return Object.assign( + empty(), + dependencies, + trailing + ) + } + + return dependencies + } + + function hasDependencies (dependencies) { + return dependencies && ( + Object.keys(dependencies).length || + dependencies[_trailingDependencies] + ) + } + + function addToFlatCache (funding, dep) { + [].concat(funding || []).forEach((f) => { + const key = f.url + if (!Array.isArray(flat[key])) { + flat[key] = [] + } + flat[key].push(dep) + }) + } + + function attachFundingInfo (target, funding, dep) { + if (funding && validFundingField(funding)) { + target.funding = retrieveFunding(funding) + if (!countOnly) { + addToFlatCache(target.funding, dep) + } + + packageWithFundingCount++ + } + } + + function getFundingDependencies (tree) { + const deps = tree && tree.dependencies + if (!deps) return empty() + + const directDepsWithFunding = Object.keys(deps).map((key) => { + const dep = deps[key] + const { name, funding, version } = dep + + // avoids duplicated items within the funding tree + if (tracked(name, version)) return empty() + + const fundingItem = {} + + if (version) { + fundingItem.version = version + } + + attachFundingInfo(fundingItem, funding, dep) + + return { + dep, + fundingItem + } + }) + + return directDepsWithFunding.reduce((res, { dep: directDep, fundingItem }, i) => { + if (!fundingItem || fundingItem.length === 0) return res + + // recurse + const transitiveDependencies = 
directDep.dependencies && + Object.keys(directDep.dependencies).length > 0 && + getFundingDependencies(directDep) + + // if we're only counting items there's no need + // to add all the data to the resulting object + if (countOnly) return null + + if (hasDependencies(transitiveDependencies)) { + fundingItem.dependencies = retrieveDependencies(transitiveDependencies) + } + + if (fundingItem.funding && fundingItem.funding.length !== 0) { + res[directDep.name] = fundingItem + } else if (fundingItem.dependencies) { + res[_trailingDependencies] = + Object.assign( + empty(), + res[_trailingDependencies], + fundingItem.dependencies + ) + } + + return res + }, countOnly ? null : empty()) + } + + const idealTreeDependencies = getFundingDependencies(idealTree) + const result = { + length: packageWithFundingCount + } + + if (!countOnly) { + result.name = idealTree.name || idealTree.path + + if (idealTree && idealTree.version) { + result.version = idealTree.version + } + + if (idealTree && idealTree.funding) { + result.funding = retrieveFunding(idealTree.funding) + } + + result.dependencies = retrieveDependencies(idealTreeDependencies) + + result[flatCacheSymbol] = flat + } + + return result +} diff --git a/lib/utils/get-publish-config.js b/lib/utils/get-publish-config.js deleted file mode 100644 index ac0ef0934201a..0000000000000 --- a/lib/utils/get-publish-config.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -const clientConfig = require('../config/reg-client.js') -const Conf = require('../config/core.js').Conf -const log = require('npmlog') -const npm = require('../npm.js') -const RegClient = require('npm-registry-client') - -module.exports = getPublishConfig - -function getPublishConfig (publishConfig, defaultConfig, defaultClient) { - let config = defaultConfig - let client = defaultClient - log.verbose('getPublishConfig', publishConfig) - if (publishConfig) { - config = new Conf(defaultConfig) - config.save = defaultConfig.save.bind(defaultConfig) - - // don't modify the actual publishConfig object, in case we have - // to set a login token or some other data. - config.unshift(Object.keys(publishConfig).reduce(function (s, k) { - s[k] = publishConfig[k] - return s - }, {})) - client = new RegClient(clientConfig(npm, log, config)) - } - - return { config: config, client: client } -} diff --git a/lib/utils/git.js b/lib/utils/git.js index 6770853dd9622..1951640e81568 100644 --- a/lib/utils/git.js +++ b/lib/utils/git.js @@ -28,6 +28,16 @@ function execGit (args, options, cb) { function spawnGit (args, options) { log.info('git', args) + // If we're already in a git command (eg, running test as an exec + // line in an interactive rebase) then these environment variables + // will force git to operate on the current project, instead of + // checking out/fetching/etc. whatever the user actually intends. 
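Stepping back to the new lib/utils/funding.js above: retrieveFunding normalizes both the object form and the string shorthand of a package.json funding field (and arrays of either), while validFundingField accepts only http/https URLs that have a host. Roughly, under those rules (the example.com values are illustrative):

const { retrieveFunding, validFundingField } = require('./utils/funding.js')

// string shorthand becomes an object; arrays stay arrays
retrieveFunding('https://example.com/donate')
// => { url: 'https://example.com/donate' }
retrieveFunding(['https://example.com/donate', { type: 'individual', url: 'https://example.com/alt' }])
// => [ { url: 'https://example.com/donate' }, { type: 'individual', url: 'https://example.com/alt' } ]

validFundingField({ url: 'https://example.com' }) // => true
validFundingField('ftp://example.com/donate')     // => false (protocol is not http/https)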
+ options.env = options.env || Object.keys(process.env) + .filter(k => !/^GIT/.test(k)) + .reduce((set, k) => { + set[k] = process.env[k] + return set + }, {}) return spawn(git, prefixGitArgs().concat(args || []), options) } diff --git a/lib/utils/map-to-registry.js b/lib/utils/map-to-registry.js deleted file mode 100644 index d6e0a5b01f4d5..0000000000000 --- a/lib/utils/map-to-registry.js +++ /dev/null @@ -1,103 +0,0 @@ -var url = require('url') - -var log = require('npmlog') -var npa = require('npm-package-arg') -var config - -module.exports = mapToRegistry - -function mapToRegistry (name, config, cb) { - log.silly('mapToRegistry', 'name', name) - var registry - - // the name itself takes precedence - var data = npa(name) - if (data.scope) { - // the name is definitely scoped, so escape now - name = name.replace('/', '%2f') - - log.silly('mapToRegistry', 'scope (from package name)', data.scope) - - registry = config.get(data.scope + ':registry') - if (!registry) { - log.verbose('mapToRegistry', 'no registry URL found in name for scope', data.scope) - } - } - - // ...then --scope=@scope or --scope=scope - var scope = config.get('scope') - if (!registry && scope) { - // I'm an enabler, sorry - if (scope.charAt(0) !== '@') scope = '@' + scope - - log.silly('mapToRegistry', 'scope (from config)', scope) - - registry = config.get(scope + ':registry') - if (!registry) { - log.verbose('mapToRegistry', 'no registry URL found in config for scope', scope) - } - } - - // ...and finally use the default registry - if (!registry) { - log.silly('mapToRegistry', 'using default registry') - registry = config.get('registry') - } - - log.silly('mapToRegistry', 'registry', registry) - - var auth = config.getCredentialsByURI(registry) - - // normalize registry URL so resolution doesn't drop a piece of registry URL - var normalized = registry.slice(-1) !== '/' ? 
registry + '/' : registry - var uri - log.silly('mapToRegistry', 'data', data) - if (data.type === 'remote') { - uri = data.fetchSpec - } else { - uri = url.resolve(normalized, name) - } - - log.silly('mapToRegistry', 'uri', uri) - - cb(null, uri, scopeAuth(uri, registry, auth), normalized) -} - -function scopeAuth (uri, registry, auth) { - var cleaned = { - scope: auth.scope, - email: auth.email, - alwaysAuth: auth.alwaysAuth, - token: undefined, - username: undefined, - password: undefined, - auth: undefined - } - - var requestHost - var registryHost - - if (auth.token || auth.auth || (auth.username && auth.password)) { - requestHost = url.parse(uri).hostname - registryHost = url.parse(registry).hostname - - if (requestHost === registryHost) { - cleaned.token = auth.token - cleaned.auth = auth.auth - cleaned.username = auth.username - cleaned.password = auth.password - } else if (auth.alwaysAuth) { - log.verbose('scopeAuth', 'alwaysAuth set for', registry) - cleaned.token = auth.token - cleaned.auth = auth.auth - cleaned.username = auth.username - cleaned.password = auth.password - } else { - log.silly('scopeAuth', uri, "doesn't share host with registry", registry) - } - if (!config) config = require('../npm').config - if (config.get('otp')) cleaned.otp = config.get('otp') - } - - return cleaned -} diff --git a/lib/utils/metrics.js b/lib/utils/metrics.js index c51136e78cdb7..1a4bb79a6e85d 100644 --- a/lib/utils/metrics.js +++ b/lib/utils/metrics.js @@ -4,12 +4,14 @@ exports.stop = stopMetrics exports.save = saveMetrics exports.send = sendMetrics -var fs = require('fs') -var path = require('path') -var npm = require('../npm.js') -var uuid = require('uuid') +const fs = require('fs') +const path = require('path') +const npm = require('../npm.js') +const regFetch = require('libnpm/fetch') +const uuid = require('uuid') +const cacheFile = require('./cache-file.js') -var inMetrics = false +let inMetrics = false function startMetrics () { if (inMetrics) return @@ -50,24 +52,27 @@ function saveMetrics (itWorked) { } } try { - fs.writeFileSync(metricsFile, JSON.stringify(metrics)) + cacheFile.write(metricsFile, JSON.stringify(metrics)) } catch (ex) { - // we couldn't write the error metrics file, um, well, oh well. + // we couldn't write and/or chown the error metrics file, oh well. 
} } function sendMetrics (metricsFile, metricsRegistry) { inMetrics = true var cliMetrics = JSON.parse(fs.readFileSync(metricsFile)) - npm.load({}, function (err) { - if (err) return - npm.registry.config.retry.retries = 0 - npm.registry.sendAnonymousCLIMetrics(metricsRegistry, cliMetrics, function (err) { - if (err) { - fs.writeFileSync(path.join(path.dirname(metricsFile), 'last-send-metrics-error.txt'), err.stack) - } else { - fs.unlinkSync(metricsFile) - } - }) + regFetch( + `/-/npm/anon-metrics/v1/${encodeURIComponent(cliMetrics.metricId)}`, + // NOTE: skip npmConfig() to prevent auth + { + registry: metricsRegistry, + method: 'PUT', + body: cliMetrics.metrics, + retry: false + } + ).then(() => { + fs.unlinkSync(metricsFile) + }, err => { + cacheFile.write(path.join(path.dirname(metricsFile), 'last-send-metrics-error.txt'), err.stack) }) } diff --git a/lib/utils/module-name.js b/lib/utils/module-name.js index 89957b181fd05..18f54e4118d6d 100644 --- a/lib/utils/module-name.js +++ b/lib/utils/module-name.js @@ -21,6 +21,7 @@ function isNotEmpty (str) { var unknown = 0 function moduleName (tree) { + if (tree.name) { return tree.name } var pkg = tree.package || tree if (isNotEmpty(pkg.name) && typeof pkg.name === 'string') return pkg.name.trim() var pkgName = pathToPackageName(tree.path) diff --git a/lib/utils/open-url.js b/lib/utils/open-url.js index 7a48d2e868959..e1ed2b3fab76d 100644 --- a/lib/utils/open-url.js +++ b/lib/utils/open-url.js @@ -5,9 +5,28 @@ const opener = require('opener') // attempt to open URL in web-browser, print address otherwise: module.exports = function open (url, errMsg, cb, browser = npm.config.get('browser')) { - opener(url, { command: npm.config.get('browser') }, (er) => { + function printAlternateMsg () { + const json = npm.config.get('json') + const alternateMsg = json + ? JSON.stringify({ + title: errMsg, + url + }, null, 2) + : `${errMsg}:\n\n${url}` + + output(alternateMsg) + } + + const skipBrowser = process.argv.indexOf('--no-browser') > -1 + + if (skipBrowser) { + printAlternateMsg() + return cb() + } + + opener(url, { command: browser }, (er) => { if (er && er.code === 'ENOENT') { - output(`${errMsg}:\n\n${url}`) + printAlternateMsg() return cb() } else { return cb(er) diff --git a/lib/utils/otplease.js b/lib/utils/otplease.js new file mode 100644 index 0000000000000..d0477a896d004 --- /dev/null +++ b/lib/utils/otplease.js @@ -0,0 +1,27 @@ +'use strict' + +const BB = require('bluebird') + +const optCheck = require('figgy-pudding')({ + prompt: {default: 'This operation requires a one-time password.\nEnter OTP:'}, + otp: {} +}) +const readUserInfo = require('./read-user-info.js') + +module.exports = otplease +function otplease (opts, fn) { + opts = opts.concat ? 
opts : optCheck(opts) + return BB.try(() => { + return fn(opts) + }).catch(err => { + if (err.code !== 'EOTP' && !(err.code === 'E401' && /one-time pass/.test(err.body))) { + throw err + } else if (!process.stdin.isTTY || !process.stdout.isTTY) { + throw err + } else { + return readUserInfo.otp( + optCheck(opts).prompt + ).then(otp => fn(opts.concat({otp}))) + } + }) +} diff --git a/lib/utils/unsupported.js b/lib/utils/unsupported.js index bfed5cdbea634..c78cdcbc29494 100644 --- a/lib/utils/unsupported.js +++ b/lib/utils/unsupported.js @@ -1,21 +1,14 @@ 'use strict' var semver = require('semver') -var supportedNode = [ - {ver: '6', min: '6.0.0'}, - {ver: '8', min: '8.0.0'}, - {ver: '9', min: '9.0.0'}, - {ver: '10', min: '10.0.0'}, - {ver: '11', min: '11.0.0'}, - {ver: '12', min: '12.0.0'} -] -var knownBroken = '<4.7.0' +var supported = require('../../package.json').engines.node +var knownBroken = '<6.2.0 || 9 <9.3.0' var checkVersion = exports.checkVersion = function (version) { var versionNoPrerelease = version.replace(/-.*$/, '') return { version: versionNoPrerelease, broken: semver.satisfies(versionNoPrerelease, knownBroken), - unsupported: !semver.satisfies(versionNoPrerelease, supportedNode.map(function (n) { return '^' + n.min }).join('||')) + unsupported: !semver.satisfies(versionNoPrerelease, supported) } } @@ -23,18 +16,8 @@ exports.checkForBrokenNode = function () { var nodejs = checkVersion(process.version) if (nodejs.broken) { console.error('ERROR: npm is known not to run on Node.js ' + process.version) - supportedNode.forEach(function (rel) { - if (semver.satisfies(nodejs.version, rel.ver)) { - console.error('Node.js ' + rel.ver + " is supported but the specific version you're running has") - console.error('a bug known to break npm. Please update to at least ' + rel.min + ' to use this') - console.error('version of npm. You can find the latest release of Node.js at https://nodejs.org/') - process.exit(1) - } - }) - var supportedMajors = supportedNode.map(function (n) { return n.ver }).join(', ') - console.error("You'll need to upgrade to a newer version in order to use this") - console.error('version of npm. Supported versions are ' + supportedMajors + '. You can find the') - console.error('latest version at https://nodejs.org/') + console.error("You'll need to upgrade to a newer Node.js version in order to use this") + console.error('version of npm. 
You can find the latest version at https://nodejs.org/') process.exit(1) } } @@ -43,11 +26,9 @@ exports.checkForUnsupportedNode = function () { var nodejs = checkVersion(process.version) if (nodejs.unsupported) { var log = require('npmlog') - var supportedMajors = supportedNode.map(function (n) { return n.ver }).join(', ') log.warn('npm', 'npm does not support Node.js ' + process.version) log.warn('npm', 'You should probably upgrade to a newer version of node as we') log.warn('npm', "can't make any promises that npm will work with this version.") - log.warn('npm', 'Supported releases of Node.js are the latest release of ' + supportedMajors + '.') log.warn('npm', 'You can find the latest version at https://nodejs.org/') } } diff --git a/lib/version.js b/lib/version.js index 265b049bf3914..6619a8ba9d0d7 100644 --- a/lib/version.js +++ b/lib/version.js @@ -256,7 +256,7 @@ function checkGit (localData, cb) { statGitFolder(function (er) { var doGit = !er && npm.config.get('git-tag-version') if (!doGit) { - if (er) log.verbose('version', 'error checking for .git', er) + if (er && npm.config.get('git-tag-version')) log.verbose('version', 'error checking for .git', er) log.verbose('version', 'not tagging in git') return cb(null, false) } @@ -286,22 +286,32 @@ function checkGit (localData, cb) { module.exports.buildCommitArgs = buildCommitArgs function buildCommitArgs (args) { - args = args || [ 'commit' ] - if (!npm.config.get('commit-hooks')) args.push('-n') - return args + const add = [] + args = args || [] + if (args[0] === 'commit') args.shift() + if (!npm.config.get('commit-hooks')) add.push('-n') + if (npm.config.get('allow-same-version')) add.push('--allow-empty') + return ['commit', ...add, ...args] +} + +module.exports.buildTagFlags = buildTagFlags +function buildTagFlags () { + return '-'.concat( + npm.config.get('sign-git-tag') ? 's' : '', + npm.config.get('allow-same-version') ? 'f' : '', + 'm' + ) } function _commit (version, localData, cb) { const options = { env: process.env } const message = npm.config.get('message').replace(/%s/g, version) - const signTag = npm.config.get('sign-git-tag') const signCommit = npm.config.get('sign-git-commit') const commitArgs = buildCommitArgs([ 'commit', ...(signCommit ? ['-S', '-m'] : ['-m']), message ]) - const flagForTag = signTag ? 
'-sm' : '-am' stagePackageFiles(localData, options).then(() => { return git.exec(commitArgs, options) @@ -309,7 +319,7 @@ function _commit (version, localData, cb) { if (!localData.existingTag) { return git.exec([ 'tag', npm.config.get('tag-version-prefix') + version, - flagForTag, message + buildTagFlags(), message ], options) } }).nodeify(cb) diff --git a/lib/view.js b/lib/view.js index b7d7f6ec80310..a16884e25f647 100644 --- a/lib/view.js +++ b/lib/view.js @@ -8,17 +8,27 @@ const BB = require('bluebird') const byteSize = require('byte-size') const color = require('ansicolors') const columns = require('cli-columns') +const npmConfig = require('./config/figgy-config.js') +const log = require('npmlog') +const figgyPudding = require('figgy-pudding') +const npa = require('libnpm/parse-arg') +const npm = require('./npm.js') +const packument = require('libnpm/packument') +const path = require('path') +const readJson = require('libnpm/read-json') const relativeDate = require('tiny-relative-date') +const semver = require('semver') const style = require('ansistyles') -var npm = require('./npm.js') -var readJson = require('read-package-json') -var log = require('npmlog') -var util = require('util') -var semver = require('semver') -var mapToRegistry = require('./utils/map-to-registry.js') -var npa = require('npm-package-arg') -var path = require('path') -var usage = require('./utils/usage') +const usage = require('./utils/usage') +const util = require('util') +const validateName = require('validate-npm-package-name') + +const ViewConfig = figgyPudding({ + global: {}, + json: {}, + tag: {}, + unicode: {} +}) view.usage = usage( 'view', @@ -32,19 +42,14 @@ view.completion = function (opts, cb) { return cb() } // have the package, get the fields. - var tag = npm.config.get('tag') - mapToRegistry(opts.conf.argv.remain[2], npm.config, function (er, uri, auth) { - if (er) return cb(er) - - npm.registry.get(uri, { auth: auth }, function (er, d) { - if (er) return cb(er) - var dv = d.versions[d['dist-tags'][tag]] - var fields = [] - d.versions = Object.keys(d.versions).sort(semver.compareLoose) - fields = getFields(d).concat(getFields(dv)) - cb(null, fields) - }) - }) + const config = ViewConfig(npmConfig()) + const tag = config.tag + const spec = npa(opts.conf.argv.remain[2]) + return packument(spec, config).then(d => { + const dv = d.versions[d['dist-tags'][tag]] + d.versions = Object.keys(d.versions).sort(semver.compareLoose) + return getFields(d).concat(getFields(dv)) + }).nodeify(cb) function getFields (d, f, pref) { f = f || [] @@ -52,11 +57,11 @@ view.completion = function (opts, cb) { pref = pref || [] Object.keys(d).forEach(function (k) { if (k.charAt(0) === '_' || k.indexOf('.') !== -1) return - var p = pref.concat(k).join('.') + const p = pref.concat(k).join('.') f.push(p) if (Array.isArray(d[k])) { d[k].forEach(function (val, i) { - var pi = p + '[' + i + ']' + const pi = p + '[' + i + ']' if (val && typeof val === 'object') getFields(val, f, [p]) else f.push(pi) }) @@ -76,113 +81,133 @@ function view (args, silent, cb) { if (!args.length) args = ['.'] - var pkg = args.shift() - var nv + const opts = ViewConfig(npmConfig()) + const pkg = args.shift() + let nv if (/^[.]@/.test(pkg)) { nv = npa.resolve(null, pkg.slice(2)) } else { nv = npa(pkg) } - var name = nv.name - var local = (name === '.' || !name) + const name = nv.name + const local = (name === '.' 
|| !name) - if (npm.config.get('global') && local) { + if (opts.global && local) { return cb(new Error('Cannot use view command in global mode.')) } if (local) { - var dir = npm.prefix - readJson(path.resolve(dir, 'package.json'), function (er, d) { + const dir = npm.prefix + BB.resolve(readJson(path.resolve(dir, 'package.json'))).nodeify((er, d) => { d = d || {} if (er && er.code !== 'ENOENT' && er.code !== 'ENOTDIR') return cb(er) if (!d.name) return cb(new Error('Invalid package.json')) - var p = d.name + const p = d.name nv = npa(p) if (pkg && ~pkg.indexOf('@')) { nv.rawSpec = pkg.split('@')[pkg.indexOf('@')] } - fetchAndRead(nv, args, silent, cb) + fetchAndRead(nv, args, silent, opts, cb) }) } else { - fetchAndRead(nv, args, silent, cb) + fetchAndRead(nv, args, silent, opts, cb) } } -function fetchAndRead (nv, args, silent, cb) { +function fetchAndRead (nv, args, silent, opts, cb) { // get the data about this package - var name = nv.name - var version = nv.rawSpec || npm.config.get('tag') - - mapToRegistry(name, npm.config, function (er, uri, auth) { - if (er) return cb(er) - - npm.registry.get(uri, { auth: auth }, function (er, data) { - if (er) return cb(er) - if (data['dist-tags'] && data['dist-tags'][version]) { - version = data['dist-tags'][version] - } - - if (data.time && data.time.unpublished) { - var u = data.time.unpublished - er = new Error('Unpublished by ' + u.name + ' on ' + u.time) - er.statusCode = 404 - er.code = 'E404' - er.pkgid = data._id - return cb(er, data) + let version = nv.rawSpec || npm.config.get('tag') + + return packument(nv, opts.concat({ + fullMetadata: true, + 'prefer-online': true + })).catch(err => { + // TODO - this should probably go into pacote, but the tests expect it. + if (err.code === 'E404') { + err.message = `'${nv.name}' is not in the npm registry.` + const validated = validateName(nv.name) + if (!validated.validForNewPackages) { + err.message += '\n' + err.message += (validated.errors || []).join('\n') + err.message += (validated.warnings || []).join('\n') + } else { + err.message += '\nYou should bug the author to publish it' + err.message += '\n(or use the name yourself!)' + err.message += '\n' + err.message += '\nNote that you can also install from a' + err.message += '\ntarball, folder, http url, or git url.' 
} + } + throw err + }).then(data => { + if (data['dist-tags'] && data['dist-tags'][version]) { + version = data['dist-tags'][version] + } - var results = [] - var error = null - var versions = data.versions || {} - data.versions = Object.keys(versions).sort(semver.compareLoose) - if (!args.length) args = [''] + if (data.time && data.time.unpublished) { + const u = data.time.unpublished + let er = new Error('Unpublished by ' + u.name + ' on ' + u.time) + er.statusCode = 404 + er.code = 'E404' + er.pkgid = data._id + throw er + } - // remove readme unless we asked for it - if (args.indexOf('readme') === -1) { - delete data.readme - } + const results = [] + let error = null + const versions = data.versions || {} + data.versions = Object.keys(versions).sort(semver.compareLoose) + if (!args.length) args = [''] - Object.keys(versions).forEach(function (v) { - if (semver.satisfies(v, version, true)) { - args.forEach(function (args) { - // remove readme unless we asked for it - if (args.indexOf('readme') !== -1) { - delete versions[v].readme - } - results.push(showFields(data, versions[v], args)) - }) - } - }) - var retval = results.reduce(reducer, {}) - - if (args.length === 1 && args[0] === '') { - retval = cleanBlanks(retval) - log.silly('cleanup', retval) - } + // remove readme unless we asked for it + if (args.indexOf('readme') === -1) { + delete data.readme + } - if (error || silent) { - cb(error, retval) - } else if ( - !npm.config.get('json') && - args.length === 1 && - args[0] === '' - ) { - data.version = version - BB.all(results.map((v) => prettyView(data, v[Object.keys(v)[0]]['']))) - .nodeify(cb) - .then(() => retval) - } else { - printData(retval, data._id, cb.bind(null, error, retval)) + Object.keys(versions).forEach(function (v) { + if (semver.satisfies(v, version, true)) { + args.forEach(function (args) { + // remove readme unless we asked for it + if (args.indexOf('readme') !== -1) { + delete versions[v].readme + } + results.push(showFields(data, versions[v], args)) + }) } }) - }) + let retval = results.reduce(reducer, {}) + + if (args.length === 1 && args[0] === '') { + retval = cleanBlanks(retval) + log.silly('view', retval) + } + + if (silent) { + return retval + } else if (error) { + throw error + } else if ( + !opts.json && + args.length === 1 && + args[0] === '' + ) { + data.version = version + return BB.all( + results.map((v) => prettyView(data, v[Object.keys(v)[0]][''], opts)) + ).then(() => retval) + } else { + return BB.fromNode(cb => { + printData(retval, data._id, opts, cb) + }).then(() => retval) + } + }).nodeify(cb) } -function prettyView (packument, manifest) { +function prettyView (packument, manifest, opts) { // More modern, pretty printing of default view - const unicode = npm.config.get('unicode') + const unicode = opts.unicode return BB.try(() => { if (!manifest) { log.error( @@ -219,7 +244,7 @@ function prettyView (packument, manifest) { name: color.yellow(manifest._npmUser.name), email: color.cyan(manifest._npmUser.email) }), - modified: color.yellow(relativeDate(packument.time[packument.version])), + modified: packument.time ? 
color.yellow(relativeDate(packument.time[packument.version])) : undefined, maintainers: (packument.maintainers || []).map((u) => unparsePerson({ name: color.yellow(u.name), email: color.cyan(u.email) @@ -312,7 +337,7 @@ function prettyView (packument, manifest) { } function cleanBlanks (obj) { - var clean = {} + const clean = {} Object.keys(obj).forEach(function (version) { clean[version] = obj[version][''] }) @@ -334,7 +359,7 @@ function reducer (l, r) { // return whatever was printed function showFields (data, version, fields) { - var o = {} + const o = {} ;[data, version].forEach(function (s) { Object.keys(s).forEach(function (k) { o[k] = s[k] @@ -344,18 +369,18 @@ function showFields (data, version, fields) { } function search (data, fields, version, title) { - var field - var tail = fields + let field + const tail = fields while (!field && fields.length) field = tail.shift() fields = [field].concat(tail) - var o + let o if (!field && !tail.length) { o = {} o[version] = {} o[version][title] = data return o } - var index = field.match(/(.+)\[([^\]]+)\]$/) + let index = field.match(/(.+)\[([^\]]+)\]$/) if (index) { field = index[1] index = index[2] @@ -369,10 +394,10 @@ function search (data, fields, version, title) { if (data.length === 1) { return search(data[0], fields, version, title) } - var results = [] + let results = [] data.forEach(function (data, i) { - var tl = title.length - var newt = title.substr(0, tl - fields.join('.').length - 1) + + const tl = title.length + const newt = title.substr(0, tl - fields.join('.').length - 1) + '[' + i + ']' + [''].concat(fields).join('.') results.push(search(data, fields.slice(), version, newt)) }) @@ -395,32 +420,32 @@ function search (data, fields, version, title) { return o } -function printData (data, name, cb) { - var versions = Object.keys(data) - var msg = '' - var msgJson = [] - var includeVersions = versions.length > 1 - var includeFields +function printData (data, name, opts, cb) { + const versions = Object.keys(data) + let msg = '' + let msgJson = [] + const includeVersions = versions.length > 1 + let includeFields versions.forEach(function (v) { - var fields = Object.keys(data[v]) + const fields = Object.keys(data[v]) includeFields = includeFields || (fields.length > 1) - if (npm.config.get('json')) msgJson.push({}) + if (opts.json) msgJson.push({}) fields.forEach(function (f) { - var d = cleanup(data[v][f]) - if (fields.length === 1 && npm.config.get('json')) { + let d = cleanup(data[v][f]) + if (fields.length === 1 && opts.json) { msgJson[msgJson.length - 1][f] = d } if (includeVersions || includeFields || typeof d !== 'string') { - if (npm.config.get('json')) { + if (opts.json) { msgJson[msgJson.length - 1][f] = d } else { d = util.inspect(d, { showHidden: false, depth: 5, colors: npm.color, maxArrayLength: null }) } - } else if (typeof d === 'string' && npm.config.get('json')) { + } else if (typeof d === 'string' && opts.json) { d = JSON.stringify(d) } - if (!npm.config.get('json')) { + if (!opts.json) { if (f && includeFields) f += ' = ' if (d.indexOf('\n') !== -1) d = ' \n' + d msg += (includeVersions ? 
name + '@' + v + ' ' : '') + @@ -429,9 +454,9 @@ function printData (data, name, cb) { }) }) - if (npm.config.get('json')) { + if (opts.json) { if (msgJson.length && Object.keys(msgJson[0]).length === 1) { - var k = Object.keys(msgJson[0])[0] + const k = Object.keys(msgJson[0])[0] msgJson = msgJson.map(function (m) { return m[k] }) } @@ -465,7 +490,7 @@ function cleanup (data) { data.versions = Object.keys(data.versions || {}) } - var keys = Object.keys(data) + let keys = Object.keys(data) keys.forEach(function (d) { if (d.charAt(0) === '_') delete data[d] else if (typeof data[d] === 'object') data[d] = cleanup(data[d]) diff --git a/lib/whoami.js b/lib/whoami.js index e8af6595d15cc..5145b447de4c6 100644 --- a/lib/whoami.js +++ b/lib/whoami.js @@ -1,47 +1,63 @@ -var npm = require('./npm.js') -var output = require('./utils/output.js') +'use strict' + +const BB = require('bluebird') + +const npmConfig = require('./config/figgy-config.js') +const fetch = require('libnpm/fetch') +const figgyPudding = require('figgy-pudding') +const npm = require('./npm.js') +const output = require('./utils/output.js') + +const WhoamiConfig = figgyPudding({ + json: {}, + registry: {} +}) module.exports = whoami whoami.usage = 'npm whoami [--registry ]\n(just prints username according to given registry)' -function whoami (args, silent, cb) { +function whoami ([spec], silent, cb) { // FIXME: need tighter checking on this, but is a breaking change if (typeof cb !== 'function') { cb = silent silent = false } - - var registry = npm.config.get('registry') - if (!registry) return cb(new Error('no default registry set')) - - var auth = npm.config.getCredentialsByURI(registry) - if (auth) { - if (auth.username) { - if (!silent) output(auth.username) - return process.nextTick(cb.bind(this, null, auth.username)) - } else if (auth.token) { - return npm.registry.whoami(registry, { auth: auth }, function (er, username) { - if (er) return cb(er) - if (!username) { - var needNewSession = new Error( + const opts = WhoamiConfig(npmConfig()) + return BB.try(() => { + // First, check if we have a user/pass-based auth + const registry = opts.registry + if (!registry) throw new Error('no default registry set') + return npm.config.getCredentialsByURI(registry) + }).then(({username, token}) => { + if (username) { + return username + } else if (token) { + return fetch.json('/-/whoami', opts.concat({ + spec + })).then(({username}) => { + if (username) { + return username + } else { + throw Object.assign(new Error( 'Your auth token is no longer valid. Please log in again.' - ) - needNewSession.code = 'ENEEDAUTH' - return cb(needNewSession) + ), {code: 'ENEEDAUTH'}) } - - if (!silent) output(username) - cb(null, username) }) + } else { + // At this point, if they have a credentials object, it doesn't have a + // token or auth in it. Probably just the default registry. + throw Object.assign(new Error( + 'This command requires you to be logged in.' + ), {code: 'ENEEDAUTH'}) } - } - - // At this point, if they have a credentials object, it doesn't have a token - // or auth in it. Probably just the default registry. - var needAuth = new Error( - 'this command requires you to be logged in.' 
- ) - needAuth.code = 'ENEEDAUTH' - process.nextTick(cb.bind(this, needAuth)) + }).then(username => { + if (silent) { + } else if (opts.json) { + output(JSON.stringify(username)) + } else { + output(username) + } + return username + }).nodeify(cb) } diff --git a/lib/xmas.js b/lib/xmas.js index 65c0c131abd48..81ab59c8e2236 100644 --- a/lib/xmas.js +++ b/lib/xmas.js @@ -11,7 +11,7 @@ module.exports = function (args, cb) { '\u0020', '\u0020', '\u0020', '\u0020', '\u0020', '\u0020', '\u0020', '\u2E1B', '\u2042', '\u2E2E', '&', '@', '\uFF61' ] - var oc = [21, 33, 34, 35, 36, 37] + var oc = [33, 34, 35, 36, 37] var l = '\u005e' function w (s) { process.stderr.write(s) } @@ -25,6 +25,7 @@ module.exports = function (args, cb) { var O = L * 2 - 2 var S = (M - O) / 2 for (i = 0; i < S; i++) w(' ') + w(x + '\u001b[21m') w(x + '\u001b[32m' + f) for (i = 0; i < O; i++) { w( @@ -33,6 +34,7 @@ module.exports = function (args, cb) { ) } w(x + '\u001b[32m' + b + '\n') + w(x + '\u001b[0m') } w(' ') for (i = 1; i < H; i++) w('\u001b[32m' + l) diff --git a/netlify.toml b/netlify.toml new file mode 100644 index 0000000000000..c1b75b83027b1 --- /dev/null +++ b/netlify.toml @@ -0,0 +1,4 @@ +[build] + base = "docs" + command = "npm run build" + publish = "public" diff --git a/node_modules/.bin/mkdirp b/node_modules/.bin/mkdirp deleted file mode 120000 index 017896cebb141..0000000000000 --- a/node_modules/.bin/mkdirp +++ /dev/null @@ -1 +0,0 @@ -../mkdirp/bin/cmd.js \ No newline at end of file diff --git a/node_modules/.bin/nopt b/node_modules/.bin/nopt deleted file mode 120000 index 6b6566ea7febb..0000000000000 --- a/node_modules/.bin/nopt +++ /dev/null @@ -1 +0,0 @@ -../nopt/bin/nopt.js \ No newline at end of file diff --git a/node_modules/.bin/opener b/node_modules/.bin/opener deleted file mode 120000 index 891b847f9117f..0000000000000 --- a/node_modules/.bin/opener +++ /dev/null @@ -1 +0,0 @@ -../opener/bin/opener-bin.js \ No newline at end of file diff --git a/node_modules/.bin/rimraf b/node_modules/.bin/rimraf deleted file mode 120000 index 4cd49a49ddfc1..0000000000000 --- a/node_modules/.bin/rimraf +++ /dev/null @@ -1 +0,0 @@ -../rimraf/bin.js \ No newline at end of file diff --git a/node_modules/.bin/semver b/node_modules/.bin/semver deleted file mode 120000 index 317eb293d8e12..0000000000000 --- a/node_modules/.bin/semver +++ /dev/null @@ -1 +0,0 @@ -../semver/bin/semver \ No newline at end of file diff --git a/node_modules/.bin/which b/node_modules/.bin/which deleted file mode 120000 index f62471c8511ba..0000000000000 --- a/node_modules/.bin/which +++ /dev/null @@ -1 +0,0 @@ -../which/bin/which \ No newline at end of file diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 196f8f1209ac2..fa76105d97b99 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -1,6 +1,16 @@ ## Automatically generated dev dependency ignores +/@babel/code-frame +/@babel/generator +/@babel/helper-function-name +/@babel/helper-get-function-arity +/@babel/helper-split-export-declaration +/@babel/highlight +/@babel/parser +/@babel/template +/@babel/traverse +/@babel/types +/@blueoak/list /@types/caseless -/@types/form-data /@types/node /@types/request /@types/tough-cookie @@ -8,15 +18,18 @@ /acorn-jsx /ajv-keywords /ansi-escapes +/append-transform +/arg /argparse /array-find-index /array-includes -/array-union -/array-uniq -/arrify /babel-code-frame /bind-obj-methods +/bl +/browser-process-hrtime /browser-request +/builtin-modules +/caching-transform /caller /caller-path /callsites @@ -28,26 +41,30 
@@ /cloudant-follow /coffee-script /color-support +/commander +/commondir /connect /contains-path -/core-js +/convert-source-map +/correct-license-metadata /couchapp /coveralls +/cp-file /debug-log /deep-equal /deep-is -/define-properties +/default-require-extensions /deglob -/del /diff /docopt /doctrine +/domain-browser /ee-first +/emoji-regex /encodeurl /error-ex /errs -/es-abstract -/es-to-primitive +/es6-error /escape-html /eslint /eslint-config-standard @@ -61,6 +78,7 @@ /eslint-plugin-standard /eslint-scope /eslint-visitor-keys +/esm /espree /esprima /esquery @@ -70,40 +88,40 @@ /events-to-array /external-editor /fast-levenshtein -/fbjs /figures /file-entry-cache /finalhandler +/find-cache-dir /find-root /flat-cache -/foreach /foreground-child /fs-access +/fs-constants /fs-exists-cached -/function-bind /function-loop /functional-red-black-tree /get-stdin /globals -/globby -/has +/growl +/handlebars /has-ansi +/hasha /hock /http-proxy /ignore /inquirer /is-arrayish -/is-callable -/is-date-object -/is-path-cwd -/is-path-in-cwd /is-promise -/is-regex /is-resolvable -/is-symbol -/isomorphic-fetch +/istanbul-lib-coverage +/istanbul-lib-hook +/istanbul-lib-instrument +/istanbul-lib-report +/istanbul-lib-source-maps +/istanbul-reports /js-tokens /js-yaml +/jsesc /json /json-parse-errback /json-stable-stringify-without-jsonify @@ -112,28 +130,34 @@ /levn /licensee /load-json-file -/lodash.isempty +/lodash +/lodash.flattendeep /log-driver /loose-envify +/make-error /marked /marked-man +/merge-source-map +/mimic-fn +/minipass /nano /natural-compare -/node-fetch +/neo-async +/nested-error-stacks +/npm-license-corrections /npm-registry-couchapp /npm-registry-mock /null-check /nyc -/object-keys /on-finished /onetime /optimist /optionator /own-or /own-or-env +/package-hash /parse-json /parseurl -/path-parse /path-type /pinkie /pinkie-promise @@ -144,11 +168,11 @@ /pluralize /prelude-ls /progress -/promise /prop-types /querystring /read-pkg /read-pkg-up +/release-zalgo /require-inject /require-uncached /resolve @@ -157,15 +181,16 @@ /run-parallel /rx-lite /rx-lite-aggregates -/setimmediate /simple-concat /slice-ansi /source-map /source-map-support +/spawn-wrap /spdx-compare /spdx-expression-validate +/spdx-osi /spdx-ranges -/spdx-satisfies +/spdx-whitelisted /sprintf-js /stack-utils /standard @@ -177,18 +202,24 @@ /tap /tap-mocha-reporter /tap-parser +/tar-stream +/test-exclude /tmatch /tmp +/to-fast-properties +/trim-right /trivial-deferred +/ts-node /tsame /type-check -/ua-parser-js +/typescript +/uglify-js /unicode-length /uniq /url /utils-merge /watch -/whatwg-fetch /wordwrap /write /yapool +/yn diff --git a/node_modules/.package-map.json.gz b/node_modules/.package-map.json.gz deleted file mode 100644 index 223f2e45022b5..0000000000000 Binary files a/node_modules/.package-map.json.gz and /dev/null differ diff --git a/node_modules/@types/caseless/LICENSE b/node_modules/@types/caseless/LICENSE deleted file mode 100644 index 21071075c2459..0000000000000 --- a/node_modules/@types/caseless/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ - MIT License - - Copyright (c) Microsoft Corporation. All rights reserved. 
- - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - SOFTWARE diff --git a/node_modules/@types/caseless/README.md b/node_modules/@types/caseless/README.md deleted file mode 100644 index b0bddadf900c6..0000000000000 --- a/node_modules/@types/caseless/README.md +++ /dev/null @@ -1,16 +0,0 @@ -# Installation -> `npm install --save @types/caseless` - -# Summary -This package contains type definitions for caseless (https://github.com/mikeal/caseless). - -# Details -Files were exported from https://www.github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/caseless - -Additional Details - * Last updated: Wed, 17 Jan 2018 21:35:26 GMT - * Dependencies: none - * Global values: none - -# Credits -These definitions were written by downace , Matt R. Wilson . diff --git a/node_modules/@types/caseless/index.d.ts b/node_modules/@types/caseless/index.d.ts deleted file mode 100644 index 9b19f8a150be6..0000000000000 --- a/node_modules/@types/caseless/index.d.ts +++ /dev/null @@ -1,34 +0,0 @@ -// Type definitions for caseless 0.12 -// Project: https://github.com/mikeal/caseless -// Definitions by: downace -// Matt R. 
Wilson -// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped -// TypeScript Version: 2.2 - -type KeyType = string; -type ValueType = any; -type RawDict = object; - -export interface Caseless { - set(name: KeyType, value: ValueType, clobber?: boolean): KeyType | false; - set(dict: RawDict): void; - has(name: KeyType): KeyType | false; - get(name: KeyType): ValueType | undefined; - swap(name: KeyType): void; - del(name: KeyType): boolean; -} - -export interface Httpified { - headers: RawDict; - setHeader(name: KeyType, value: ValueType, clobber?: boolean): KeyType | false; - setHeader(dict: RawDict): void; - hasHeader(name: KeyType): KeyType | false; - getHeader(name: KeyType): ValueType | undefined; - removeHeader(name: KeyType): boolean; -} - -export function httpify(resp: object, headers: RawDict): Caseless; - -declare function caseless(dict?: RawDict): Caseless; - -export default caseless; diff --git a/node_modules/@types/caseless/package.json b/node_modules/@types/caseless/package.json deleted file mode 100644 index b5645cd5604e8..0000000000000 --- a/node_modules/@types/caseless/package.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "_from": "@types/caseless@*", - "_id": "@types/caseless@0.12.1", - "_inBundle": false, - "_integrity": "sha512-FhlMa34NHp9K5MY1Uz8yb+ZvuX0pnvn3jScRSNAb75KHGB8d3rEU6hqMs3Z2vjuytcMfRg6c5CHMc3wtYyD2/A==", - "_location": "/@types/caseless", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "@types/caseless@*", - "name": "@types/caseless", - "escapedName": "@types%2fcaseless", - "scope": "@types", - "rawSpec": "*", - "saveSpec": null, - "fetchSpec": "*" - }, - "_requiredBy": [ - "/@types/request" - ], - "_resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.1.tgz", - "_shasum": "9794c69c8385d0192acc471a540d1f8e0d16218a", - "_spec": "@types/caseless@*", - "_where": "/Users/zkat/Documents/code/work/npm/node_modules/@types/request", - "bugs": { - "url": "https://github.com/DefinitelyTyped/DefinitelyTyped/issues" - }, - "bundleDependencies": false, - "contributors": [ - { - "name": "downace", - "url": "https://github.com/downace" - }, - { - "name": "Matt R. Wilson", - "url": "https://github.com/mastermatt" - } - ], - "dependencies": {}, - "deprecated": false, - "description": "TypeScript definitions for caseless", - "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped#readme", - "license": "MIT", - "main": "", - "name": "@types/caseless", - "repository": { - "type": "git", - "url": "git+https://github.com/DefinitelyTyped/DefinitelyTyped.git" - }, - "scripts": {}, - "typeScriptVersion": "2.2", - "typesPublisherContentHash": "9867f2283cf33abfd1a8875bf1d88add8d1133c5972167fba16c4699ceecc74e", - "version": "0.12.1" -} diff --git a/node_modules/@types/form-data/LICENSE b/node_modules/@types/form-data/LICENSE deleted file mode 100644 index 4b1ad51b2f0ef..0000000000000 --- a/node_modules/@types/form-data/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ - MIT License - - Copyright (c) Microsoft Corporation. All rights reserved. 
- - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - SOFTWARE diff --git a/node_modules/@types/form-data/README.md b/node_modules/@types/form-data/README.md deleted file mode 100644 index 36383f5772b32..0000000000000 --- a/node_modules/@types/form-data/README.md +++ /dev/null @@ -1,16 +0,0 @@ -# Installation -> `npm install --save @types/form-data` - -# Summary -This package contains type definitions for form-data (https://github.com/form-data/form-data). - -# Details -Files were exported from https://www.github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/form-data - -Additional Details - * Last updated: Wed, 08 Nov 2017 22:42:24 GMT - * Dependencies: stream, http, node - * Global values: none - -# Credits -These definitions were written by Carlos Ballesteros Velasco , Leon Yu , BendingBender . 
diff --git a/node_modules/@types/form-data/index.d.ts b/node_modules/@types/form-data/index.d.ts deleted file mode 100644 index 8c20812697fbb..0000000000000 --- a/node_modules/@types/form-data/index.d.ts +++ /dev/null @@ -1,43 +0,0 @@ -// Type definitions for form-data 2.2 -// Project: https://github.com/form-data/form-data -// Definitions by: Carlos Ballesteros Velasco -// Leon Yu -// BendingBender -// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped -// TypeScript Version: 2.3 - -// Imported from: https://github.com/soywiz/typescript-node-definitions/form-data.d.ts - -/// -import * as stream from 'stream'; -import * as http from 'http'; - -export = FormData; - -declare class FormData extends stream.Readable { - append(key: string, value: any, options?: FormData.AppendOptions | string): void; - getHeaders(): FormData.Headers; - submit(params: string | FormData.SubmitOptions, callback?: (error: Error | undefined, response: http.IncomingMessage) => void): http.ClientRequest; - getBoundary(): string; - getLength(callback: (err: Error | undefined, length: number) => void): void; - getLengthSync(): number; - hasKnownLength(): boolean; -} - -declare namespace FormData { - interface Headers { - [key: string]: any; - } - - interface AppendOptions { - header?: string | Headers; - knownLength?: number; - filename?: string; - filepath?: string; - contentType?: string; - } - - interface SubmitOptions extends http.RequestOptions { - protocol?: 'https:' | 'http:'; - } -} diff --git a/node_modules/@types/form-data/package.json b/node_modules/@types/form-data/package.json deleted file mode 100644 index 9d5dee5a802b0..0000000000000 --- a/node_modules/@types/form-data/package.json +++ /dev/null @@ -1,61 +0,0 @@ -{ - "_from": "@types/form-data@*", - "_id": "@types/form-data@2.2.1", - "_inBundle": false, - "_integrity": "sha512-JAMFhOaHIciYVh8fb5/83nmuO/AHwmto+Hq7a9y8FzLDcC1KCU344XDOMEmahnrTFlHjgh4L0WJFczNIX2GxnQ==", - "_location": "/@types/form-data", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "@types/form-data@*", - "name": "@types/form-data", - "escapedName": "@types%2fform-data", - "scope": "@types", - "rawSpec": "*", - "saveSpec": null, - "fetchSpec": "*" - }, - "_requiredBy": [ - "/@types/request" - ], - "_resolved": "https://registry.npmjs.org/@types/form-data/-/form-data-2.2.1.tgz", - "_shasum": "ee2b3b8eaa11c0938289953606b745b738c54b1e", - "_spec": "@types/form-data@*", - "_where": "/Users/zkat/Documents/code/work/npm/node_modules/@types/request", - "bugs": { - "url": "https://github.com/DefinitelyTyped/DefinitelyTyped/issues" - }, - "bundleDependencies": false, - "contributors": [ - { - "name": "Carlos Ballesteros Velasco", - "url": "https://github.com/soywiz" - }, - { - "name": "Leon Yu", - "url": "https://github.com/leonyu" - }, - { - "name": "BendingBender", - "url": "https://github.com/BendingBender" - } - ], - "dependencies": { - "@types/node": "*" - }, - "deprecated": false, - "description": "TypeScript definitions for form-data", - "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped#readme", - "license": "MIT", - "main": "", - "name": "@types/form-data", - "repository": { - "type": "git", - "url": "git+https://github.com/DefinitelyTyped/DefinitelyTyped.git" - }, - "scripts": {}, - "typeScriptVersion": "2.3", - "typesPublisherContentHash": "6b123d0ea13d7814315d38f664835e20d88c9a72981615a76f3814b8d5ed6a5a", - "version": "2.2.1" -} diff --git a/node_modules/@types/node/LICENSE b/node_modules/@types/node/LICENSE deleted 
file mode 100644 index 21071075c2459..0000000000000 --- a/node_modules/@types/node/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ - MIT License - - Copyright (c) Microsoft Corporation. All rights reserved. - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - SOFTWARE diff --git a/node_modules/@types/node/README.md b/node_modules/@types/node/README.md deleted file mode 100644 index 7fa2d094e0e8b..0000000000000 --- a/node_modules/@types/node/README.md +++ /dev/null @@ -1,16 +0,0 @@ -# Installation -> `npm install --save @types/node` - -# Summary -This package contains type definitions for Node.js (http://nodejs.org/). - -# Details -Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node - -Additional Details - * Last updated: Tue, 14 Aug 2018 00:52:13 GMT - * Dependencies: none - * Global values: Buffer, NodeJS, SlowBuffer, Symbol, __dirname, __filename, clearImmediate, clearInterval, clearTimeout, console, exports, global, module, process, require, setImmediate, setInterval, setTimeout - -# Credits -These definitions were written by Microsoft TypeScript , DefinitelyTyped , Parambir Singh , Christian Vaagland Tellnes , Wilco Bakker , Nicolas Voigt , Chigozirim C. , Flarna , Mariusz Wiktorczyk , wwwy3y3 , Deividas Bakanas , Kelvin Jin , Alvis HT Tang , Sebastian Silbermann , Hannes Magnusson , Alberto Schiabel , Klaus Meinhardt , Huw , Nicolas Even , Bruno Scheufler , Mohsen Azimi , Hoàng Văn Khải , Alexander T. , Lishude , Andrew Makarov , Zane Hannan AU , Eugene Y. Q. Shen . diff --git a/node_modules/@types/node/index.d.ts b/node_modules/@types/node/index.d.ts deleted file mode 100644 index 7745372a21ecb..0000000000000 --- a/node_modules/@types/node/index.d.ts +++ /dev/null @@ -1,8101 +0,0 @@ -// Type definitions for Node.js 10.7.x -// Project: http://nodejs.org/ -// Definitions by: Microsoft TypeScript -// DefinitelyTyped -// Parambir Singh -// Christian Vaagland Tellnes -// Wilco Bakker -// Nicolas Voigt -// Chigozirim C. -// Flarna -// Mariusz Wiktorczyk -// wwwy3y3 -// Deividas Bakanas -// Kelvin Jin -// Alvis HT Tang -// Sebastian Silbermann -// Hannes Magnusson -// Alberto Schiabel -// Klaus Meinhardt -// Huw -// Nicolas Even -// Bruno Scheufler -// Mohsen Azimi -// Hoàng Văn Khải -// Alexander T. -// Lishude -// Andrew Makarov -// Zane Hannan AU -// Eugene Y. Q. 
Shen -// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped - -/** inspector module types */ -/// - -// This needs to be global to avoid TS2403 in case lib.dom.d.ts is present in the same build -interface Console { - Console: NodeJS.ConsoleConstructor; - /** - * A simple assertion test that verifies whether `value` is truthy. - * If it is not, an `AssertionError` is thrown. - * If provided, the error `message` is formatted using `util.format()` and used as the error message. - */ - assert(value: any, message?: string, ...optionalParams: any[]): void; - /** - * When `stdout` is a TTY, calling `console.clear()` will attempt to clear the TTY. - * When `stdout` is not a TTY, this method does nothing. - */ - clear(): void; - /** - * Maintains an internal counter specific to `label` and outputs to `stdout` the number of times `console.count()` has been called with the given `label`. - */ - count(label?: string): void; - /** - * Resets the internal counter specific to `label`. - */ - countReset(label?: string): void; - /** - * The `console.debug()` function is an alias for {@link console.log()}. - */ - debug(message?: any, ...optionalParams: any[]): void; - /** - * Uses {@link util.inspect()} on `obj` and prints the resulting string to `stdout`. - * This function bypasses any custom `inspect()` function defined on `obj`. - */ - dir(obj: any, options?: NodeJS.InspectOptions): void; - /** - * This method calls {@link console.log()} passing it the arguments received. Please note that this method does not produce any XML formatting - */ - dirxml(...data: any[]): void; - /** - * Prints to `stderr` with newline. - */ - error(message?: any, ...optionalParams: any[]): void; - /** - * Increases indentation of subsequent lines by two spaces. - * If one or more `label`s are provided, those are printed first without the additional indentation. - */ - group(...label: any[]): void; - /** - * The `console.groupCollapsed()` function is an alias for {@link console.group()}. - */ - groupCollapsed(): void; - /** - * Decreases indentation of subsequent lines by two spaces. - */ - groupEnd(): void; - /** - * The {@link console.info()} function is an alias for {@link console.log()}. - */ - info(message?: any, ...optionalParams: any[]): void; - /** - * Prints to `stdout` with newline. - */ - log(message?: any, ...optionalParams: any[]): void; - /** - * This method does not display anything unless used in the inspector. - * Prints to `stdout` the array `array` formatted as a table. - */ - table(tabularData: any, properties?: string[]): void; - /** - * Starts a timer that can be used to compute the duration of an operation. Timers are identified by a unique `label`. - */ - time(label?: string): void; - /** - * Stops a timer that was previously started by calling {@link console.time()} and prints the result to `stdout`. - */ - timeEnd(label?: string): void; - /** - * Prints to `stderr` the string 'Trace :', followed by the {@link util.format()} formatted message and stack trace to the current position in the code. - */ - trace(message?: any, ...optionalParams: any[]): void; - /** - * The {@link console.warn()} function is an alias for {@link console.error()}. - */ - warn(message?: any, ...optionalParams: any[]): void; - - // --- Inspector mode only --- - /** - * This method does not display anything unless used in the inspector. - * Starts a JavaScript CPU profile with an optional label. - */ - profile(label?: string): void; - /** - * This method does not display anything unless used in the inspector. 
- * Stops the current JavaScript CPU profiling session if one has been started and prints the report to the Profiles panel of the inspector. - */ - profileEnd(): void; - /** - * This method does not display anything unless used in the inspector. - * Adds an event with the label `label` to the Timeline panel of the inspector. - */ - timeStamp(label?: string): void; -} - -interface Error { - stack?: string; -} - -// Declare "static" methods in Error -interface ErrorConstructor { - /** Create .stack property on a target object */ - captureStackTrace(targetObject: Object, constructorOpt?: Function): void; - - /** - * Optional override for formatting stack traces - * - * @see https://github.com/v8/v8/wiki/Stack%20Trace%20API#customizing-stack-traces - */ - prepareStackTrace?: (err: Error, stackTraces: NodeJS.CallSite[]) => any; - - stackTraceLimit: number; -} - -// compat for TypeScript 1.8 -// if you use with --target es3 or --target es5 and use below definitions, -// use the lib.es6.d.ts that is bundled with TypeScript 1.8. -interface MapConstructor { } -interface WeakMapConstructor { } -interface SetConstructor { } -interface WeakSetConstructor { } - -// Forward-declare needed types from lib.es2015.d.ts (in case users are using `--lib es5`) -interface Iterable { } -interface Iterator { - next(value?: any): IteratorResult; -} -interface IteratorResult { } -interface AsyncIterableIterator {} -interface SymbolConstructor { - readonly observable: symbol; - readonly iterator: symbol; - readonly asyncIterator: symbol; -} -declare var Symbol: SymbolConstructor; - -// Node.js ESNEXT support -interface String { - /** Removes whitespace from the left end of a string. */ - trimLeft(): string; - /** Removes whitespace from the right end of a string. */ - trimRight(): string; -} - -/************************************************ -* * -* GLOBAL * -* * -************************************************/ -declare var process: NodeJS.Process; -declare var global: NodeJS.Global; -declare var console: Console; - -declare var __filename: string; -declare var __dirname: string; - -declare function setTimeout(callback: (...args: any[]) => void, ms: number, ...args: any[]): NodeJS.Timer; -declare namespace setTimeout { - export function __promisify__(ms: number): Promise; - export function __promisify__(ms: number, value: T): Promise; -} -declare function clearTimeout(timeoutId: NodeJS.Timer): void; -declare function setInterval(callback: (...args: any[]) => void, ms: number, ...args: any[]): NodeJS.Timer; -declare function clearInterval(intervalId: NodeJS.Timer): void; -declare function setImmediate(callback: (...args: any[]) => void, ...args: any[]): any; -declare namespace setImmediate { - export function __promisify__(): Promise; - export function __promisify__(value: T): Promise; -} -declare function clearImmediate(immediateId: any): void; - -// TODO: change to `type NodeRequireFunction = (id: string) => any;` in next mayor version. 
-interface NodeRequireFunction { - /* tslint:disable-next-line:callable-types */ - (id: string): any; -} - -interface NodeRequire extends NodeRequireFunction { - resolve: RequireResolve; - cache: any; - extensions: NodeExtensions; - main: NodeModule | undefined; -} - -interface RequireResolve { - (id: string, options?: { paths?: string[]; }): string; - paths(request: string): string[] | null; -} - -interface NodeExtensions { - '.js': (m: NodeModule, filename: string) => any; - '.json': (m: NodeModule, filename: string) => any; - '.node': (m: NodeModule, filename: string) => any; - [ext: string]: (m: NodeModule, filename: string) => any; -} - -declare var require: NodeRequire; - -interface NodeModule { - exports: any; - require: NodeRequireFunction; - id: string; - filename: string; - loaded: boolean; - parent: NodeModule | null; - children: NodeModule[]; - paths: string[]; -} - -declare var module: NodeModule; - -// Same as module.exports -declare var exports: any; -declare var SlowBuffer: { - new(str: string, encoding?: string): Buffer; - new(size: number): Buffer; - new(size: Uint8Array): Buffer; - new(array: any[]): Buffer; - prototype: Buffer; - isBuffer(obj: any): boolean; - byteLength(string: string, encoding?: string): number; - concat(list: Buffer[], totalLength?: number): Buffer; -}; - -// Buffer class -type BufferEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; -interface Buffer extends Uint8Array { - write(string: string, offset?: number, length?: number, encoding?: string): number; - toString(encoding?: string, start?: number, end?: number): string; - toJSON(): { type: 'Buffer', data: any[] }; - equals(otherBuffer: Uint8Array): boolean; - compare(otherBuffer: Uint8Array, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; - copy(targetBuffer: Uint8Array, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; - slice(start?: number, end?: number): Buffer; - writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; - writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; - writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; - writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; - readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; - readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; - readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; - readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; - readUInt8(offset: number, noAssert?: boolean): number; - readUInt16LE(offset: number, noAssert?: boolean): number; - readUInt16BE(offset: number, noAssert?: boolean): number; - readUInt32LE(offset: number, noAssert?: boolean): number; - readUInt32BE(offset: number, noAssert?: boolean): number; - readInt8(offset: number, noAssert?: boolean): number; - readInt16LE(offset: number, noAssert?: boolean): number; - readInt16BE(offset: number, noAssert?: boolean): number; - readInt32LE(offset: number, noAssert?: boolean): number; - readInt32BE(offset: number, noAssert?: boolean): number; - readFloatLE(offset: number, noAssert?: boolean): number; - readFloatBE(offset: number, noAssert?: boolean): number; - readDoubleLE(offset: number, noAssert?: boolean): number; - readDoubleBE(offset: number, noAssert?: boolean): number; - swap16(): Buffer; - swap32(): Buffer; - 
swap64(): Buffer; - writeUInt8(value: number, offset: number, noAssert?: boolean): number; - writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; - writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; - writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; - writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; - writeInt8(value: number, offset: number, noAssert?: boolean): number; - writeInt16LE(value: number, offset: number, noAssert?: boolean): number; - writeInt16BE(value: number, offset: number, noAssert?: boolean): number; - writeInt32LE(value: number, offset: number, noAssert?: boolean): number; - writeInt32BE(value: number, offset: number, noAssert?: boolean): number; - writeFloatLE(value: number, offset: number, noAssert?: boolean): number; - writeFloatBE(value: number, offset: number, noAssert?: boolean): number; - writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; - writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; - fill(value: any, offset?: number, end?: number): this; - indexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: string): number; - lastIndexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: string): number; - entries(): IterableIterator<[number, number]>; - includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; - keys(): IterableIterator; - values(): IterableIterator; -} - -/** - * Raw data is stored in instances of the Buffer class. - * A Buffer is similar to an array of integers but corresponds to a raw memory allocation outside the V8 heap. A Buffer cannot be resized. - * Valid string encodings: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' - */ -declare var Buffer: { - /** - * Allocates a new buffer containing the given {str}. - * - * @param str String to store in buffer. - * @param encoding encoding to use, optional. Default is 'utf8' - * @deprecated since v10.0.0 - Use `Buffer.from(string[, encoding])` instead. - */ - new(str: string, encoding?: string): Buffer; - /** - * Allocates a new buffer of {size} octets. - * - * @param size count of octets to allocate. - * @deprecated since v10.0.0 - Use `Buffer.alloc()` instead (also see `Buffer.allocUnsafe()`). - */ - new(size: number): Buffer; - /** - * Allocates a new buffer containing the given {array} of octets. - * - * @param array The octets to store. - * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead. - */ - new(array: Uint8Array): Buffer; - /** - * Produces a Buffer backed by the same allocated memory as - * the given {ArrayBuffer}. - * - * - * @param arrayBuffer The ArrayBuffer with which to share memory. - * @deprecated since v10.0.0 - Use `Buffer.from(arrayBuffer[, byteOffset[, length]])` instead. - */ - new(arrayBuffer: ArrayBuffer): Buffer; - /** - * Allocates a new buffer containing the given {array} of octets. - * - * @param array The octets to store. - * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead. - */ - new(array: any[]): Buffer; - /** - * Copies the passed {buffer} data onto a new {Buffer} instance. - * - * @param buffer The buffer to copy. - * @deprecated since v10.0.0 - Use `Buffer.from(buffer)` instead. 
- */ - new(buffer: Buffer): Buffer; - prototype: Buffer; - /** - * When passed a reference to the .buffer property of a TypedArray instance, - * the newly created Buffer will share the same allocated memory as the TypedArray. - * The optional {byteOffset} and {length} arguments specify a memory range - * within the {arrayBuffer} that will be shared by the Buffer. - * - * @param arrayBuffer The .buffer property of any TypedArray or a new ArrayBuffer() - */ - from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; - // from(arrayBuffer: SharedArrayBuffer, byteOffset?: number, length?: number): Buffer; - /** - * Creates a new Buffer using the passed {data} - * @param data data to create a new Buffer - */ - from(data: any[]): Buffer; - from(data: Uint8Array): Buffer; - /** - * Creates a new Buffer containing the given JavaScript string {str}. - * If provided, the {encoding} parameter identifies the character encoding. - * If not provided, {encoding} defaults to 'utf8'. - */ - from(str: string, encoding?: string): Buffer; - /** - * Creates a new Buffer using the passed {data} - * @param values to create a new Buffer - */ - of(...items: number[]): Buffer; - /** - * Returns true if {obj} is a Buffer - * - * @param obj object to test. - */ - isBuffer(obj: any): obj is Buffer; - /** - * Returns true if {encoding} is a valid encoding argument. - * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' - * - * @param encoding string to test. - */ - isEncoding(encoding: string): boolean | undefined; - /** - * Gives the actual byte length of a string. encoding defaults to 'utf8'. - * This is not the same as String.prototype.length since that returns the number of characters in a string. - * - * @param string string to test. - * @param encoding encoding used to evaluate (defaults to 'utf8') - */ - byteLength(string: string | NodeJS.TypedArray | DataView | ArrayBuffer /*| SharedArrayBuffer */, encoding?: string): number; - /** - * Returns a buffer which is the result of concatenating all the buffers in the list together. - * - * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. - * If the list has exactly one item, then the first item of the list is returned. - * If the list has more than one item, then a new Buffer is created. - * - * @param list An array of Buffer objects to concatenate - * @param totalLength Total length of the buffers when concatenated. - * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. - */ - concat(list: Uint8Array[], totalLength?: number): Buffer; - /** - * The same as buf1.compare(buf2). - */ - compare(buf1: Uint8Array, buf2: Uint8Array): number; - /** - * Allocates a new buffer of {size} octets. - * - * @param size count of octets to allocate. - * @param fill if specified, buffer will be initialized by calling buf.fill(fill). - * If parameter is omitted, buffer will be filled with zeros. - * @param encoding encoding used for call to buf.fill while initalizing - */ - alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; - /** - * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents - * of the newly created Buffer are unknown and may contain sensitive data. 
- * - * @param size count of octets to allocate - */ - allocUnsafe(size: number): Buffer; - /** - * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents - * of the newly created Buffer are unknown and may contain sensitive data. - * - * @param size count of octets to allocate - */ - allocUnsafeSlow(size: number): Buffer; - /** - * This is the number of bytes used to determine the size of pre-allocated, internal Buffer instances used for pooling. This value may be modified. - */ - poolSize: number; -}; - -/************************************************ -* * -* GLOBAL INTERFACES * -* * -************************************************/ -declare namespace NodeJS { - export interface InspectOptions { - showHidden?: boolean; - depth?: number | null; - colors?: boolean; - customInspect?: boolean; - showProxy?: boolean; - maxArrayLength?: number | null; - breakLength?: number; - compact?: boolean; - } - - export interface ConsoleConstructor { - prototype: Console; - new(stdout: WritableStream, stderr?: WritableStream): Console; - } - - export interface CallSite { - /** - * Value of "this" - */ - getThis(): any; - - /** - * Type of "this" as a string. - * This is the name of the function stored in the constructor field of - * "this", if available. Otherwise the object's [[Class]] internal - * property. - */ - getTypeName(): string | null; - - /** - * Current function - */ - getFunction(): Function | undefined; - - /** - * Name of the current function, typically its name property. - * If a name property is not available an attempt will be made to try - * to infer a name from the function's context. - */ - getFunctionName(): string | null; - - /** - * Name of the property [of "this" or one of its prototypes] that holds - * the current function - */ - getMethodName(): string | null; - - /** - * Name of the script [if this function was defined in a script] - */ - getFileName(): string | null; - - /** - * Current line number [if this function was defined in a script] - */ - getLineNumber(): number | null; - - /** - * Current column number [if this function was defined in a script] - */ - getColumnNumber(): number | null; - - /** - * A call site object representing the location where eval was called - * [if this function was created using a call to eval] - */ - getEvalOrigin(): string | undefined; - - /** - * Is this a toplevel invocation, that is, is "this" the global object? - */ - isToplevel(): boolean; - - /** - * Does this call take place in code defined by a call to eval? - */ - isEval(): boolean; - - /** - * Is this call in native V8 code? - */ - isNative(): boolean; - - /** - * Is this a constructor call? 
- */ - isConstructor(): boolean; - } - - export interface ErrnoException extends Error { - errno?: number; - code?: string; - path?: string; - syscall?: string; - stack?: string; - } - - export class EventEmitter { - addListener(event: string | symbol, listener: (...args: any[]) => void): this; - on(event: string | symbol, listener: (...args: any[]) => void): this; - once(event: string | symbol, listener: (...args: any[]) => void): this; - removeListener(event: string | symbol, listener: (...args: any[]) => void): this; - off(event: string | symbol, listener: (...args: any[]) => void): this; - removeAllListeners(event?: string | symbol): this; - setMaxListeners(n: number): this; - getMaxListeners(): number; - listeners(event: string | symbol): Function[]; - rawListeners(event: string | symbol): Function[]; - emit(event: string | symbol, ...args: any[]): boolean; - listenerCount(type: string | symbol): number; - // Added in Node 6... - prependListener(event: string | symbol, listener: (...args: any[]) => void): this; - prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; - eventNames(): Array; - } - - export interface ReadableStream extends EventEmitter { - readable: boolean; - read(size?: number): string | Buffer; - setEncoding(encoding: string): this; - pause(): this; - resume(): this; - isPaused(): boolean; - pipe(destination: T, options?: { end?: boolean; }): T; - unpipe(destination?: T): this; - unshift(chunk: string): void; - unshift(chunk: Buffer): void; - wrap(oldStream: ReadableStream): this; - [Symbol.asyncIterator](): AsyncIterableIterator; - } - - export interface WritableStream extends EventEmitter { - writable: boolean; - write(buffer: Buffer | string, cb?: Function): boolean; - write(str: string, encoding?: string, cb?: Function): boolean; - end(cb?: Function): void; - end(buffer: Buffer, cb?: Function): void; - end(str: string, cb?: Function): void; - end(str: string, encoding?: string, cb?: Function): void; - } - - export interface ReadWriteStream extends ReadableStream, WritableStream { } - - export interface Events extends EventEmitter { } - - export interface Domain extends Events { - run(fn: Function): void; - add(emitter: Events): void; - remove(emitter: Events): void; - bind(cb: (err: Error, data: any) => any): any; - intercept(cb: (data: any) => any): any; - - addListener(event: string, listener: (...args: any[]) => void): this; - on(event: string, listener: (...args: any[]) => void): this; - once(event: string, listener: (...args: any[]) => void): this; - removeListener(event: string, listener: (...args: any[]) => void): this; - removeAllListeners(event?: string): this; - } - - export interface MemoryUsage { - rss: number; - heapTotal: number; - heapUsed: number; - external: number; - } - - export interface CpuUsage { - user: number; - system: number; - } - - export interface ProcessVersions { - http_parser: string; - node: string; - v8: string; - ares: string; - uv: string; - zlib: string; - modules: string; - openssl: string; - } - - type Platform = 'aix' - | 'android' - | 'darwin' - | 'freebsd' - | 'linux' - | 'openbsd' - | 'sunos' - | 'win32' - | 'cygwin'; - - type Signals = - "SIGABRT" | "SIGALRM" | "SIGBUS" | "SIGCHLD" | "SIGCONT" | "SIGFPE" | "SIGHUP" | "SIGILL" | "SIGINT" | "SIGIO" | - "SIGIOT" | "SIGKILL" | "SIGPIPE" | "SIGPOLL" | "SIGPROF" | "SIGPWR" | "SIGQUIT" | "SIGSEGV" | "SIGSTKFLT" | - "SIGSTOP" | "SIGSYS" | "SIGTERM" | "SIGTRAP" | "SIGTSTP" | "SIGTTIN" | "SIGTTOU" | "SIGUNUSED" | "SIGURG" | - "SIGUSR1" | "SIGUSR2" | 
"SIGVTALRM" | "SIGWINCH" | "SIGXCPU" | "SIGXFSZ" | "SIGBREAK" | "SIGLOST" | "SIGINFO"; - - type BeforeExitListener = (code: number) => void; - type DisconnectListener = () => void; - type ExitListener = (code: number) => void; - type RejectionHandledListener = (promise: Promise) => void; - type UncaughtExceptionListener = (error: Error) => void; - type UnhandledRejectionListener = (reason: any, promise: Promise) => void; - type WarningListener = (warning: Error) => void; - type MessageListener = (message: any, sendHandle: any) => void; - type SignalsListener = (signal: Signals) => void; - type NewListenerListener = (type: string | symbol, listener: (...args: any[]) => void) => void; - type RemoveListenerListener = (type: string | symbol, listener: (...args: any[]) => void) => void; - - export interface Socket extends ReadWriteStream { - isTTY?: true; - } - - export interface ProcessEnv { - [key: string]: string | undefined; - } - - export interface WriteStream extends Socket { - readonly writableHighWaterMark: number; - readonly writableLength: number; - columns?: number; - rows?: number; - _write(chunk: any, encoding: string, callback: Function): void; - _destroy(err: Error | null, callback: Function): void; - _final(callback: Function): void; - setDefaultEncoding(encoding: string): this; - cork(): void; - uncork(): void; - destroy(error?: Error): void; - } - export interface ReadStream extends Socket { - readonly readableHighWaterMark: number; - readonly readableLength: number; - isRaw?: boolean; - setRawMode?(mode: boolean): void; - _read(size: number): void; - _destroy(err: Error | null, callback: Function): void; - push(chunk: any, encoding?: string): boolean; - destroy(error?: Error): void; - } - - export interface Process extends EventEmitter { - stdout: WriteStream; - stderr: WriteStream; - stdin: ReadStream; - openStdin(): Socket; - argv: string[]; - argv0: string; - execArgv: string[]; - execPath: string; - abort(): void; - chdir(directory: string): void; - cwd(): string; - debugPort: number; - emitWarning(warning: string | Error, name?: string, ctor?: Function): void; - env: ProcessEnv; - exit(code?: number): never; - exitCode: number; - getgid(): number; - setgid(id: number | string): void; - getuid(): number; - setuid(id: number | string): void; - geteuid(): number; - seteuid(id: number | string): void; - getegid(): number; - setegid(id: number | string): void; - getgroups(): number[]; - setgroups(groups: Array): void; - setUncaughtExceptionCaptureCallback(cb: ((err: Error) => void) | null): void; - hasUncaughtExceptionCaptureCallback(): boolean; - version: string; - versions: ProcessVersions; - config: { - target_defaults: { - cflags: any[]; - default_configuration: string; - defines: string[]; - include_dirs: string[]; - libraries: string[]; - }; - variables: { - clang: number; - host_arch: string; - node_install_npm: boolean; - node_install_waf: boolean; - node_prefix: string; - node_shared_openssl: boolean; - node_shared_v8: boolean; - node_shared_zlib: boolean; - node_use_dtrace: boolean; - node_use_etw: boolean; - node_use_openssl: boolean; - target_arch: string; - v8_no_strict_aliasing: number; - v8_use_snapshot: boolean; - visibility: string; - }; - }; - kill(pid: number, signal?: string | number): void; - pid: number; - ppid: number; - title: string; - arch: string; - platform: Platform; - mainModule?: NodeModule; - memoryUsage(): MemoryUsage; - cpuUsage(previousValue?: CpuUsage): CpuUsage; - nextTick(callback: Function, ...args: any[]): void; - umask(mask?: number): 
number; - uptime(): number; - hrtime(time?: [number, number]): [number, number]; - domain: Domain; - - // Worker - send?(message: any, sendHandle?: any): void; - disconnect(): void; - connected: boolean; - - /** - * EventEmitter - * 1. beforeExit - * 2. disconnect - * 3. exit - * 4. message - * 5. rejectionHandled - * 6. uncaughtException - * 7. unhandledRejection - * 8. warning - * 9. message - * 10. - * 11. newListener/removeListener inherited from EventEmitter - */ - addListener(event: "beforeExit", listener: BeforeExitListener): this; - addListener(event: "disconnect", listener: DisconnectListener): this; - addListener(event: "exit", listener: ExitListener): this; - addListener(event: "rejectionHandled", listener: RejectionHandledListener): this; - addListener(event: "uncaughtException", listener: UncaughtExceptionListener): this; - addListener(event: "unhandledRejection", listener: UnhandledRejectionListener): this; - addListener(event: "warning", listener: WarningListener): this; - addListener(event: "message", listener: MessageListener): this; - addListener(event: Signals, listener: SignalsListener): this; - addListener(event: "newListener", listener: NewListenerListener): this; - addListener(event: "removeListener", listener: RemoveListenerListener): this; - - emit(event: "beforeExit", code: number): boolean; - emit(event: "disconnect"): boolean; - emit(event: "exit", code: number): boolean; - emit(event: "rejectionHandled", promise: Promise): boolean; - emit(event: "uncaughtException", error: Error): boolean; - emit(event: "unhandledRejection", reason: any, promise: Promise): boolean; - emit(event: "warning", warning: Error): boolean; - emit(event: "message", message: any, sendHandle: any): this; - emit(event: Signals): boolean; - emit(event: "newListener", eventName: string | symbol, listener: (...args: any[]) => void): this; - emit(event: "removeListener", eventName: string, listener: (...args: any[]) => void): this; - - on(event: "beforeExit", listener: BeforeExitListener): this; - on(event: "disconnect", listener: DisconnectListener): this; - on(event: "exit", listener: ExitListener): this; - on(event: "rejectionHandled", listener: RejectionHandledListener): this; - on(event: "uncaughtException", listener: UncaughtExceptionListener): this; - on(event: "unhandledRejection", listener: UnhandledRejectionListener): this; - on(event: "warning", listener: WarningListener): this; - on(event: "message", listener: MessageListener): this; - on(event: Signals, listener: SignalsListener): this; - on(event: "newListener", listener: NewListenerListener): this; - on(event: "removeListener", listener: RemoveListenerListener): this; - - once(event: "beforeExit", listener: BeforeExitListener): this; - once(event: "disconnect", listener: DisconnectListener): this; - once(event: "exit", listener: ExitListener): this; - once(event: "rejectionHandled", listener: RejectionHandledListener): this; - once(event: "uncaughtException", listener: UncaughtExceptionListener): this; - once(event: "unhandledRejection", listener: UnhandledRejectionListener): this; - once(event: "warning", listener: WarningListener): this; - once(event: "message", listener: MessageListener): this; - once(event: Signals, listener: SignalsListener): this; - once(event: "newListener", listener: NewListenerListener): this; - once(event: "removeListener", listener: RemoveListenerListener): this; - - prependListener(event: "beforeExit", listener: BeforeExitListener): this; - prependListener(event: "disconnect", listener: 
DisconnectListener): this; - prependListener(event: "exit", listener: ExitListener): this; - prependListener(event: "rejectionHandled", listener: RejectionHandledListener): this; - prependListener(event: "uncaughtException", listener: UncaughtExceptionListener): this; - prependListener(event: "unhandledRejection", listener: UnhandledRejectionListener): this; - prependListener(event: "warning", listener: WarningListener): this; - prependListener(event: "message", listener: MessageListener): this; - prependListener(event: Signals, listener: SignalsListener): this; - prependListener(event: "newListener", listener: NewListenerListener): this; - prependListener(event: "removeListener", listener: RemoveListenerListener): this; - - prependOnceListener(event: "beforeExit", listener: BeforeExitListener): this; - prependOnceListener(event: "disconnect", listener: DisconnectListener): this; - prependOnceListener(event: "exit", listener: ExitListener): this; - prependOnceListener(event: "rejectionHandled", listener: RejectionHandledListener): this; - prependOnceListener(event: "uncaughtException", listener: UncaughtExceptionListener): this; - prependOnceListener(event: "unhandledRejection", listener: UnhandledRejectionListener): this; - prependOnceListener(event: "warning", listener: WarningListener): this; - prependOnceListener(event: "message", listener: MessageListener): this; - prependOnceListener(event: Signals, listener: SignalsListener): this; - prependOnceListener(event: "newListener", listener: NewListenerListener): this; - prependOnceListener(event: "removeListener", listener: RemoveListenerListener): this; - - listeners(event: "beforeExit"): BeforeExitListener[]; - listeners(event: "disconnect"): DisconnectListener[]; - listeners(event: "exit"): ExitListener[]; - listeners(event: "rejectionHandled"): RejectionHandledListener[]; - listeners(event: "uncaughtException"): UncaughtExceptionListener[]; - listeners(event: "unhandledRejection"): UnhandledRejectionListener[]; - listeners(event: "warning"): WarningListener[]; - listeners(event: "message"): MessageListener[]; - listeners(event: Signals): SignalsListener[]; - listeners(event: "newListener"): NewListenerListener[]; - listeners(event: "removeListener"): RemoveListenerListener[]; - } - - export interface Global { - Array: typeof Array; - ArrayBuffer: typeof ArrayBuffer; - Boolean: typeof Boolean; - Buffer: typeof Buffer; - DataView: typeof DataView; - Date: typeof Date; - Error: typeof Error; - EvalError: typeof EvalError; - Float32Array: typeof Float32Array; - Float64Array: typeof Float64Array; - Function: typeof Function; - GLOBAL: Global; - Infinity: typeof Infinity; - Int16Array: typeof Int16Array; - Int32Array: typeof Int32Array; - Int8Array: typeof Int8Array; - Intl: typeof Intl; - JSON: typeof JSON; - Map: MapConstructor; - Math: typeof Math; - NaN: typeof NaN; - Number: typeof Number; - Object: typeof Object; - Promise: Function; - RangeError: typeof RangeError; - ReferenceError: typeof ReferenceError; - RegExp: typeof RegExp; - Set: SetConstructor; - String: typeof String; - Symbol: Function; - SyntaxError: typeof SyntaxError; - TypeError: typeof TypeError; - URIError: typeof URIError; - Uint16Array: typeof Uint16Array; - Uint32Array: typeof Uint32Array; - Uint8Array: typeof Uint8Array; - Uint8ClampedArray: Function; - WeakMap: WeakMapConstructor; - WeakSet: WeakSetConstructor; - clearImmediate: (immediateId: any) => void; - clearInterval: (intervalId: NodeJS.Timer) => void; - clearTimeout: (timeoutId: NodeJS.Timer) => void; - 
console: typeof console; - decodeURI: typeof decodeURI; - decodeURIComponent: typeof decodeURIComponent; - encodeURI: typeof encodeURI; - encodeURIComponent: typeof encodeURIComponent; - escape: (str: string) => string; - eval: typeof eval; - global: Global; - isFinite: typeof isFinite; - isNaN: typeof isNaN; - parseFloat: typeof parseFloat; - parseInt: typeof parseInt; - process: Process; - root: Global; - setImmediate: (callback: (...args: any[]) => void, ...args: any[]) => any; - setInterval: (callback: (...args: any[]) => void, ms: number, ...args: any[]) => NodeJS.Timer; - setTimeout: (callback: (...args: any[]) => void, ms: number, ...args: any[]) => NodeJS.Timer; - undefined: typeof undefined; - unescape: (str: string) => string; - gc: () => void; - v8debug?: any; - } - - export interface Timer { - ref(): void; - unref(): void; - } - - class Module { - static runMain(): void; - static wrap(code: string): string; - static builtinModules: string[]; - - static Module: typeof Module; - - exports: any; - require: NodeRequireFunction; - id: string; - filename: string; - loaded: boolean; - parent: Module | null; - children: Module[]; - paths: string[]; - - constructor(id: string, parent?: Module); - } - - type TypedArray = Uint8Array | Uint8ClampedArray | Uint16Array | Uint32Array | Int8Array | Int16Array | Int32Array | Float32Array | Float64Array; -} - -interface IterableIterator { } - -/************************************************ -* * -* MODULES * -* * -************************************************/ -declare module "buffer" { - export var INSPECT_MAX_BYTES: number; - var BuffType: typeof Buffer; - var SlowBuffType: typeof SlowBuffer; - export { BuffType as Buffer, SlowBuffType as SlowBuffer }; -} - -declare module "querystring" { - export interface StringifyOptions { - encodeURIComponent?: Function; - } - - export interface ParseOptions { - maxKeys?: number; - decodeURIComponent?: Function; - } - - interface ParsedUrlQuery { [key: string]: string | string[] | undefined; } - - export function stringify(obj: T, sep?: string, eq?: string, options?: StringifyOptions): string; - export function parse(str: string, sep?: string, eq?: string, options?: ParseOptions): ParsedUrlQuery; - export function parse(str: string, sep?: string, eq?: string, options?: ParseOptions): T; - export function escape(str: string): string; - export function unescape(str: string): string; -} - -declare module "events" { - class internal extends NodeJS.EventEmitter { } - - namespace internal { - export class EventEmitter extends internal { - /** @deprecated since v4.0.0 */ - static listenerCount(emitter: EventEmitter, event: string | symbol): number; - static defaultMaxListeners: number; - - addListener(event: string | symbol, listener: (...args: any[]) => void): this; - on(event: string | symbol, listener: (...args: any[]) => void): this; - once(event: string | symbol, listener: (...args: any[]) => void): this; - prependListener(event: string | symbol, listener: (...args: any[]) => void): this; - prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; - removeListener(event: string | symbol, listener: (...args: any[]) => void): this; - off(event: string | symbol, listener: (...args: any[]) => void): this; - removeAllListeners(event?: string | symbol): this; - setMaxListeners(n: number): this; - getMaxListeners(): number; - listeners(event: string | symbol): Function[]; - rawListeners(event: string | symbol): Function[]; - emit(event: string | symbol, ...args: any[]): boolean; - 
eventNames(): Array; - listenerCount(type: string | symbol): number; - } - } - - export = internal; -} - -declare module "http" { - import * as events from "events"; - import * as net from "net"; - import * as stream from "stream"; - import { URL } from "url"; - - // incoming headers will never contain number - export interface IncomingHttpHeaders { - 'accept'?: string; - 'access-control-allow-origin'?: string; - 'access-control-allow-credentials'?: string; - 'access-control-expose-headers'?: string; - 'access-control-max-age'?: string; - 'access-control-allow-methods'?: string; - 'access-control-allow-headers'?: string; - 'accept-patch'?: string; - 'accept-ranges'?: string; - 'authorization'?: string; - 'age'?: string; - 'allow'?: string; - 'alt-svc'?: string; - 'cache-control'?: string; - 'connection'?: string; - 'content-disposition'?: string; - 'content-encoding'?: string; - 'content-language'?: string; - 'content-length'?: string; - 'content-location'?: string; - 'content-range'?: string; - 'content-type'?: string; - 'date'?: string; - 'expires'?: string; - 'host'?: string; - 'last-modified'?: string; - 'location'?: string; - 'pragma'?: string; - 'proxy-authenticate'?: string; - 'public-key-pins'?: string; - 'retry-after'?: string; - 'set-cookie'?: string[]; - 'strict-transport-security'?: string; - 'trailer'?: string; - 'transfer-encoding'?: string; - 'tk'?: string; - 'upgrade'?: string; - 'vary'?: string; - 'via'?: string; - 'warning'?: string; - 'www-authenticate'?: string; - [header: string]: string | string[] | undefined; - } - - // outgoing headers allows numbers (as they are converted internally to strings) - export interface OutgoingHttpHeaders { - [header: string]: number | string | string[] | undefined; - } - - export interface ClientRequestArgs { - protocol?: string; - host?: string; - hostname?: string; - family?: number; - port?: number | string; - defaultPort?: number | string; - localAddress?: string; - socketPath?: string; - method?: string; - path?: string; - headers?: OutgoingHttpHeaders; - auth?: string; - agent?: Agent | boolean; - _defaultAgent?: Agent; - timeout?: number; - // https://github.com/nodejs/node/blob/master/lib/_http_client.js#L278 - createConnection?: (options: ClientRequestArgs, oncreate: (err: Error, socket: net.Socket) => void) => net.Socket; - } - - export class Server extends net.Server { - constructor(requestListener?: (req: IncomingMessage, res: ServerResponse) => void); - - setTimeout(msecs?: number, callback?: () => void): this; - setTimeout(callback: () => void): this; - maxHeadersCount: number; - timeout: number; - keepAliveTimeout: number; - } - /** - * @deprecated Use IncomingMessage - */ - export class ServerRequest extends IncomingMessage { - connection: net.Socket; - } - - // https://github.com/nodejs/node/blob/master/lib/_http_outgoing.js - export class OutgoingMessage extends stream.Writable { - upgrading: boolean; - chunkedEncoding: boolean; - shouldKeepAlive: boolean; - useChunkedEncodingByDefault: boolean; - sendDate: boolean; - finished: boolean; - headersSent: boolean; - connection: net.Socket; - - constructor(); - - setTimeout(msecs: number, callback?: () => void): this; - setHeader(name: string, value: number | string | string[]): void; - getHeader(name: string): number | string | string[] | undefined; - getHeaders(): OutgoingHttpHeaders; - getHeaderNames(): string[]; - hasHeader(name: string): boolean; - removeHeader(name: string): void; - addTrailers(headers: OutgoingHttpHeaders | Array<[string, string]>): void; - 
flushHeaders(): void; - } - - // https://github.com/nodejs/node/blob/master/lib/_http_server.js#L108-L256 - export class ServerResponse extends OutgoingMessage { - statusCode: number; - statusMessage: string; - - constructor(req: IncomingMessage); - - assignSocket(socket: net.Socket): void; - detachSocket(socket: net.Socket): void; - // https://github.com/nodejs/node/blob/master/test/parallel/test-http-write-callbacks.js#L53 - // no args in writeContinue callback - writeContinue(callback?: () => void): void; - writeHead(statusCode: number, reasonPhrase?: string, headers?: OutgoingHttpHeaders): void; - writeHead(statusCode: number, headers?: OutgoingHttpHeaders): void; - } - - // https://github.com/nodejs/node/blob/master/lib/_http_client.js#L77 - export class ClientRequest extends OutgoingMessage { - connection: net.Socket; - socket: net.Socket; - aborted: number; - - constructor(url: string | URL | ClientRequestArgs, cb?: (res: IncomingMessage) => void); - - abort(): void; - onSocket(socket: net.Socket): void; - setTimeout(timeout: number, callback?: () => void): this; - setNoDelay(noDelay?: boolean): void; - setSocketKeepAlive(enable?: boolean, initialDelay?: number): void; - } - - export class IncomingMessage extends stream.Readable { - constructor(socket: net.Socket); - - httpVersion: string; - httpVersionMajor: number; - httpVersionMinor: number; - connection: net.Socket; - headers: IncomingHttpHeaders; - rawHeaders: string[]; - trailers: { [key: string]: string | undefined }; - rawTrailers: string[]; - setTimeout(msecs: number, callback: () => void): this; - /** - * Only valid for request obtained from http.Server. - */ - method?: string; - /** - * Only valid for request obtained from http.Server. - */ - url?: string; - /** - * Only valid for response obtained from http.ClientRequest. - */ - statusCode?: number; - /** - * Only valid for response obtained from http.ClientRequest. - */ - statusMessage?: string; - socket: net.Socket; - destroy(error?: Error): void; - } - - /** - * @deprecated Use IncomingMessage - */ - export class ClientResponse extends IncomingMessage { } - - export interface AgentOptions { - /** - * Keep sockets around in a pool to be used by other requests in the future. Default = false - */ - keepAlive?: boolean; - /** - * When using HTTP KeepAlive, how often to send TCP KeepAlive packets over sockets being kept alive. Default = 1000. - * Only relevant if keepAlive is set to true. - */ - keepAliveMsecs?: number; - /** - * Maximum number of sockets to allow per host. Default for Node 0.10 is 5, default for Node 0.12 is Infinity - */ - maxSockets?: number; - /** - * Maximum number of sockets to leave open in a free state. Only relevant if keepAlive is set to true. Default = 256. - */ - maxFreeSockets?: number; - /** - * Socket timeout in milliseconds. This will set the timeout after the socket is connected. - */ - timeout?: number; - } - - export class Agent { - maxFreeSockets: number; - maxSockets: number; - sockets: any; - requests: any; - - constructor(opts?: AgentOptions); - - /** - * Destroy any sockets that are currently in use by the agent. - * It is usually not necessary to do this. However, if you are using an agent with KeepAlive enabled, - * then it is best to explicitly shut down the agent when you know that it will no longer be used. Otherwise, - * sockets may hang open for quite a long time before the server terminates them. 
- */ - destroy(): void; - } - - export var METHODS: string[]; - - export var STATUS_CODES: { - [errorCode: number]: string | undefined; - [errorCode: string]: string | undefined; - }; - - export function createServer(requestListener?: (request: IncomingMessage, response: ServerResponse) => void): Server; - export function createClient(port?: number, host?: string): any; - - // although RequestOptions are passed as ClientRequestArgs to ClientRequest directly, - // create interface RequestOptions would make the naming more clear to developers - export interface RequestOptions extends ClientRequestArgs { } - export function request(options: RequestOptions | string | URL, callback?: (res: IncomingMessage) => void): ClientRequest; - export function get(options: RequestOptions | string | URL, callback?: (res: IncomingMessage) => void): ClientRequest; - export var globalAgent: Agent; -} - -declare module "cluster" { - import * as child from "child_process"; - import * as events from "events"; - import * as net from "net"; - - // interfaces - export interface ClusterSettings { - execArgv?: string[]; // default: process.execArgv - exec?: string; - args?: string[]; - silent?: boolean; - stdio?: any[]; - uid?: number; - gid?: number; - inspectPort?: number | (() => number); - } - - export interface Address { - address: string; - port: number; - addressType: number | "udp4" | "udp6"; // 4, 6, -1, "udp4", "udp6" - } - - export class Worker extends events.EventEmitter { - id: number; - process: child.ChildProcess; - suicide: boolean; - send(message: any, sendHandle?: any, callback?: (error: Error) => void): boolean; - kill(signal?: string): void; - destroy(signal?: string): void; - disconnect(): void; - isConnected(): boolean; - isDead(): boolean; - exitedAfterDisconnect: boolean; - - /** - * events.EventEmitter - * 1. disconnect - * 2. error - * 3. exit - * 4. listening - * 5. message - * 6. online - */ - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "disconnect", listener: () => void): this; - addListener(event: "error", listener: (error: Error) => void): this; - addListener(event: "exit", listener: (code: number, signal: string) => void): this; - addListener(event: "listening", listener: (address: Address) => void): this; - addListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. - addListener(event: "online", listener: () => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "disconnect"): boolean; - emit(event: "error", error: Error): boolean; - emit(event: "exit", code: number, signal: string): boolean; - emit(event: "listening", address: Address): boolean; - emit(event: "message", message: any, handle: net.Socket | net.Server): boolean; - emit(event: "online"): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "disconnect", listener: () => void): this; - on(event: "error", listener: (error: Error) => void): this; - on(event: "exit", listener: (code: number, signal: string) => void): this; - on(event: "listening", listener: (address: Address) => void): this; - on(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. 
- on(event: "online", listener: () => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "disconnect", listener: () => void): this; - once(event: "error", listener: (error: Error) => void): this; - once(event: "exit", listener: (code: number, signal: string) => void): this; - once(event: "listening", listener: (address: Address) => void): this; - once(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. - once(event: "online", listener: () => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "disconnect", listener: () => void): this; - prependListener(event: "error", listener: (error: Error) => void): this; - prependListener(event: "exit", listener: (code: number, signal: string) => void): this; - prependListener(event: "listening", listener: (address: Address) => void): this; - prependListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. - prependListener(event: "online", listener: () => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "disconnect", listener: () => void): this; - prependOnceListener(event: "error", listener: (error: Error) => void): this; - prependOnceListener(event: "exit", listener: (code: number, signal: string) => void): this; - prependOnceListener(event: "listening", listener: (address: Address) => void): this; - prependOnceListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. - prependOnceListener(event: "online", listener: () => void): this; - } - - export interface Cluster extends events.EventEmitter { - Worker: Worker; - disconnect(callback?: Function): void; - fork(env?: any): Worker; - isMaster: boolean; - isWorker: boolean; - // TODO: cluster.schedulingPolicy - settings: ClusterSettings; - setupMaster(settings?: ClusterSettings): void; - worker?: Worker; - workers?: { - [index: string]: Worker | undefined - }; - - /** - * events.EventEmitter - * 1. disconnect - * 2. exit - * 3. fork - * 4. listening - * 5. message - * 6. online - * 7. setup - */ - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "disconnect", listener: (worker: Worker) => void): this; - addListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; - addListener(event: "fork", listener: (worker: Worker) => void): this; - addListener(event: "listening", listener: (worker: Worker, address: Address) => void): this; - addListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. 
- addListener(event: "online", listener: (worker: Worker) => void): this; - addListener(event: "setup", listener: (settings: any) => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "disconnect", worker: Worker): boolean; - emit(event: "exit", worker: Worker, code: number, signal: string): boolean; - emit(event: "fork", worker: Worker): boolean; - emit(event: "listening", worker: Worker, address: Address): boolean; - emit(event: "message", worker: Worker, message: any, handle: net.Socket | net.Server): boolean; - emit(event: "online", worker: Worker): boolean; - emit(event: "setup", settings: any): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "disconnect", listener: (worker: Worker) => void): this; - on(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; - on(event: "fork", listener: (worker: Worker) => void): this; - on(event: "listening", listener: (worker: Worker, address: Address) => void): this; - on(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. - on(event: "online", listener: (worker: Worker) => void): this; - on(event: "setup", listener: (settings: any) => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "disconnect", listener: (worker: Worker) => void): this; - once(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; - once(event: "fork", listener: (worker: Worker) => void): this; - once(event: "listening", listener: (worker: Worker, address: Address) => void): this; - once(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. - once(event: "online", listener: (worker: Worker) => void): this; - once(event: "setup", listener: (settings: any) => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "disconnect", listener: (worker: Worker) => void): this; - prependListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; - prependListener(event: "fork", listener: (worker: Worker) => void): this; - prependListener(event: "listening", listener: (worker: Worker, address: Address) => void): this; - prependListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. - prependListener(event: "online", listener: (worker: Worker) => void): this; - prependListener(event: "setup", listener: (settings: any) => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "disconnect", listener: (worker: Worker) => void): this; - prependOnceListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; - prependOnceListener(event: "fork", listener: (worker: Worker) => void): this; - prependOnceListener(event: "listening", listener: (worker: Worker, address: Address) => void): this; - prependOnceListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. 
- prependOnceListener(event: "online", listener: (worker: Worker) => void): this; - prependOnceListener(event: "setup", listener: (settings: any) => void): this; - } - - export function disconnect(callback?: Function): void; - export function fork(env?: any): Worker; - export var isMaster: boolean; - export var isWorker: boolean; - // TODO: cluster.schedulingPolicy - export var settings: ClusterSettings; - export function setupMaster(settings?: ClusterSettings): void; - export var worker: Worker; - export var workers: { - [index: string]: Worker | undefined - }; - - /** - * events.EventEmitter - * 1. disconnect - * 2. exit - * 3. fork - * 4. listening - * 5. message - * 6. online - * 7. setup - */ - export function addListener(event: string, listener: (...args: any[]) => void): Cluster; - export function addListener(event: "disconnect", listener: (worker: Worker) => void): Cluster; - export function addListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster; - export function addListener(event: "fork", listener: (worker: Worker) => void): Cluster; - export function addListener(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster; - export function addListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster; // the handle is a net.Socket or net.Server object, or undefined. - export function addListener(event: "online", listener: (worker: Worker) => void): Cluster; - export function addListener(event: "setup", listener: (settings: any) => void): Cluster; - - export function emit(event: string | symbol, ...args: any[]): boolean; - export function emit(event: "disconnect", worker: Worker): boolean; - export function emit(event: "exit", worker: Worker, code: number, signal: string): boolean; - export function emit(event: "fork", worker: Worker): boolean; - export function emit(event: "listening", worker: Worker, address: Address): boolean; - export function emit(event: "message", worker: Worker, message: any, handle: net.Socket | net.Server): boolean; - export function emit(event: "online", worker: Worker): boolean; - export function emit(event: "setup", settings: any): boolean; - - export function on(event: string, listener: (...args: any[]) => void): Cluster; - export function on(event: "disconnect", listener: (worker: Worker) => void): Cluster; - export function on(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster; - export function on(event: "fork", listener: (worker: Worker) => void): Cluster; - export function on(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster; - export function on(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster; // the handle is a net.Socket or net.Server object, or undefined. 
- export function on(event: "online", listener: (worker: Worker) => void): Cluster; - export function on(event: "setup", listener: (settings: any) => void): Cluster; - - export function once(event: string, listener: (...args: any[]) => void): Cluster; - export function once(event: "disconnect", listener: (worker: Worker) => void): Cluster; - export function once(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster; - export function once(event: "fork", listener: (worker: Worker) => void): Cluster; - export function once(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster; - export function once(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster; // the handle is a net.Socket or net.Server object, or undefined. - export function once(event: "online", listener: (worker: Worker) => void): Cluster; - export function once(event: "setup", listener: (settings: any) => void): Cluster; - - export function removeListener(event: string, listener: (...args: any[]) => void): Cluster; - export function removeAllListeners(event?: string): Cluster; - export function setMaxListeners(n: number): Cluster; - export function getMaxListeners(): number; - export function listeners(event: string): Function[]; - export function listenerCount(type: string): number; - - export function prependListener(event: string, listener: (...args: any[]) => void): Cluster; - export function prependListener(event: "disconnect", listener: (worker: Worker) => void): Cluster; - export function prependListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster; - export function prependListener(event: "fork", listener: (worker: Worker) => void): Cluster; - export function prependListener(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster; - export function prependListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster; // the handle is a net.Socket or net.Server object, or undefined. - export function prependListener(event: "online", listener: (worker: Worker) => void): Cluster; - export function prependListener(event: "setup", listener: (settings: any) => void): Cluster; - - export function prependOnceListener(event: string, listener: (...args: any[]) => void): Cluster; - export function prependOnceListener(event: "disconnect", listener: (worker: Worker) => void): Cluster; - export function prependOnceListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster; - export function prependOnceListener(event: "fork", listener: (worker: Worker) => void): Cluster; - export function prependOnceListener(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster; - export function prependOnceListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster; // the handle is a net.Socket or net.Server object, or undefined. 
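// Illustrative sketch (not part of the original declarations): the module-level cluster
// functions declared here mirror the Cluster interface above, so typical usage is:
//
//     import * as cluster from "cluster";
//     import * as os from "os";
//
//     if (cluster.isMaster) {
//         os.cpus().forEach(() => cluster.fork());
//         cluster.on("exit", (worker, code, signal) => {
//             console.log("worker " + worker.id + " exited", code, signal);
//         });
//     } else {
//         // worker process: do the actual work here
//     }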
- export function prependOnceListener(event: "online", listener: (worker: Worker) => void): Cluster; - export function prependOnceListener(event: "setup", listener: (settings: any) => void): Cluster; - - export function eventNames(): string[]; -} - -declare module "zlib" { - import * as stream from "stream"; - - export interface ZlibOptions { - flush?: number; // default: zlib.constants.Z_NO_FLUSH - finishFlush?: number; // default: zlib.constants.Z_FINISH - chunkSize?: number; // default: 16*1024 - windowBits?: number; - level?: number; // compression only - memLevel?: number; // compression only - strategy?: number; // compression only - dictionary?: Buffer | NodeJS.TypedArray | DataView | ArrayBuffer; // deflate/inflate only, empty dictionary by default - } - - export interface Zlib { - readonly bytesRead: number; - close(callback?: () => void): void; - flush(kind?: number | (() => void), callback?: () => void): void; - } - - export interface ZlibParams { - params(level: number, strategy: number, callback: () => void): void; - } - - export interface ZlibReset { - reset(): void; - } - - export interface Gzip extends stream.Transform, Zlib { } - export interface Gunzip extends stream.Transform, Zlib { } - export interface Deflate extends stream.Transform, Zlib, ZlibReset, ZlibParams { } - export interface Inflate extends stream.Transform, Zlib, ZlibReset { } - export interface DeflateRaw extends stream.Transform, Zlib, ZlibReset, ZlibParams { } - export interface InflateRaw extends stream.Transform, Zlib, ZlibReset { } - export interface Unzip extends stream.Transform, Zlib { } - - export function createGzip(options?: ZlibOptions): Gzip; - export function createGunzip(options?: ZlibOptions): Gunzip; - export function createDeflate(options?: ZlibOptions): Deflate; - export function createInflate(options?: ZlibOptions): Inflate; - export function createDeflateRaw(options?: ZlibOptions): DeflateRaw; - export function createInflateRaw(options?: ZlibOptions): InflateRaw; - export function createUnzip(options?: ZlibOptions): Unzip; - - type InputType = string | Buffer | DataView | ArrayBuffer | NodeJS.TypedArray; - export function deflate(buf: InputType, callback: (error: Error | null, result: Buffer) => void): void; - export function deflate(buf: InputType, options: ZlibOptions, callback: (error: Error | null, result: Buffer) => void): void; - export function deflateSync(buf: InputType, options?: ZlibOptions): Buffer; - export function deflateRaw(buf: InputType, callback: (error: Error | null, result: Buffer) => void): void; - export function deflateRaw(buf: InputType, options: ZlibOptions, callback: (error: Error | null, result: Buffer) => void): void; - export function deflateRawSync(buf: InputType, options?: ZlibOptions): Buffer; - export function gzip(buf: InputType, callback: (error: Error | null, result: Buffer) => void): void; - export function gzip(buf: InputType, options: ZlibOptions, callback: (error: Error | null, result: Buffer) => void): void; - export function gzipSync(buf: InputType, options?: ZlibOptions): Buffer; - export function gunzip(buf: InputType, callback: (error: Error | null, result: Buffer) => void): void; - export function gunzip(buf: InputType, options: ZlibOptions, callback: (error: Error | null, result: Buffer) => void): void; - export function gunzipSync(buf: InputType, options?: ZlibOptions): Buffer; - export function inflate(buf: InputType, callback: (error: Error | null, result: Buffer) => void): void; - export function inflate(buf: InputType, options: 
ZlibOptions, callback: (error: Error | null, result: Buffer) => void): void; - export function inflateSync(buf: InputType, options?: ZlibOptions): Buffer; - export function inflateRaw(buf: InputType, callback: (error: Error | null, result: Buffer) => void): void; - export function inflateRaw(buf: InputType, options: ZlibOptions, callback: (error: Error | null, result: Buffer) => void): void; - export function inflateRawSync(buf: InputType, options?: ZlibOptions): Buffer; - export function unzip(buf: InputType, callback: (error: Error | null, result: Buffer) => void): void; - export function unzip(buf: InputType, options: ZlibOptions, callback: (error: Error | null, result: Buffer) => void): void; - export function unzipSync(buf: InputType, options?: ZlibOptions): Buffer; - - export namespace constants { - // Allowed flush values. - - export const Z_NO_FLUSH: number; - export const Z_PARTIAL_FLUSH: number; - export const Z_SYNC_FLUSH: number; - export const Z_FULL_FLUSH: number; - export const Z_FINISH: number; - export const Z_BLOCK: number; - export const Z_TREES: number; - - // Return codes for the compression/decompression functions. Negative values are errors, positive values are used for special but normal events. - - export const Z_OK: number; - export const Z_STREAM_END: number; - export const Z_NEED_DICT: number; - export const Z_ERRNO: number; - export const Z_STREAM_ERROR: number; - export const Z_DATA_ERROR: number; - export const Z_MEM_ERROR: number; - export const Z_BUF_ERROR: number; - export const Z_VERSION_ERROR: number; - - // Compression levels. - - export const Z_NO_COMPRESSION: number; - export const Z_BEST_SPEED: number; - export const Z_BEST_COMPRESSION: number; - export const Z_DEFAULT_COMPRESSION: number; - - // Compression strategy. 
- - export const Z_FILTERED: number; - export const Z_HUFFMAN_ONLY: number; - export const Z_RLE: number; - export const Z_FIXED: number; - export const Z_DEFAULT_STRATEGY: number; - } - - // Constants - export var Z_NO_FLUSH: number; - export var Z_PARTIAL_FLUSH: number; - export var Z_SYNC_FLUSH: number; - export var Z_FULL_FLUSH: number; - export var Z_FINISH: number; - export var Z_BLOCK: number; - export var Z_TREES: number; - export var Z_OK: number; - export var Z_STREAM_END: number; - export var Z_NEED_DICT: number; - export var Z_ERRNO: number; - export var Z_STREAM_ERROR: number; - export var Z_DATA_ERROR: number; - export var Z_MEM_ERROR: number; - export var Z_BUF_ERROR: number; - export var Z_VERSION_ERROR: number; - export var Z_NO_COMPRESSION: number; - export var Z_BEST_SPEED: number; - export var Z_BEST_COMPRESSION: number; - export var Z_DEFAULT_COMPRESSION: number; - export var Z_FILTERED: number; - export var Z_HUFFMAN_ONLY: number; - export var Z_RLE: number; - export var Z_FIXED: number; - export var Z_DEFAULT_STRATEGY: number; - export var Z_BINARY: number; - export var Z_TEXT: number; - export var Z_ASCII: number; - export var Z_UNKNOWN: number; - export var Z_DEFLATED: number; -} - -declare module "os" { - export interface CpuInfo { - model: string; - speed: number; - times: { - user: number; - nice: number; - sys: number; - idle: number; - irq: number; - }; - } - - export interface NetworkInterfaceBase { - address: string; - netmask: string; - mac: string; - internal: boolean; - } - - export interface NetworkInterfaceInfoIPv4 extends NetworkInterfaceBase { - family: "IPv4"; - } - - export interface NetworkInterfaceInfoIPv6 extends NetworkInterfaceBase { - family: "IPv6"; - scopeid: number; - } - - export type NetworkInterfaceInfo = NetworkInterfaceInfoIPv4 | NetworkInterfaceInfoIPv6; - - export function hostname(): string; - export function loadavg(): number[]; - export function uptime(): number; - export function freemem(): number; - export function totalmem(): number; - export function cpus(): CpuInfo[]; - export function type(): string; - export function release(): string; - export function networkInterfaces(): { [index: string]: NetworkInterfaceInfo[] }; - export function homedir(): string; - export function userInfo(options?: { encoding: string }): { username: string, uid: number, gid: number, shell: any, homedir: string }; - export var constants: { - UV_UDP_REUSEADDR: number, - signals: { - SIGHUP: number; - SIGINT: number; - SIGQUIT: number; - SIGILL: number; - SIGTRAP: number; - SIGABRT: number; - SIGIOT: number; - SIGBUS: number; - SIGFPE: number; - SIGKILL: number; - SIGUSR1: number; - SIGSEGV: number; - SIGUSR2: number; - SIGPIPE: number; - SIGALRM: number; - SIGTERM: number; - SIGCHLD: number; - SIGSTKFLT: number; - SIGCONT: number; - SIGSTOP: number; - SIGTSTP: number; - SIGTTIN: number; - SIGTTOU: number; - SIGURG: number; - SIGXCPU: number; - SIGXFSZ: number; - SIGVTALRM: number; - SIGPROF: number; - SIGWINCH: number; - SIGIO: number; - SIGPOLL: number; - SIGPWR: number; - SIGSYS: number; - SIGUNUSED: number; - }, - errno: { - E2BIG: number; - EACCES: number; - EADDRINUSE: number; - EADDRNOTAVAIL: number; - EAFNOSUPPORT: number; - EAGAIN: number; - EALREADY: number; - EBADF: number; - EBADMSG: number; - EBUSY: number; - ECANCELED: number; - ECHILD: number; - ECONNABORTED: number; - ECONNREFUSED: number; - ECONNRESET: number; - EDEADLK: number; - EDESTADDRREQ: number; - EDOM: number; - EDQUOT: number; - EEXIST: number; - EFAULT: number; - EFBIG: 
number; - EHOSTUNREACH: number; - EIDRM: number; - EILSEQ: number; - EINPROGRESS: number; - EINTR: number; - EINVAL: number; - EIO: number; - EISCONN: number; - EISDIR: number; - ELOOP: number; - EMFILE: number; - EMLINK: number; - EMSGSIZE: number; - EMULTIHOP: number; - ENAMETOOLONG: number; - ENETDOWN: number; - ENETRESET: number; - ENETUNREACH: number; - ENFILE: number; - ENOBUFS: number; - ENODATA: number; - ENODEV: number; - ENOENT: number; - ENOEXEC: number; - ENOLCK: number; - ENOLINK: number; - ENOMEM: number; - ENOMSG: number; - ENOPROTOOPT: number; - ENOSPC: number; - ENOSR: number; - ENOSTR: number; - ENOSYS: number; - ENOTCONN: number; - ENOTDIR: number; - ENOTEMPTY: number; - ENOTSOCK: number; - ENOTSUP: number; - ENOTTY: number; - ENXIO: number; - EOPNOTSUPP: number; - EOVERFLOW: number; - EPERM: number; - EPIPE: number; - EPROTO: number; - EPROTONOSUPPORT: number; - EPROTOTYPE: number; - ERANGE: number; - EROFS: number; - ESPIPE: number; - ESRCH: number; - ESTALE: number; - ETIME: number; - ETIMEDOUT: number; - ETXTBSY: number; - EWOULDBLOCK: number; - EXDEV: number; - }, - }; - export function arch(): string; - export function platform(): NodeJS.Platform; - export function tmpdir(): string; - export const EOL: string; - export function endianness(): "BE" | "LE"; -} - -declare module "https" { - import * as tls from "tls"; - import * as events from "events"; - import * as http from "http"; - import { URL } from "url"; - - export type ServerOptions = tls.SecureContextOptions & tls.TlsOptions; - - export type RequestOptions = http.RequestOptions & tls.SecureContextOptions & { - rejectUnauthorized?: boolean; // Defaults to true - servername?: string; // SNI TLS Extension - }; - - export interface AgentOptions extends http.AgentOptions, tls.ConnectionOptions { - rejectUnauthorized?: boolean; - maxCachedSessions?: number; - } - - export class Agent extends http.Agent { - constructor(options?: AgentOptions); - options: AgentOptions; - } - - export class Server extends tls.Server { - setTimeout(callback: () => void): this; - setTimeout(msecs?: number, callback?: () => void): this; - timeout: number; - keepAliveTimeout: number; - } - - export function createServer(options: ServerOptions, requestListener?: (req: http.IncomingMessage, res: http.ServerResponse) => void): Server; - export function request(options: RequestOptions | string | URL, callback?: (res: http.IncomingMessage) => void): http.ClientRequest; - export function get(options: RequestOptions | string | URL, callback?: (res: http.IncomingMessage) => void): http.ClientRequest; - export var globalAgent: Agent; -} - -declare module "punycode" { - export function decode(string: string): string; - export function encode(string: string): string; - export function toUnicode(domain: string): string; - export function toASCII(domain: string): string; - export var ucs2: ucs2; - interface ucs2 { - decode(string: string): number[]; - encode(codePoints: number[]): string; - } - export var version: any; -} - -declare module "repl" { - import * as stream from "stream"; - import * as readline from "readline"; - - export interface ReplOptions { - prompt?: string; - input?: NodeJS.ReadableStream; - output?: NodeJS.WritableStream; - terminal?: boolean; - eval?: Function; - useColors?: boolean; - useGlobal?: boolean; - ignoreUndefined?: boolean; - writer?: Function; - completer?: Function; - replMode?: any; - breakEvalOnSigint?: any; - } - - export interface REPLServer extends readline.ReadLine { - context: any; - inputStream: 
NodeJS.ReadableStream; - outputStream: NodeJS.WritableStream; - - defineCommand(keyword: string, cmd: Function | { help: string, action: Function }): void; - displayPrompt(preserveCursor?: boolean): void; - - /** - * events.EventEmitter - * 1. exit - * 2. reset - */ - - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "exit", listener: () => void): this; - addListener(event: "reset", listener: (...args: any[]) => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "exit"): boolean; - emit(event: "reset", context: any): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "exit", listener: () => void): this; - on(event: "reset", listener: (...args: any[]) => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "exit", listener: () => void): this; - once(event: "reset", listener: (...args: any[]) => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "exit", listener: () => void): this; - prependListener(event: "reset", listener: (...args: any[]) => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "exit", listener: () => void): this; - prependOnceListener(event: "reset", listener: (...args: any[]) => void): this; - } - - export function start(options?: string | ReplOptions): REPLServer; - - export class Recoverable extends SyntaxError { - err: Error; - - constructor(err: Error); - } -} - -declare module "readline" { - import * as events from "events"; - import * as stream from "stream"; - - export interface Key { - sequence?: string; - name?: string; - ctrl?: boolean; - meta?: boolean; - shift?: boolean; - } - - export interface ReadLine extends events.EventEmitter { - setPrompt(prompt: string): void; - prompt(preserveCursor?: boolean): void; - question(query: string, callback: (answer: string) => void): void; - pause(): ReadLine; - resume(): ReadLine; - close(): void; - write(data: string | Buffer, key?: Key): void; - - /** - * events.EventEmitter - * 1. close - * 2. line - * 3. pause - * 4. resume - * 5. SIGCONT - * 6. SIGINT - * 7. 
SIGTSTP - */ - - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "close", listener: () => void): this; - addListener(event: "line", listener: (input: any) => void): this; - addListener(event: "pause", listener: () => void): this; - addListener(event: "resume", listener: () => void): this; - addListener(event: "SIGCONT", listener: () => void): this; - addListener(event: "SIGINT", listener: () => void): this; - addListener(event: "SIGTSTP", listener: () => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "close"): boolean; - emit(event: "line", input: any): boolean; - emit(event: "pause"): boolean; - emit(event: "resume"): boolean; - emit(event: "SIGCONT"): boolean; - emit(event: "SIGINT"): boolean; - emit(event: "SIGTSTP"): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "close", listener: () => void): this; - on(event: "line", listener: (input: any) => void): this; - on(event: "pause", listener: () => void): this; - on(event: "resume", listener: () => void): this; - on(event: "SIGCONT", listener: () => void): this; - on(event: "SIGINT", listener: () => void): this; - on(event: "SIGTSTP", listener: () => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "close", listener: () => void): this; - once(event: "line", listener: (input: any) => void): this; - once(event: "pause", listener: () => void): this; - once(event: "resume", listener: () => void): this; - once(event: "SIGCONT", listener: () => void): this; - once(event: "SIGINT", listener: () => void): this; - once(event: "SIGTSTP", listener: () => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "close", listener: () => void): this; - prependListener(event: "line", listener: (input: any) => void): this; - prependListener(event: "pause", listener: () => void): this; - prependListener(event: "resume", listener: () => void): this; - prependListener(event: "SIGCONT", listener: () => void): this; - prependListener(event: "SIGINT", listener: () => void): this; - prependListener(event: "SIGTSTP", listener: () => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "close", listener: () => void): this; - prependOnceListener(event: "line", listener: (input: any) => void): this; - prependOnceListener(event: "pause", listener: () => void): this; - prependOnceListener(event: "resume", listener: () => void): this; - prependOnceListener(event: "SIGCONT", listener: () => void): this; - prependOnceListener(event: "SIGINT", listener: () => void): this; - prependOnceListener(event: "SIGTSTP", listener: () => void): this; - } - - type Completer = (line: string) => CompleterResult; - type AsyncCompleter = (line: string, callback: (err: any, result: CompleterResult) => void) => any; - - export type CompleterResult = [string[], string]; - - export interface ReadLineOptions { - input: NodeJS.ReadableStream; - output?: NodeJS.WritableStream; - completer?: Completer | AsyncCompleter; - terminal?: boolean; - historySize?: number; - prompt?: string; - crlfDelay?: number; - removeHistoryDuplicates?: boolean; - } - - export function createInterface(input: NodeJS.ReadableStream, output?: NodeJS.WritableStream, completer?: Completer | AsyncCompleter, terminal?: boolean): ReadLine; - export function createInterface(options: ReadLineOptions): ReadLine; - - export function 
cursorTo(stream: NodeJS.WritableStream, x: number, y?: number): void; - export function emitKeypressEvents(stream: NodeJS.ReadableStream, interface?: ReadLine): void; - export function moveCursor(stream: NodeJS.WritableStream, dx: number | string, dy: number | string): void; - export function clearLine(stream: NodeJS.WritableStream, dir: number): void; - export function clearScreenDown(stream: NodeJS.WritableStream): void; -} - -declare module "vm" { - export interface Context { } - export interface ScriptOptions { - filename?: string; - lineOffset?: number; - columnOffset?: number; - displayErrors?: boolean; - timeout?: number; - cachedData?: Buffer; - produceCachedData?: boolean; - } - export interface RunningScriptOptions { - filename?: string; - lineOffset?: number; - columnOffset?: number; - displayErrors?: boolean; - timeout?: number; - } - export class Script { - constructor(code: string, options?: ScriptOptions); - runInContext(contextifiedSandbox: Context, options?: RunningScriptOptions): any; - runInNewContext(sandbox?: Context, options?: RunningScriptOptions): any; - runInThisContext(options?: RunningScriptOptions): any; - } - export function createContext(sandbox?: Context): Context; - export function isContext(sandbox: Context): boolean; - export function runInContext(code: string, contextifiedSandbox: Context, options?: RunningScriptOptions | string): any; - /** @deprecated */ - export function runInDebugContext(code: string): any; - export function runInNewContext(code: string, sandbox?: Context, options?: RunningScriptOptions | string): any; - export function runInThisContext(code: string, options?: RunningScriptOptions | string): any; -} - -declare module "child_process" { - import * as events from "events"; - import * as stream from "stream"; - import * as net from "net"; - - export interface ChildProcess extends events.EventEmitter { - stdin: stream.Writable; - stdout: stream.Readable; - stderr: stream.Readable; - stdio: [stream.Writable, stream.Readable, stream.Readable]; - killed: boolean; - pid: number; - kill(signal?: string): void; - send(message: any, callback?: (error: Error) => void): boolean; - send(message: any, sendHandle?: net.Socket | net.Server, callback?: (error: Error) => void): boolean; - send(message: any, sendHandle?: net.Socket | net.Server, options?: MessageOptions, callback?: (error: Error) => void): boolean; - connected: boolean; - disconnect(): void; - unref(): void; - ref(): void; - - /** - * events.EventEmitter - * 1. close - * 2. disconnect - * 3. error - * 4. exit - * 5. 
message - */ - - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "close", listener: (code: number, signal: string) => void): this; - addListener(event: "disconnect", listener: () => void): this; - addListener(event: "error", listener: (err: Error) => void): this; - addListener(event: "exit", listener: (code: number, signal: string) => void): this; - addListener(event: "message", listener: (message: any, sendHandle: net.Socket | net.Server) => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "close", code: number, signal: string): boolean; - emit(event: "disconnect"): boolean; - emit(event: "error", err: Error): boolean; - emit(event: "exit", code: number, signal: string): boolean; - emit(event: "message", message: any, sendHandle: net.Socket | net.Server): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "close", listener: (code: number, signal: string) => void): this; - on(event: "disconnect", listener: () => void): this; - on(event: "error", listener: (err: Error) => void): this; - on(event: "exit", listener: (code: number, signal: string) => void): this; - on(event: "message", listener: (message: any, sendHandle: net.Socket | net.Server) => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "close", listener: (code: number, signal: string) => void): this; - once(event: "disconnect", listener: () => void): this; - once(event: "error", listener: (err: Error) => void): this; - once(event: "exit", listener: (code: number, signal: string) => void): this; - once(event: "message", listener: (message: any, sendHandle: net.Socket | net.Server) => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "close", listener: (code: number, signal: string) => void): this; - prependListener(event: "disconnect", listener: () => void): this; - prependListener(event: "error", listener: (err: Error) => void): this; - prependListener(event: "exit", listener: (code: number, signal: string) => void): this; - prependListener(event: "message", listener: (message: any, sendHandle: net.Socket | net.Server) => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "close", listener: (code: number, signal: string) => void): this; - prependOnceListener(event: "disconnect", listener: () => void): this; - prependOnceListener(event: "error", listener: (err: Error) => void): this; - prependOnceListener(event: "exit", listener: (code: number, signal: string) => void): this; - prependOnceListener(event: "message", listener: (message: any, sendHandle: net.Socket | net.Server) => void): this; - } - - export interface MessageOptions { - keepOpen?: boolean; - } - - export type StdioOptions = "pipe" | "ignore" | "inherit" | Array<("pipe" | "ipc" | "ignore" | stream.Stream | number | null | undefined)>; - - export interface SpawnOptions { - cwd?: string; - env?: NodeJS.ProcessEnv; - argv0?: string; - stdio?: StdioOptions; - detached?: boolean; - uid?: number; - gid?: number; - shell?: boolean | string; - windowsVerbatimArguments?: boolean; - windowsHide?: boolean; - } - - export function spawn(command: string, args?: ReadonlyArray, options?: SpawnOptions): ChildProcess; - - export interface ExecOptions { - cwd?: string; - env?: NodeJS.ProcessEnv; - shell?: string; - timeout?: number; - maxBuffer?: number; - killSignal?: string; - uid?: number; 
- gid?: number; - windowsHide?: boolean; - } - - export interface ExecOptionsWithStringEncoding extends ExecOptions { - encoding: BufferEncoding; - } - - export interface ExecOptionsWithBufferEncoding extends ExecOptions { - encoding: string | null; // specify `null`. - } - - export interface ExecException extends Error { - cmd?: string; - killed?: boolean; - code?: number; - signal?: string; - } - - // no `options` definitely means stdout/stderr are `string`. - export function exec(command: string, callback?: (error: ExecException | null, stdout: string, stderr: string) => void): ChildProcess; - - // `options` with `"buffer"` or `null` for `encoding` means stdout/stderr are definitely `Buffer`. - export function exec(command: string, options: { encoding: "buffer" | null } & ExecOptions, callback?: (error: ExecException | null, stdout: Buffer, stderr: Buffer) => void): ChildProcess; - - // `options` with well known `encoding` means stdout/stderr are definitely `string`. - export function exec(command: string, options: { encoding: BufferEncoding } & ExecOptions, callback?: (error: ExecException | null, stdout: string, stderr: string) => void): ChildProcess; - - // `options` with an `encoding` whose type is `string` means stdout/stderr could either be `Buffer` or `string`. - // There is no guarantee the `encoding` is unknown as `string` is a superset of `BufferEncoding`. - export function exec(command: string, options: { encoding: string } & ExecOptions, callback?: (error: ExecException | null, stdout: string | Buffer, stderr: string | Buffer) => void): ChildProcess; - - // `options` without an `encoding` means stdout/stderr are definitely `string`. - export function exec(command: string, options: ExecOptions, callback?: (error: ExecException | null, stdout: string, stderr: string) => void): ChildProcess; - - // fallback if nothing else matches. Worst case is always `string | Buffer`. - export function exec(command: string, options: ({ encoding?: string | null } & ExecOptions) | undefined | null, callback?: (error: ExecException | null, stdout: string | Buffer, stderr: string | Buffer) => void): ChildProcess; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. 
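// Illustrative sketch (not part of the original declarations): how the encoding-based exec
// overloads above resolve, and the promisified form that the __promisify__ namespace below
// exists to describe.
//
//     import { exec } from "child_process";
//     import { promisify } from "util";
//
//     exec("node --version", (err, stdout) => {
//         // no options: stdout and stderr are typed as string
//     });
//     exec("node --version", { encoding: "buffer" }, (err, stdout) => {
//         // encoding "buffer": stdout and stderr are typed as Buffer
//     });
//
//     const execP = promisify(exec);
//     execP("node --version").then(({ stdout }) => console.log(stdout.trim()));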
- export namespace exec { - export function __promisify__(command: string): Promise<{ stdout: string, stderr: string }>; - export function __promisify__(command: string, options: { encoding: "buffer" | null } & ExecOptions): Promise<{ stdout: Buffer, stderr: Buffer }>; - export function __promisify__(command: string, options: { encoding: BufferEncoding } & ExecOptions): Promise<{ stdout: string, stderr: string }>; - export function __promisify__(command: string, options: ExecOptions): Promise<{ stdout: string, stderr: string }>; - export function __promisify__(command: string, options?: ({ encoding?: string | null } & ExecOptions) | null): Promise<{ stdout: string | Buffer, stderr: string | Buffer }>; - } - - export interface ExecFileOptions { - cwd?: string; - env?: NodeJS.ProcessEnv; - timeout?: number; - maxBuffer?: number; - killSignal?: string; - uid?: number; - gid?: number; - windowsHide?: boolean; - windowsVerbatimArguments?: boolean; - } - export interface ExecFileOptionsWithStringEncoding extends ExecFileOptions { - encoding: BufferEncoding; - } - export interface ExecFileOptionsWithBufferEncoding extends ExecFileOptions { - encoding: 'buffer' | null; - } - export interface ExecFileOptionsWithOtherEncoding extends ExecFileOptions { - encoding: string; - } - - export function execFile(file: string): ChildProcess; - export function execFile(file: string, options: ({ encoding?: string | null } & ExecFileOptions) | undefined | null): ChildProcess; - export function execFile(file: string, args?: ReadonlyArray<string> | null): ChildProcess; - export function execFile(file: string, args: ReadonlyArray<string> | undefined | null, options: ({ encoding?: string | null } & ExecFileOptions) | undefined | null): ChildProcess; - - // no `options` definitely means stdout/stderr are `string`. - export function execFile(file: string, callback: (error: Error | null, stdout: string, stderr: string) => void): ChildProcess; - export function execFile(file: string, args: ReadonlyArray<string> | undefined | null, callback: (error: Error | null, stdout: string, stderr: string) => void): ChildProcess; - - // `options` with `"buffer"` or `null` for `encoding` means stdout/stderr are definitely `Buffer`. - export function execFile(file: string, options: ExecFileOptionsWithBufferEncoding, callback: (error: Error | null, stdout: Buffer, stderr: Buffer) => void): ChildProcess; - export function execFile(file: string, args: ReadonlyArray<string> | undefined | null, options: ExecFileOptionsWithBufferEncoding, callback: (error: Error | null, stdout: Buffer, stderr: Buffer) => void): ChildProcess; - - // `options` with well known `encoding` means stdout/stderr are definitely `string`. - export function execFile(file: string, options: ExecFileOptionsWithStringEncoding, callback: (error: Error | null, stdout: string, stderr: string) => void): ChildProcess; - export function execFile(file: string, args: ReadonlyArray<string> | undefined | null, options: ExecFileOptionsWithStringEncoding, callback: (error: Error | null, stdout: string, stderr: string) => void): ChildProcess; - - // `options` with an `encoding` whose type is `string` means stdout/stderr could either be `Buffer` or `string`. - // There is no guarantee the `encoding` is unknown as `string` is a superset of `BufferEncoding`.
- export function execFile(file: string, options: ExecFileOptionsWithOtherEncoding, callback: (error: Error | null, stdout: string | Buffer, stderr: string | Buffer) => void): ChildProcess; - export function execFile(file: string, args: ReadonlyArray<string> | undefined | null, options: ExecFileOptionsWithOtherEncoding, callback: (error: Error | null, stdout: string | Buffer, stderr: string | Buffer) => void): ChildProcess; - - // `options` without an `encoding` means stdout/stderr are definitely `string`. - export function execFile(file: string, options: ExecFileOptions, callback: (error: Error | null, stdout: string, stderr: string) => void): ChildProcess; - export function execFile(file: string, args: ReadonlyArray<string> | undefined | null, options: ExecFileOptions, callback: (error: Error | null, stdout: string, stderr: string) => void): ChildProcess; - - // fallback if nothing else matches. Worst case is always `string | Buffer`. - export function execFile(file: string, options: ({ encoding?: string | null } & ExecFileOptions) | undefined | null, callback: ((error: Error | null, stdout: string | Buffer, stderr: string | Buffer) => void) | undefined | null): ChildProcess; - export function execFile(file: string, args: ReadonlyArray<string> | undefined | null, options: ({ encoding?: string | null } & ExecFileOptions) | undefined | null, callback: ((error: Error | null, stdout: string | Buffer, stderr: string | Buffer) => void) | undefined | null): ChildProcess; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace execFile { - export function __promisify__(file: string): Promise<{ stdout: string, stderr: string }>; - export function __promisify__(file: string, args: string[] | undefined | null): Promise<{ stdout: string, stderr: string }>; - export function __promisify__(file: string, options: ExecFileOptionsWithBufferEncoding): Promise<{ stdout: Buffer, stderr: Buffer }>; - export function __promisify__(file: string, args: string[] | undefined | null, options: ExecFileOptionsWithBufferEncoding): Promise<{ stdout: Buffer, stderr: Buffer }>; - export function __promisify__(file: string, options: ExecFileOptionsWithStringEncoding): Promise<{ stdout: string, stderr: string }>; - export function __promisify__(file: string, args: string[] | undefined | null, options: ExecFileOptionsWithStringEncoding): Promise<{ stdout: string, stderr: string }>; - export function __promisify__(file: string, options: ExecFileOptionsWithOtherEncoding): Promise<{ stdout: string | Buffer, stderr: string | Buffer }>; - export function __promisify__(file: string, args: string[] | undefined | null, options: ExecFileOptionsWithOtherEncoding): Promise<{ stdout: string | Buffer, stderr: string | Buffer }>; - export function __promisify__(file: string, options: ExecFileOptions): Promise<{ stdout: string, stderr: string }>; - export function __promisify__(file: string, args: string[] | undefined | null, options: ExecFileOptions): Promise<{ stdout: string, stderr: string }>; - export function __promisify__(file: string, options: ({ encoding?: string | null } & ExecFileOptions) | undefined | null): Promise<{ stdout: string | Buffer, stderr: string | Buffer }>; - export function __promisify__(file: string, args: string[] | undefined | null, options: ({ encoding?: string | null } & ExecFileOptions) | undefined | null): Promise<{ stdout: string | Buffer, stderr: string | Buffer }>; - } - - export interface ForkOptions { - cwd?: string; - env?: NodeJS.ProcessEnv;
- execPath?: string; - execArgv?: string[]; - silent?: boolean; - stdio?: StdioOptions; - windowsVerbatimArguments?: boolean; - uid?: number; - gid?: number; - } - export function fork(modulePath: string, args?: ReadonlyArray<string>, options?: ForkOptions): ChildProcess; - - export interface SpawnSyncOptions { - argv0?: string; // Not specified in the docs - cwd?: string; - input?: string | Buffer | Uint8Array; - stdio?: StdioOptions; - env?: NodeJS.ProcessEnv; - uid?: number; - gid?: number; - timeout?: number; - killSignal?: string | number; - maxBuffer?: number; - encoding?: string; - shell?: boolean | string; - windowsVerbatimArguments?: boolean; - windowsHide?: boolean; - } - export interface SpawnSyncOptionsWithStringEncoding extends SpawnSyncOptions { - encoding: BufferEncoding; - } - export interface SpawnSyncOptionsWithBufferEncoding extends SpawnSyncOptions { - encoding: string; // specify `null`. - } - export interface SpawnSyncReturns<T> { - pid: number; - output: string[]; - stdout: T; - stderr: T; - status: number; - signal: string; - error: Error; - } - export function spawnSync(command: string): SpawnSyncReturns<Buffer>; - export function spawnSync(command: string, options?: SpawnSyncOptionsWithStringEncoding): SpawnSyncReturns<string>; - export function spawnSync(command: string, options?: SpawnSyncOptionsWithBufferEncoding): SpawnSyncReturns<Buffer>; - export function spawnSync(command: string, options?: SpawnSyncOptions): SpawnSyncReturns<Buffer>; - export function spawnSync(command: string, args?: ReadonlyArray<string>, options?: SpawnSyncOptionsWithStringEncoding): SpawnSyncReturns<string>; - export function spawnSync(command: string, args?: ReadonlyArray<string>, options?: SpawnSyncOptionsWithBufferEncoding): SpawnSyncReturns<Buffer>; - export function spawnSync(command: string, args?: ReadonlyArray<string>, options?: SpawnSyncOptions): SpawnSyncReturns<Buffer>; - - export interface ExecSyncOptions { - cwd?: string; - input?: string | Buffer | Uint8Array; - stdio?: StdioOptions; - env?: NodeJS.ProcessEnv; - shell?: string; - uid?: number; - gid?: number; - timeout?: number; - killSignal?: string | number; - maxBuffer?: number; - encoding?: string; - windowsHide?: boolean; - } - export interface ExecSyncOptionsWithStringEncoding extends ExecSyncOptions { - encoding: BufferEncoding; - } - export interface ExecSyncOptionsWithBufferEncoding extends ExecSyncOptions { - encoding: string; // specify `null`. - } - export function execSync(command: string): Buffer; - export function execSync(command: string, options?: ExecSyncOptionsWithStringEncoding): string; - export function execSync(command: string, options?: ExecSyncOptionsWithBufferEncoding): Buffer; - export function execSync(command: string, options?: ExecSyncOptions): Buffer; - - export interface ExecFileSyncOptions { - cwd?: string; - input?: string | Buffer | Uint8Array; - stdio?: StdioOptions; - env?: NodeJS.ProcessEnv; - uid?: number; - gid?: number; - timeout?: number; - killSignal?: string | number; - maxBuffer?: number; - encoding?: string; - windowsHide?: boolean; - shell?: boolean | string; - } - export interface ExecFileSyncOptionsWithStringEncoding extends ExecFileSyncOptions { - encoding: BufferEncoding; - } - export interface ExecFileSyncOptionsWithBufferEncoding extends ExecFileSyncOptions { - encoding: string; // specify `null`.
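// Illustrative sketch (not part of the original declarations): the synchronous variants
// declared above select string or Buffer results from the encoding option, e.g.:
//
//     import { spawnSync, execSync } from "child_process";
//
//     const asText = spawnSync("node", ["--version"], { encoding: "utf8" });
//     console.log(asText.stdout.trim());       // SpawnSyncReturns<string>
//
//     const asBuffer = spawnSync("node", ["--version"]);
//     console.log(asBuffer.stdout.byteLength); // SpawnSyncReturns<Buffer>
//
//     console.log(execSync("node --version").toString().trim()); // Buffer by default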
- } - export function execFileSync(command: string): Buffer; - export function execFileSync(command: string, options?: ExecFileSyncOptionsWithStringEncoding): string; - export function execFileSync(command: string, options?: ExecFileSyncOptionsWithBufferEncoding): Buffer; - export function execFileSync(command: string, options?: ExecFileSyncOptions): Buffer; - export function execFileSync(command: string, args?: ReadonlyArray<string>, options?: ExecFileSyncOptionsWithStringEncoding): string; - export function execFileSync(command: string, args?: ReadonlyArray<string>, options?: ExecFileSyncOptionsWithBufferEncoding): Buffer; - export function execFileSync(command: string, args?: ReadonlyArray<string>, options?: ExecFileSyncOptions): Buffer; -} - -declare module "url" { - import { ParsedUrlQuery } from 'querystring'; - - export interface UrlObjectCommon { - auth?: string; - hash?: string; - host?: string; - hostname?: string; - href?: string; - path?: string; - pathname?: string; - protocol?: string; - search?: string; - slashes?: boolean; - } - - // Input to `url.format` - export interface UrlObject extends UrlObjectCommon { - port?: string | number; - query?: string | null | { [key: string]: any }; - } - - // Output of `url.parse` - export interface Url extends UrlObjectCommon { - port?: string; - query?: string | null | ParsedUrlQuery; - } - - export interface UrlWithParsedQuery extends Url { - query: ParsedUrlQuery; - } - - export interface UrlWithStringQuery extends Url { - query: string | null; - } - - export function parse(urlStr: string): UrlWithStringQuery; - export function parse(urlStr: string, parseQueryString: false | undefined, slashesDenoteHost?: boolean): UrlWithStringQuery; - export function parse(urlStr: string, parseQueryString: true, slashesDenoteHost?: boolean): UrlWithParsedQuery; - export function parse(urlStr: string, parseQueryString: boolean, slashesDenoteHost?: boolean): Url; - - export function format(URL: URL, options?: URLFormatOptions): string; - export function format(urlObject: UrlObject | string): string; - export function resolve(from: string, to: string): string; - - export function domainToASCII(domain: string): string; - export function domainToUnicode(domain: string): string; - - export interface URLFormatOptions { - auth?: boolean; - fragment?: boolean; - search?: boolean; - unicode?: boolean; - } - - export class URL { - constructor(input: string, base?: string | URL); - hash: string; - host: string; - hostname: string; - href: string; - readonly origin: string; - password: string; - pathname: string; - port: string; - protocol: string; - search: string; - readonly searchParams: URLSearchParams; - username: string; - toString(): string; - toJSON(): string; - } - - export class URLSearchParams implements Iterable<[string, string]> { - constructor(init?: URLSearchParams | string | { [key: string]: string | string[] | undefined } | Iterable<[string, string]> | Array<[string, string]>); - append(name: string, value: string): void; - delete(name: string): void; - entries(): IterableIterator<[string, string]>; - forEach(callback: (value: string, name: string, searchParams: this) => void): void; - get(name: string): string | null; - getAll(name: string): string[]; - has(name: string): boolean; - keys(): IterableIterator<string>; - set(name: string, value: string): void; - sort(): void; - toString(): string; - values(): IterableIterator<string>; - [Symbol.iterator](): IterableIterator<[string, string]>; - } -} - -declare module "dns" { - // Supported getaddrinfo flags.
- export const ADDRCONFIG: number; - export const V4MAPPED: number; - - export interface LookupOptions { - family?: number; - hints?: number; - all?: boolean; - } - - export interface LookupOneOptions extends LookupOptions { - all?: false; - } - - export interface LookupAllOptions extends LookupOptions { - all: true; - } - - export interface LookupAddress { - address: string; - family: number; - } - - export function lookup(hostname: string, family: number, callback: (err: NodeJS.ErrnoException, address: string, family: number) => void): void; - export function lookup(hostname: string, options: LookupOneOptions, callback: (err: NodeJS.ErrnoException, address: string, family: number) => void): void; - export function lookup(hostname: string, options: LookupAllOptions, callback: (err: NodeJS.ErrnoException, addresses: LookupAddress[]) => void): void; - export function lookup(hostname: string, options: LookupOptions, callback: (err: NodeJS.ErrnoException, address: string | LookupAddress[], family: number) => void): void; - export function lookup(hostname: string, callback: (err: NodeJS.ErrnoException, address: string, family: number) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace lookup { - export function __promisify__(hostname: string, options: LookupAllOptions): Promise<{ address: LookupAddress[] }>; - export function __promisify__(hostname: string, options?: LookupOneOptions | number): Promise<{ address: string, family: number }>; - export function __promisify__(hostname: string, options?: LookupOptions | number): Promise<{ address: string | LookupAddress[], family?: number }>; - } - - export function lookupService(address: string, port: number, callback: (err: NodeJS.ErrnoException, hostname: string, service: string) => void): void; - - export namespace lookupService { - export function __promisify__(address: string, port: number): Promise<{ hostname: string, service: string }>; - } - - export interface ResolveOptions { - ttl: boolean; - } - - export interface ResolveWithTtlOptions extends ResolveOptions { - ttl: true; - } - - export interface RecordWithTtl { - address: string; - ttl: number; - } - - /** @deprecated Use AnyARecord or AnyAaaaRecord instead. 
*/ - export type AnyRecordWithTtl = AnyARecord | AnyAaaaRecord; - - export interface AnyARecord extends RecordWithTtl { - type: "A"; - } - - export interface AnyAaaaRecord extends RecordWithTtl { - type: "AAAA"; - } - - export interface MxRecord { - priority: number; - exchange: string; - } - - export interface AnyMxRecord extends MxRecord { - type: "MX"; - } - - export interface NaptrRecord { - flags: string; - service: string; - regexp: string; - replacement: string; - order: number; - preference: number; - } - - export interface AnyNaptrRecord extends NaptrRecord { - type: "NAPTR"; - } - - export interface SoaRecord { - nsname: string; - hostmaster: string; - serial: number; - refresh: number; - retry: number; - expire: number; - minttl: number; - } - - export interface AnySoaRecord extends SoaRecord { - type: "SOA"; - } - - export interface SrvRecord { - priority: number; - weight: number; - port: number; - name: string; - } - - export interface AnySrvRecord extends SrvRecord { - type: "SRV"; - } - - export interface AnyTxtRecord { - type: "TXT"; - entries: string[]; - } - - export interface AnyNsRecord { - type: "NS"; - value: string; - } - - export interface AnyPtrRecord { - type: "PTR"; - value: string; - } - - export interface AnyCnameRecord { - type: "CNAME"; - value: string; - } - - export type AnyRecord = AnyARecord | - AnyAaaaRecord | - AnyCnameRecord | - AnyMxRecord | - AnyNaptrRecord | - AnyNsRecord | - AnyPtrRecord | - AnySoaRecord | - AnySrvRecord | - AnyTxtRecord; - - export function resolve(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; - export function resolve(hostname: string, rrtype: "A", callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; - export function resolve(hostname: string, rrtype: "AAAA", callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; - export function resolve(hostname: string, rrtype: "ANY", callback: (err: NodeJS.ErrnoException, addresses: AnyRecord[]) => void): void; - export function resolve(hostname: string, rrtype: "CNAME", callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; - export function resolve(hostname: string, rrtype: "MX", callback: (err: NodeJS.ErrnoException, addresses: MxRecord[]) => void): void; - export function resolve(hostname: string, rrtype: "NAPTR", callback: (err: NodeJS.ErrnoException, addresses: NaptrRecord[]) => void): void; - export function resolve(hostname: string, rrtype: "NS", callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; - export function resolve(hostname: string, rrtype: "PTR", callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; - export function resolve(hostname: string, rrtype: "SOA", callback: (err: NodeJS.ErrnoException, addresses: SoaRecord) => void): void; - export function resolve(hostname: string, rrtype: "SRV", callback: (err: NodeJS.ErrnoException, addresses: SrvRecord[]) => void): void; - export function resolve(hostname: string, rrtype: "TXT", callback: (err: NodeJS.ErrnoException, addresses: string[][]) => void): void; - export function resolve(hostname: string, rrtype: string, callback: (err: NodeJS.ErrnoException, addresses: string[] | MxRecord[] | NaptrRecord[] | SoaRecord | SrvRecord[] | string[][] | AnyRecord[]) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. 
- export namespace resolve { - export function __promisify__(hostname: string, rrtype?: "A" | "AAAA" | "CNAME" | "NS" | "PTR"): Promise<string[]>; - export function __promisify__(hostname: string, rrtype: "ANY"): Promise<AnyRecord[]>; - export function __promisify__(hostname: string, rrtype: "MX"): Promise<MxRecord[]>; - export function __promisify__(hostname: string, rrtype: "NAPTR"): Promise<NaptrRecord[]>; - export function __promisify__(hostname: string, rrtype: "SOA"): Promise<SoaRecord>; - export function __promisify__(hostname: string, rrtype: "SRV"): Promise<SrvRecord[]>; - export function __promisify__(hostname: string, rrtype: "TXT"): Promise<string[][]>; - export function __promisify__(hostname: string, rrtype: string): Promise<string[] | MxRecord[] | NaptrRecord[] | SoaRecord | SrvRecord[] | string[][] | AnyRecord[]>; - } - - export function resolve4(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; - export function resolve4(hostname: string, options: ResolveWithTtlOptions, callback: (err: NodeJS.ErrnoException, addresses: RecordWithTtl[]) => void): void; - export function resolve4(hostname: string, options: ResolveOptions, callback: (err: NodeJS.ErrnoException, addresses: string[] | RecordWithTtl[]) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace resolve4 { - export function __promisify__(hostname: string): Promise<string[]>; - export function __promisify__(hostname: string, options: ResolveWithTtlOptions): Promise<RecordWithTtl[]>; - export function __promisify__(hostname: string, options?: ResolveOptions): Promise<string[] | RecordWithTtl[]>; - }
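The __promisify__ namespaces above only describe types; at runtime the same shapes come from wrapping the callback APIs with util.promisify. A minimal illustrative sketch of that usage, with example.com as a placeholder hostname:

import * as dns from "dns";
import { promisify } from "util";

// promisify picks up the __promisify__ overloads declared above,
// so the resolved value is typed per record type.
const resolveMx = promisify(dns.resolveMx);

async function printMx(hostname: string): Promise<void> {
  const records = await resolveMx(hostname); // MxRecord[]
  for (const { exchange, priority } of records) {
    console.log(priority, exchange);
  }
}

printMx("example.com").catch(console.error);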
- - export function resolve6(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; - export function resolve6(hostname: string, options: ResolveWithTtlOptions, callback: (err: NodeJS.ErrnoException, addresses: RecordWithTtl[]) => void): void; - export function resolve6(hostname: string, options: ResolveOptions, callback: (err: NodeJS.ErrnoException, addresses: string[] | RecordWithTtl[]) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace resolve6 { - export function __promisify__(hostname: string): Promise<string[]>; - export function __promisify__(hostname: string, options: ResolveWithTtlOptions): Promise<RecordWithTtl[]>; - export function __promisify__(hostname: string, options?: ResolveOptions): Promise<string[] | RecordWithTtl[]>; - } - - export function resolveCname(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; - export namespace resolveCname { - export function __promisify__(hostname: string): Promise<string[]>; - } - - export function resolveMx(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: MxRecord[]) => void): void; - export namespace resolveMx { - export function __promisify__(hostname: string): Promise<MxRecord[]>; - } - - export function resolveNaptr(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: NaptrRecord[]) => void): void; - export namespace resolveNaptr { - export function __promisify__(hostname: string): Promise<NaptrRecord[]>; - } - - export function resolveNs(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; - export namespace resolveNs { - export function __promisify__(hostname: string): Promise<string[]>; - } - - export function resolvePtr(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; - export namespace resolvePtr { - export function __promisify__(hostname: string): Promise<string[]>; - } - - export function resolveSoa(hostname: string, callback: (err: NodeJS.ErrnoException, address: SoaRecord) => void): void; - export namespace resolveSoa { - export function __promisify__(hostname: string): Promise<SoaRecord>; - } - - export function resolveSrv(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: SrvRecord[]) => void): void; - export namespace resolveSrv { - export function __promisify__(hostname: string): Promise<SrvRecord[]>; - } - - export function resolveTxt(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: string[][]) => void): void; - export namespace resolveTxt { - export function __promisify__(hostname: string): Promise<string[][]>; - } - - export function resolveAny(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: AnyRecord[]) => void): void; - export namespace resolveAny { - export function __promisify__(hostname: string): Promise<AnyRecord[]>; - } - - export function reverse(ip: string, callback: (err: NodeJS.ErrnoException, hostnames: string[]) => void): void; - export function setServers(servers: string[]): void; - - // Error codes - export var NODATA: string; - export var FORMERR: string; - export var SERVFAIL: string; - export var NOTFOUND: string; - export var NOTIMP: string; - export var REFUSED: string; - export var BADQUERY: string; - export var BADNAME: string; - export var BADFAMILY: string; - export var BADRESP: string; - export var CONNREFUSED: string; - export var TIMEOUT: string; - export var EOF: string; - export var FILE: string; - export var NOMEM: string; - export var DESTRUCTION: string; - export var BADSTR: string; - export var BADFLAGS: string; - export var NONAME: string; - export var BADHINTS: string; - export var NOTINITIALIZED: string; - export var LOADIPHLPAPI: string; - export var ADDRGETNETWORKPARAMS: string; - export var CANCELLED: string; -} - -declare module "net" { - import * as stream from "stream"; - import * as events from "events"; - import * as dns from "dns"; - - type LookupFunction = (hostname: string, options: dns.LookupOneOptions, callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void) => void; - - export interface AddressInfo
{ - address: string; - family: string; - port: number; - } - - export interface SocketConstructorOpts { - fd?: number; - allowHalfOpen?: boolean; - readable?: boolean; - writable?: boolean; - } - - export interface TcpSocketConnectOpts { - port: number; - host?: string; - localAddress?: string; - localPort?: number; - hints?: number; - family?: number; - lookup?: LookupFunction; - } - - export interface IpcSocketConnectOpts { - path: string; - } - - export type SocketConnectOpts = TcpSocketConnectOpts | IpcSocketConnectOpts; - - export class Socket extends stream.Duplex { - constructor(options?: SocketConstructorOpts); - - // Extended base methods - write(buffer: Buffer): boolean; - write(buffer: Buffer, cb?: Function): boolean; - write(str: string, cb?: Function): boolean; - write(str: string, encoding?: string, cb?: Function): boolean; - write(str: string, encoding?: string, fd?: string): boolean; - write(data: any, encoding?: string, callback?: Function): void; - - connect(options: SocketConnectOpts, connectionListener?: Function): this; - connect(port: number, host: string, connectionListener?: Function): this; - connect(port: number, connectionListener?: Function): this; - connect(path: string, connectionListener?: Function): this; - - bufferSize: number; - setEncoding(encoding?: string): this; - pause(): this; - resume(): this; - setTimeout(timeout: number, callback?: Function): this; - setNoDelay(noDelay?: boolean): this; - setKeepAlive(enable?: boolean, initialDelay?: number): this; - address(): AddressInfo | string; - unref(): void; - ref(): void; - - remoteAddress?: string; - remoteFamily?: string; - remotePort?: number; - localAddress: string; - localPort: number; - bytesRead: number; - bytesWritten: number; - connecting: boolean; - destroyed: boolean; - - // Extended base methods - end(): void; - end(buffer: Buffer, cb?: Function): void; - end(str: string, cb?: Function): void; - end(str: string, encoding?: string, cb?: Function): void; - end(data?: any, encoding?: string): void; - - /** - * events.EventEmitter - * 1. close - * 2. connect - * 3. data - * 4. drain - * 5. end - * 6. error - * 7. lookup - * 8. 
timeout - */ - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "close", listener: (had_error: boolean) => void): this; - addListener(event: "connect", listener: () => void): this; - addListener(event: "data", listener: (data: Buffer) => void): this; - addListener(event: "drain", listener: () => void): this; - addListener(event: "end", listener: () => void): this; - addListener(event: "error", listener: (err: Error) => void): this; - addListener(event: "lookup", listener: (err: Error, address: string, family: string | number, host: string) => void): this; - addListener(event: "timeout", listener: () => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "close", had_error: boolean): boolean; - emit(event: "connect"): boolean; - emit(event: "data", data: Buffer): boolean; - emit(event: "drain"): boolean; - emit(event: "end"): boolean; - emit(event: "error", err: Error): boolean; - emit(event: "lookup", err: Error, address: string, family: string | number, host: string): boolean; - emit(event: "timeout"): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "close", listener: (had_error: boolean) => void): this; - on(event: "connect", listener: () => void): this; - on(event: "data", listener: (data: Buffer) => void): this; - on(event: "drain", listener: () => void): this; - on(event: "end", listener: () => void): this; - on(event: "error", listener: (err: Error) => void): this; - on(event: "lookup", listener: (err: Error, address: string, family: string | number, host: string) => void): this; - on(event: "timeout", listener: () => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "close", listener: (had_error: boolean) => void): this; - once(event: "connect", listener: () => void): this; - once(event: "data", listener: (data: Buffer) => void): this; - once(event: "drain", listener: () => void): this; - once(event: "end", listener: () => void): this; - once(event: "error", listener: (err: Error) => void): this; - once(event: "lookup", listener: (err: Error, address: string, family: string | number, host: string) => void): this; - once(event: "timeout", listener: () => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "close", listener: (had_error: boolean) => void): this; - prependListener(event: "connect", listener: () => void): this; - prependListener(event: "data", listener: (data: Buffer) => void): this; - prependListener(event: "drain", listener: () => void): this; - prependListener(event: "end", listener: () => void): this; - prependListener(event: "error", listener: (err: Error) => void): this; - prependListener(event: "lookup", listener: (err: Error, address: string, family: string | number, host: string) => void): this; - prependListener(event: "timeout", listener: () => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "close", listener: (had_error: boolean) => void): this; - prependOnceListener(event: "connect", listener: () => void): this; - prependOnceListener(event: "data", listener: (data: Buffer) => void): this; - prependOnceListener(event: "drain", listener: () => void): this; - prependOnceListener(event: "end", listener: () => void): this; - prependOnceListener(event: "error", listener: (err: Error) => void): this; - prependOnceListener(event: "lookup", listener: (err: Error, address: 
string, family: string | number, host: string) => void): this; - prependOnceListener(event: "timeout", listener: () => void): this; - } - - export interface ListenOptions { - port?: number; - host?: string; - backlog?: number; - path?: string; - exclusive?: boolean; - readableAll?: boolean; - writableAll?: boolean; - } - - // https://github.com/nodejs/node/blob/master/lib/net.js - export class Server extends events.EventEmitter { - constructor(connectionListener?: (socket: Socket) => void); - constructor(options?: { allowHalfOpen?: boolean, pauseOnConnect?: boolean }, connectionListener?: (socket: Socket) => void); - - listen(port?: number, hostname?: string, backlog?: number, listeningListener?: Function): this; - listen(port?: number, hostname?: string, listeningListener?: Function): this; - listen(port?: number, backlog?: number, listeningListener?: Function): this; - listen(port?: number, listeningListener?: Function): this; - listen(path: string, backlog?: number, listeningListener?: Function): this; - listen(path: string, listeningListener?: Function): this; - listen(options: ListenOptions, listeningListener?: Function): this; - listen(handle: any, backlog?: number, listeningListener?: Function): this; - listen(handle: any, listeningListener?: Function): this; - close(callback?: Function): this; - address(): AddressInfo | string; - getConnections(cb: (error: Error | null, count: number) => void): void; - ref(): this; - unref(): this; - maxConnections: number; - connections: number; - listening: boolean; - - /** - * events.EventEmitter - * 1. close - * 2. connection - * 3. error - * 4. listening - */ - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "close", listener: () => void): this; - addListener(event: "connection", listener: (socket: Socket) => void): this; - addListener(event: "error", listener: (err: Error) => void): this; - addListener(event: "listening", listener: () => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "close"): boolean; - emit(event: "connection", socket: Socket): boolean; - emit(event: "error", err: Error): boolean; - emit(event: "listening"): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "close", listener: () => void): this; - on(event: "connection", listener: (socket: Socket) => void): this; - on(event: "error", listener: (err: Error) => void): this; - on(event: "listening", listener: () => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "close", listener: () => void): this; - once(event: "connection", listener: (socket: Socket) => void): this; - once(event: "error", listener: (err: Error) => void): this; - once(event: "listening", listener: () => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "close", listener: () => void): this; - prependListener(event: "connection", listener: (socket: Socket) => void): this; - prependListener(event: "error", listener: (err: Error) => void): this; - prependListener(event: "listening", listener: () => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "close", listener: () => void): this; - prependOnceListener(event: "connection", listener: (socket: Socket) => void): this; - prependOnceListener(event: "error", listener: (err: Error) => void): this; - prependOnceListener(event: "listening", listener: () => void): this; 
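A small, illustrative echo round-trip exercising the Socket and Server shapes declared above; the loopback address and the ephemeral port 0 are placeholders.

import * as net from "net";

// Server side: echo every chunk back to the client.
const server = net.createServer((socket: net.Socket) => {
  socket.on("data", (data: Buffer) => socket.write(data));
});

server.listen(0, () => {
  const { port } = server.address() as net.AddressInfo;

  // Client side: connect, send one message, then shut everything down.
  const client = net.connect(port, "127.0.0.1", () => client.write("ping"));
  client.on("data", (data: Buffer) => {
    console.log("echoed:", data.toString());
    client.end();
    server.close();
  });
});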
- } - - export interface TcpNetConnectOpts extends TcpSocketConnectOpts, SocketConstructorOpts { - timeout?: number; - } - - export interface IpcNetConnectOpts extends IpcSocketConnectOpts, SocketConstructorOpts { - timeout?: number; - } - - export type NetConnectOpts = TcpNetConnectOpts | IpcNetConnectOpts; - - export function createServer(connectionListener?: (socket: Socket) => void): Server; - export function createServer(options?: { allowHalfOpen?: boolean, pauseOnConnect?: boolean }, connectionListener?: (socket: Socket) => void): Server; - export function connect(options: NetConnectOpts, connectionListener?: Function): Socket; - export function connect(port: number, host?: string, connectionListener?: Function): Socket; - export function connect(path: string, connectionListener?: Function): Socket; - export function createConnection(options: NetConnectOpts, connectionListener?: Function): Socket; - export function createConnection(port: number, host?: string, connectionListener?: Function): Socket; - export function createConnection(path: string, connectionListener?: Function): Socket; - export function isIP(input: string): number; - export function isIPv4(input: string): boolean; - export function isIPv6(input: string): boolean; -} - -declare module "dgram" { - import { AddressInfo } from "net"; - import * as dns from "dns"; - import * as events from "events"; - - export interface RemoteInfo { - address: string; - family: string; - port: number; - } - - export interface BindOptions { - port: number; - address?: string; - exclusive?: boolean; - } - - type SocketType = "udp4" | "udp6"; - - export interface SocketOptions { - type: SocketType; - reuseAddr?: boolean; - recvBufferSize?: number; - sendBufferSize?: number; - lookup?: (hostname: string, options: dns.LookupOneOptions, callback: (err: NodeJS.ErrnoException, address: string, family: number) => void) => void; - } - - export function createSocket(type: SocketType, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket; - export function createSocket(options: SocketOptions, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket; - - export class Socket extends events.EventEmitter { - send(msg: Buffer | string | Uint8Array | any[], port: number, address?: string, callback?: (error: Error | null, bytes: number) => void): void; - send(msg: Buffer | string | Uint8Array, offset: number, length: number, port: number, address?: string, callback?: (error: Error | null, bytes: number) => void): void; - bind(port?: number, address?: string, callback?: () => void): void; - bind(port?: number, callback?: () => void): void; - bind(callback?: () => void): void; - bind(options: BindOptions, callback?: Function): void; - close(callback?: () => void): void; - address(): AddressInfo | string; - setBroadcast(flag: boolean): void; - setTTL(ttl: number): void; - setMulticastTTL(ttl: number): void; - setMulticastInterface(multicastInterface: string): void; - setMulticastLoopback(flag: boolean): void; - addMembership(multicastAddress: string, multicastInterface?: string): void; - dropMembership(multicastAddress: string, multicastInterface?: string): void; - ref(): this; - unref(): this; - setRecvBufferSize(size: number): void; - setSendBufferSize(size: number): void; - getRecvBufferSize(): number; - getSendBufferSize(): number; - - /** - * events.EventEmitter - * 1. close - * 2. error - * 3. listening - * 4. 
message - */ - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "close", listener: () => void): this; - addListener(event: "error", listener: (err: Error) => void): this; - addListener(event: "listening", listener: () => void): this; - addListener(event: "message", listener: (msg: Buffer, rinfo: AddressInfo) => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "close"): boolean; - emit(event: "error", err: Error): boolean; - emit(event: "listening"): boolean; - emit(event: "message", msg: Buffer, rinfo: AddressInfo): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "close", listener: () => void): this; - on(event: "error", listener: (err: Error) => void): this; - on(event: "listening", listener: () => void): this; - on(event: "message", listener: (msg: Buffer, rinfo: AddressInfo) => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "close", listener: () => void): this; - once(event: "error", listener: (err: Error) => void): this; - once(event: "listening", listener: () => void): this; - once(event: "message", listener: (msg: Buffer, rinfo: AddressInfo) => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "close", listener: () => void): this; - prependListener(event: "error", listener: (err: Error) => void): this; - prependListener(event: "listening", listener: () => void): this; - prependListener(event: "message", listener: (msg: Buffer, rinfo: AddressInfo) => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "close", listener: () => void): this; - prependOnceListener(event: "error", listener: (err: Error) => void): this; - prependOnceListener(event: "listening", listener: () => void): this; - prependOnceListener(event: "message", listener: (msg: Buffer, rinfo: AddressInfo) => void): this; - } -} - -declare module "fs" { - import * as stream from "stream"; - import * as events from "events"; - import { URL } from "url"; - - /** - * Valid types for path values in "fs". - */ - export type PathLike = string | Buffer | URL; - - export class Stats { - isFile(): boolean; - isDirectory(): boolean; - isBlockDevice(): boolean; - isCharacterDevice(): boolean; - isSymbolicLink(): boolean; - isFIFO(): boolean; - isSocket(): boolean; - dev: number; - ino: number; - mode: number; - nlink: number; - uid: number; - gid: number; - rdev: number; - size: number; - blksize: number; - blocks: number; - atimeMs: number; - mtimeMs: number; - ctimeMs: number; - birthtimeMs: number; - atime: Date; - mtime: Date; - ctime: Date; - birthtime: Date; - } - - export interface FSWatcher extends events.EventEmitter { - close(): void; - - /** - * events.EventEmitter - * 1. change - * 2. 
error - */ - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; - addListener(event: "error", listener: (error: Error) => void): this; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; - on(event: "error", listener: (error: Error) => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; - once(event: "error", listener: (error: Error) => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; - prependListener(event: "error", listener: (error: Error) => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; - prependOnceListener(event: "error", listener: (error: Error) => void): this; - } - - export class ReadStream extends stream.Readable { - close(): void; - bytesRead: number; - path: string | Buffer; - - /** - * events.EventEmitter - * 1. open - * 2. close - */ - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "open", listener: (fd: number) => void): this; - addListener(event: "close", listener: () => void): this; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "open", listener: (fd: number) => void): this; - on(event: "close", listener: () => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "open", listener: (fd: number) => void): this; - once(event: "close", listener: () => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "open", listener: (fd: number) => void): this; - prependListener(event: "close", listener: () => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "open", listener: (fd: number) => void): this; - prependOnceListener(event: "close", listener: () => void): this; - } - - export class WriteStream extends stream.Writable { - close(): void; - bytesWritten: number; - path: string | Buffer; - - /** - * events.EventEmitter - * 1. open - * 2. 
close - */ - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "open", listener: (fd: number) => void): this; - addListener(event: "close", listener: () => void): this; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "open", listener: (fd: number) => void): this; - on(event: "close", listener: () => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "open", listener: (fd: number) => void): this; - once(event: "close", listener: () => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "open", listener: (fd: number) => void): this; - prependListener(event: "close", listener: () => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "open", listener: (fd: number) => void): this; - prependOnceListener(event: "close", listener: () => void): this; - } - - /** - * Asynchronous rename(2) - Change the name or location of a file or directory. - * @param oldPath A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - */ - export function rename(oldPath: PathLike, newPath: PathLike, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace rename { - /** - * Asynchronous rename(2) - Change the name or location of a file or directory. - * @param oldPath A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - */ - export function __promisify__(oldPath: PathLike, newPath: PathLike): Promise; - } - - /** - * Synchronous rename(2) - Change the name or location of a file or directory. - * @param oldPath A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - */ - export function renameSync(oldPath: PathLike, newPath: PathLike): void; - - /** - * Asynchronous truncate(2) - Truncate a file to a specified length. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param len If not specified, defaults to `0`. - */ - export function truncate(path: PathLike, len: number | undefined | null, callback: (err: NodeJS.ErrnoException) => void): void; - - /** - * Asynchronous truncate(2) - Truncate a file to a specified length. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - */ - export function truncate(path: PathLike, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace truncate { - /** - * Asynchronous truncate(2) - Truncate a file to a specified length. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param len If not specified, defaults to `0`. 
- */ - export function __promisify__(path: PathLike, len?: number | null): Promise; - } - - /** - * Synchronous truncate(2) - Truncate a file to a specified length. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param len If not specified, defaults to `0`. - */ - export function truncateSync(path: PathLike, len?: number | null): void; - - /** - * Asynchronous ftruncate(2) - Truncate a file to a specified length. - * @param fd A file descriptor. - * @param len If not specified, defaults to `0`. - */ - export function ftruncate(fd: number, len: number | undefined | null, callback: (err: NodeJS.ErrnoException) => void): void; - - /** - * Asynchronous ftruncate(2) - Truncate a file to a specified length. - * @param fd A file descriptor. - */ - export function ftruncate(fd: number, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace ftruncate { - /** - * Asynchronous ftruncate(2) - Truncate a file to a specified length. - * @param fd A file descriptor. - * @param len If not specified, defaults to `0`. - */ - export function __promisify__(fd: number, len?: number | null): Promise; - } - - /** - * Synchronous ftruncate(2) - Truncate a file to a specified length. - * @param fd A file descriptor. - * @param len If not specified, defaults to `0`. - */ - export function ftruncateSync(fd: number, len?: number | null): void; - - /** - * Asynchronous chown(2) - Change ownership of a file. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function chown(path: PathLike, uid: number, gid: number, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace chown { - /** - * Asynchronous chown(2) - Change ownership of a file. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function __promisify__(path: PathLike, uid: number, gid: number): Promise; - } - - /** - * Synchronous chown(2) - Change ownership of a file. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function chownSync(path: PathLike, uid: number, gid: number): void; - - /** - * Asynchronous fchown(2) - Change ownership of a file. - * @param fd A file descriptor. - */ - export function fchown(fd: number, uid: number, gid: number, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace fchown { - /** - * Asynchronous fchown(2) - Change ownership of a file. - * @param fd A file descriptor. - */ - export function __promisify__(fd: number, uid: number, gid: number): Promise; - } - - /** - * Synchronous fchown(2) - Change ownership of a file. - * @param fd A file descriptor. - */ - export function fchownSync(fd: number, uid: number, gid: number): void; - - /** - * Asynchronous lchown(2) - Change ownership of a file. Does not dereference symbolic links. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function lchown(path: PathLike, uid: number, gid: number, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. 
Exported members do not exist at runtime. - export namespace lchown { - /** - * Asynchronous lchown(2) - Change ownership of a file. Does not dereference symbolic links. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function __promisify__(path: PathLike, uid: number, gid: number): Promise; - } - - /** - * Synchronous lchown(2) - Change ownership of a file. Does not dereference symbolic links. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function lchownSync(path: PathLike, uid: number, gid: number): void; - - /** - * Asynchronous chmod(2) - Change permissions of a file. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. - */ - export function chmod(path: PathLike, mode: string | number, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace chmod { - /** - * Asynchronous chmod(2) - Change permissions of a file. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. - */ - export function __promisify__(path: PathLike, mode: string | number): Promise; - } - - /** - * Synchronous chmod(2) - Change permissions of a file. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. - */ - export function chmodSync(path: PathLike, mode: string | number): void; - - /** - * Asynchronous fchmod(2) - Change permissions of a file. - * @param fd A file descriptor. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. - */ - export function fchmod(fd: number, mode: string | number, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace fchmod { - /** - * Asynchronous fchmod(2) - Change permissions of a file. - * @param fd A file descriptor. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. - */ - export function __promisify__(fd: number, mode: string | number): Promise; - } - - /** - * Synchronous fchmod(2) - Change permissions of a file. - * @param fd A file descriptor. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. - */ - export function fchmodSync(fd: number, mode: string | number): void; - - /** - * Asynchronous lchmod(2) - Change permissions of a file. Does not dereference symbolic links. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. - */ - export function lchmod(path: PathLike, mode: string | number, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace lchmod { - /** - * Asynchronous lchmod(2) - Change permissions of a file. Does not dereference symbolic links. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param mode A file mode. 
If a string is passed, it is parsed as an octal integer. - */ - export function __promisify__(path: PathLike, mode: string | number): Promise; - } - - /** - * Synchronous lchmod(2) - Change permissions of a file. Does not dereference symbolic links. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. - */ - export function lchmodSync(path: PathLike, mode: string | number): void; - - /** - * Asynchronous stat(2) - Get file status. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function stat(path: PathLike, callback: (err: NodeJS.ErrnoException, stats: Stats) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace stat { - /** - * Asynchronous stat(2) - Get file status. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function __promisify__(path: PathLike): Promise; - } - - /** - * Synchronous stat(2) - Get file status. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function statSync(path: PathLike): Stats; - - /** - * Asynchronous fstat(2) - Get file status. - * @param fd A file descriptor. - */ - export function fstat(fd: number, callback: (err: NodeJS.ErrnoException, stats: Stats) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace fstat { - /** - * Asynchronous fstat(2) - Get file status. - * @param fd A file descriptor. - */ - export function __promisify__(fd: number): Promise; - } - - /** - * Synchronous fstat(2) - Get file status. - * @param fd A file descriptor. - */ - export function fstatSync(fd: number): Stats; - - /** - * Asynchronous lstat(2) - Get file status. Does not dereference symbolic links. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function lstat(path: PathLike, callback: (err: NodeJS.ErrnoException, stats: Stats) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace lstat { - /** - * Asynchronous lstat(2) - Get file status. Does not dereference symbolic links. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function __promisify__(path: PathLike): Promise; - } - - /** - * Synchronous lstat(2) - Get file status. Does not dereference symbolic links. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function lstatSync(path: PathLike): Stats; - - /** - * Asynchronous link(2) - Create a new link (also known as a hard link) to an existing file. - * @param existingPath A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function link(existingPath: PathLike, newPath: PathLike, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace link { - /** - * Asynchronous link(2) - Create a new link (also known as a hard link) to an existing file. 
- * @param existingPath A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function link(existingPath: PathLike, newPath: PathLike): Promise; - } - - /** - * Synchronous link(2) - Create a new link (also known as a hard link) to an existing file. - * @param existingPath A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function linkSync(existingPath: PathLike, newPath: PathLike): void; - - /** - * Asynchronous symlink(2) - Create a new symbolic link to an existing file. - * @param target A path to an existing file. If a URL is provided, it must use the `file:` protocol. - * @param path A path to the new symlink. If a URL is provided, it must use the `file:` protocol. - * @param type May be set to `'dir'`, `'file'`, or `'junction'` (default is `'file'`) and is only available on Windows (ignored on other platforms). - * When using `'junction'`, the `target` argument will automatically be normalized to an absolute path. - */ - export function symlink(target: PathLike, path: PathLike, type: symlink.Type | undefined | null, callback: (err: NodeJS.ErrnoException) => void): void; - - /** - * Asynchronous symlink(2) - Create a new symbolic link to an existing file. - * @param target A path to an existing file. If a URL is provided, it must use the `file:` protocol. - * @param path A path to the new symlink. If a URL is provided, it must use the `file:` protocol. - */ - export function symlink(target: PathLike, path: PathLike, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace symlink { - /** - * Asynchronous symlink(2) - Create a new symbolic link to an existing file. - * @param target A path to an existing file. If a URL is provided, it must use the `file:` protocol. - * @param path A path to the new symlink. If a URL is provided, it must use the `file:` protocol. - * @param type May be set to `'dir'`, `'file'`, or `'junction'` (default is `'file'`) and is only available on Windows (ignored on other platforms). - * When using `'junction'`, the `target` argument will automatically be normalized to an absolute path. - */ - export function __promisify__(target: PathLike, path: PathLike, type?: string | null): Promise; - - export type Type = "dir" | "file" | "junction"; - } - - /** - * Synchronous symlink(2) - Create a new symbolic link to an existing file. - * @param target A path to an existing file. If a URL is provided, it must use the `file:` protocol. - * @param path A path to the new symlink. If a URL is provided, it must use the `file:` protocol. - * @param type May be set to `'dir'`, `'file'`, or `'junction'` (default is `'file'`) and is only available on Windows (ignored on other platforms). - * When using `'junction'`, the `target` argument will automatically be normalized to an absolute path. - */ - export function symlinkSync(target: PathLike, path: PathLike, type?: symlink.Type | null): void; - - /** - * Asynchronous readlink(2) - read value of a symbolic link. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. 
- */ - export function readlink(path: PathLike, options: { encoding?: BufferEncoding | null } | BufferEncoding | undefined | null, callback: (err: NodeJS.ErrnoException, linkString: string) => void): void; - - /** - * Asynchronous readlink(2) - read value of a symbolic link. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function readlink(path: PathLike, options: { encoding: "buffer" } | "buffer", callback: (err: NodeJS.ErrnoException, linkString: Buffer) => void): void; - - /** - * Asynchronous readlink(2) - read value of a symbolic link. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function readlink(path: PathLike, options: { encoding?: string | null } | string | undefined | null, callback: (err: NodeJS.ErrnoException, linkString: string | Buffer) => void): void; - - /** - * Asynchronous readlink(2) - read value of a symbolic link. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function readlink(path: PathLike, callback: (err: NodeJS.ErrnoException, linkString: string) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace readlink { - /** - * Asynchronous readlink(2) - read value of a symbolic link. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function __promisify__(path: PathLike, options?: { encoding?: BufferEncoding | null } | BufferEncoding | null): Promise; - - /** - * Asynchronous readlink(2) - read value of a symbolic link. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function __promisify__(path: PathLike, options: { encoding: "buffer" } | "buffer"): Promise; - - /** - * Asynchronous readlink(2) - read value of a symbolic link. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function __promisify__(path: PathLike, options?: { encoding?: string | null } | string | null): Promise; - } - - /** - * Synchronous readlink(2) - read value of a symbolic link. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function readlinkSync(path: PathLike, options?: { encoding?: BufferEncoding | null } | BufferEncoding | null): string; - - /** - * Synchronous readlink(2) - read value of a symbolic link. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
- * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function readlinkSync(path: PathLike, options: { encoding: "buffer" } | "buffer"): Buffer; - - /** - * Synchronous readlink(2) - read value of a symbolic link. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function readlinkSync(path: PathLike, options?: { encoding?: string | null } | string | null): string | Buffer; - - /** - * Asynchronous realpath(3) - return the canonicalized absolute pathname. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function realpath(path: PathLike, options: { encoding?: BufferEncoding | null } | BufferEncoding | undefined | null, callback: (err: NodeJS.ErrnoException, resolvedPath: string) => void): void; - - /** - * Asynchronous realpath(3) - return the canonicalized absolute pathname. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function realpath(path: PathLike, options: { encoding: "buffer" } | "buffer", callback: (err: NodeJS.ErrnoException, resolvedPath: Buffer) => void): void; - - /** - * Asynchronous realpath(3) - return the canonicalized absolute pathname. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function realpath(path: PathLike, options: { encoding?: string | null } | string | undefined | null, callback: (err: NodeJS.ErrnoException, resolvedPath: string | Buffer) => void): void; - - /** - * Asynchronous realpath(3) - return the canonicalized absolute pathname. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function realpath(path: PathLike, callback: (err: NodeJS.ErrnoException, resolvedPath: string) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace realpath { - /** - * Asynchronous realpath(3) - return the canonicalized absolute pathname. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function __promisify__(path: PathLike, options?: { encoding?: BufferEncoding | null } | BufferEncoding | null): Promise; - - /** - * Asynchronous realpath(3) - return the canonicalized absolute pathname. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. 
- */ - export function __promisify__(path: PathLike, options: { encoding: "buffer" } | "buffer"): Promise; - - /** - * Asynchronous realpath(3) - return the canonicalized absolute pathname. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function __promisify__(path: PathLike, options?: { encoding?: string | null } | string | null): Promise; - - export function native(path: PathLike, options: { encoding?: BufferEncoding | null } | BufferEncoding | undefined | null, callback: (err: NodeJS.ErrnoException, resolvedPath: string) => void): void; - export function native(path: PathLike, options: { encoding: "buffer" } | "buffer", callback: (err: NodeJS.ErrnoException, resolvedPath: Buffer) => void): void; - export function native(path: PathLike, options: { encoding?: string | null } | string | undefined | null, callback: (err: NodeJS.ErrnoException, resolvedPath: string | Buffer) => void): void; - export function native(path: PathLike, callback: (err: NodeJS.ErrnoException, resolvedPath: string) => void): void; - } - - /** - * Synchronous realpath(3) - return the canonicalized absolute pathname. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function realpathSync(path: PathLike, options?: { encoding?: BufferEncoding | null } | BufferEncoding | null): string; - - /** - * Synchronous realpath(3) - return the canonicalized absolute pathname. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function realpathSync(path: PathLike, options: { encoding: "buffer" } | "buffer"): Buffer; - - /** - * Synchronous realpath(3) - return the canonicalized absolute pathname. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function realpathSync(path: PathLike, options?: { encoding?: string | null } | string | null): string | Buffer; - - export namespace realpathSync { - export function native(path: PathLike, options?: { encoding?: BufferEncoding | null } | BufferEncoding | null): string; - export function native(path: PathLike, options: { encoding: "buffer" } | "buffer"): Buffer; - export function native(path: PathLike, options?: { encoding?: string | null } | string | null): string | Buffer; - } - - /** - * Asynchronous unlink(2) - delete a name and possibly the file it refers to. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function unlink(path: PathLike, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace unlink { - /** - * Asynchronous unlink(2) - delete a name and possibly the file it refers to. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
- */ - export function __promisify__(path: PathLike): Promise; - } - - /** - * Synchronous unlink(2) - delete a name and possibly the file it refers to. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function unlinkSync(path: PathLike): void; - - /** - * Asynchronous rmdir(2) - delete a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function rmdir(path: PathLike, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace rmdir { - /** - * Asynchronous rmdir(2) - delete a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function __promisify__(path: PathLike): Promise; - } - - /** - * Synchronous rmdir(2) - delete a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function rmdirSync(path: PathLike): void; - - /** - * Asynchronous mkdir(2) - create a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. - */ - export function mkdir(path: PathLike, mode: number | string | undefined | null, callback: (err: NodeJS.ErrnoException) => void): void; - - /** - * Asynchronous mkdir(2) - create a directory with a mode of `0o777`. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function mkdir(path: PathLike, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace mkdir { - /** - * Asynchronous mkdir(2) - create a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. - */ - export function __promisify__(path: PathLike, mode?: number | string | null): Promise; - } - - /** - * Synchronous mkdir(2) - create a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. - */ - export function mkdirSync(path: PathLike, mode?: number | string | null): void; - - /** - * Asynchronously creates a unique temporary directory. - * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function mkdtemp(prefix: string, options: { encoding?: BufferEncoding | null } | BufferEncoding | undefined | null, callback: (err: NodeJS.ErrnoException, folder: string) => void): void; - - /** - * Asynchronously creates a unique temporary directory. - * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. 
- */ - export function mkdtemp(prefix: string, options: "buffer" | { encoding: "buffer" }, callback: (err: NodeJS.ErrnoException, folder: Buffer) => void): void; - - /** - * Asynchronously creates a unique temporary directory. - * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function mkdtemp(prefix: string, options: { encoding?: string | null } | string | undefined | null, callback: (err: NodeJS.ErrnoException, folder: string | Buffer) => void): void; - - /** - * Asynchronously creates a unique temporary directory. - * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. - */ - export function mkdtemp(prefix: string, callback: (err: NodeJS.ErrnoException, folder: string) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace mkdtemp { - /** - * Asynchronously creates a unique temporary directory. - * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function __promisify__(prefix: string, options?: { encoding?: BufferEncoding | null } | BufferEncoding | null): Promise; - - /** - * Asynchronously creates a unique temporary directory. - * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function __promisify__(prefix: string, options: { encoding: "buffer" } | "buffer"): Promise; - - /** - * Asynchronously creates a unique temporary directory. - * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function __promisify__(prefix: string, options?: { encoding?: string | null } | string | null): Promise; - } - - /** - * Synchronously creates a unique temporary directory. - * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function mkdtempSync(prefix: string, options?: { encoding?: BufferEncoding | null } | BufferEncoding | null): string; - - /** - * Synchronously creates a unique temporary directory. - * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function mkdtempSync(prefix: string, options: { encoding: "buffer" } | "buffer"): Buffer; - - /** - * Synchronously creates a unique temporary directory. - * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. 
- * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function mkdtempSync(prefix: string, options?: { encoding?: string | null } | string | null): string | Buffer; - - /** - * Asynchronous readdir(3) - read a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function readdir(path: PathLike, options: { encoding: BufferEncoding | null } | BufferEncoding | undefined | null, callback: (err: NodeJS.ErrnoException, files: string[]) => void): void; - - /** - * Asynchronous readdir(3) - read a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function readdir(path: PathLike, options: { encoding: "buffer" } | "buffer", callback: (err: NodeJS.ErrnoException, files: Buffer[]) => void): void; - - /** - * Asynchronous readdir(3) - read a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function readdir(path: PathLike, options: { encoding?: string | null } | string | undefined | null, callback: (err: NodeJS.ErrnoException, files: string[] | Buffer[]) => void): void; - - /** - * Asynchronous readdir(3) - read a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function readdir(path: PathLike, callback: (err: NodeJS.ErrnoException, files: string[]) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace readdir { - /** - * Asynchronous readdir(3) - read a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function __promisify__(path: PathLike, options?: { encoding: BufferEncoding | null } | BufferEncoding | null): Promise; - - /** - * Asynchronous readdir(3) - read a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function __promisify__(path: PathLike, options: "buffer" | { encoding: "buffer" }): Promise; - - /** - * Asynchronous readdir(3) - read a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function __promisify__(path: PathLike, options?: { encoding?: string | null } | string | null): Promise; - } - - /** - * Synchronous readdir(3) - read a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
- * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function readdirSync(path: PathLike, options?: { encoding: BufferEncoding | null } | BufferEncoding | null): string[]; - - /** - * Synchronous readdir(3) - read a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function readdirSync(path: PathLike, options: { encoding: "buffer" } | "buffer"): Buffer[]; - - /** - * Synchronous readdir(3) - read a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - export function readdirSync(path: PathLike, options?: { encoding?: string | null } | string | null): string[] | Buffer[]; - - /** - * Asynchronous close(2) - close a file descriptor. - * @param fd A file descriptor. - */ - export function close(fd: number, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace close { - /** - * Asynchronous close(2) - close a file descriptor. - * @param fd A file descriptor. - */ - export function __promisify__(fd: number): Promise; - } - - /** - * Synchronous close(2) - close a file descriptor. - * @param fd A file descriptor. - */ - export function closeSync(fd: number): void; - - /** - * Asynchronous open(2) - open and possibly create a file. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. If not supplied, defaults to `0o666`. - */ - export function open(path: PathLike, flags: string | number, mode: string | number | undefined | null, callback: (err: NodeJS.ErrnoException, fd: number) => void): void; - - /** - * Asynchronous open(2) - open and possibly create a file. If the file is created, its mode will be `0o666`. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - export function open(path: PathLike, flags: string | number, callback: (err: NodeJS.ErrnoException, fd: number) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace open { - /** - * Asynchronous open(2) - open and possibly create a file. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. If not supplied, defaults to `0o666`. - */ - export function __promisify__(path: PathLike, flags: string | number, mode?: string | number | null): Promise; - } - - /** - * Synchronous open(2) - open and possibly create a file, returning a file descriptor.. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. If not supplied, defaults to `0o666`. 
- */ - export function openSync(path: PathLike, flags: string | number, mode?: string | number | null): number; - - /** - * Asynchronously change file timestamps of the file referenced by the supplied path. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param atime The last access time. If a string is provided, it will be coerced to number. - * @param mtime The last modified time. If a string is provided, it will be coerced to number. - */ - export function utimes(path: PathLike, atime: string | number | Date, mtime: string | number | Date, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace utimes { - /** - * Asynchronously change file timestamps of the file referenced by the supplied path. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param atime The last access time. If a string is provided, it will be coerced to number. - * @param mtime The last modified time. If a string is provided, it will be coerced to number. - */ - export function __promisify__(path: PathLike, atime: string | number | Date, mtime: string | number | Date): Promise; - } - - /** - * Synchronously change file timestamps of the file referenced by the supplied path. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param atime The last access time. If a string is provided, it will be coerced to number. - * @param mtime The last modified time. If a string is provided, it will be coerced to number. - */ - export function utimesSync(path: PathLike, atime: string | number | Date, mtime: string | number | Date): void; - - /** - * Asynchronously change file timestamps of the file referenced by the supplied file descriptor. - * @param fd A file descriptor. - * @param atime The last access time. If a string is provided, it will be coerced to number. - * @param mtime The last modified time. If a string is provided, it will be coerced to number. - */ - export function futimes(fd: number, atime: string | number | Date, mtime: string | number | Date, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace futimes { - /** - * Asynchronously change file timestamps of the file referenced by the supplied file descriptor. - * @param fd A file descriptor. - * @param atime The last access time. If a string is provided, it will be coerced to number. - * @param mtime The last modified time. If a string is provided, it will be coerced to number. - */ - export function __promisify__(fd: number, atime: string | number | Date, mtime: string | number | Date): Promise; - } - - /** - * Synchronously change file timestamps of the file referenced by the supplied file descriptor. - * @param fd A file descriptor. - * @param atime The last access time. If a string is provided, it will be coerced to number. - * @param mtime The last modified time. If a string is provided, it will be coerced to number. - */ - export function futimesSync(fd: number, atime: string | number | Date, mtime: string | number | Date): void; - - /** - * Asynchronous fsync(2) - synchronize a file's in-core state with the underlying storage device. - * @param fd A file descriptor. 
- */
- export function fsync(fd: number, callback: (err: NodeJS.ErrnoException) => void): void;
-
- // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime.
- export namespace fsync {
- /**
- * Asynchronous fsync(2) - synchronize a file's in-core state with the underlying storage device.
- * @param fd A file descriptor.
- */
- export function __promisify__(fd: number): Promise<void>;
- }
-
- /**
- * Synchronous fsync(2) - synchronize a file's in-core state with the underlying storage device.
- * @param fd A file descriptor.
- */
- export function fsyncSync(fd: number): void;
-
- /**
- * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor.
- * @param fd A file descriptor.
- * @param offset The part of the buffer to be written. If not supplied, defaults to `0`.
- * @param length The number of bytes to write. If not supplied, defaults to `buffer.length - offset`.
- * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position.
- */
- export function write<TBuffer extends Buffer | Uint8Array>(fd: number, buffer: TBuffer, offset: number | undefined | null, length: number | undefined | null, position: number | undefined | null, callback: (err: NodeJS.ErrnoException, written: number, buffer: TBuffer) => void): void;
-
- /**
- * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor.
- * @param fd A file descriptor.
- * @param offset The part of the buffer to be written. If not supplied, defaults to `0`.
- * @param length The number of bytes to write. If not supplied, defaults to `buffer.length - offset`.
- */
- export function write<TBuffer extends Buffer | Uint8Array>(fd: number, buffer: TBuffer, offset: number | undefined | null, length: number | undefined | null, callback: (err: NodeJS.ErrnoException, written: number, buffer: TBuffer) => void): void;
-
- /**
- * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor.
- * @param fd A file descriptor.
- * @param offset The part of the buffer to be written. If not supplied, defaults to `0`.
- */
- export function write<TBuffer extends Buffer | Uint8Array>(fd: number, buffer: TBuffer, offset: number | undefined | null, callback: (err: NodeJS.ErrnoException, written: number, buffer: TBuffer) => void): void;
-
- /**
- * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor.
- * @param fd A file descriptor.
- */
- export function write<TBuffer extends Buffer | Uint8Array>(fd: number, buffer: TBuffer, callback: (err: NodeJS.ErrnoException, written: number, buffer: TBuffer) => void): void;
-
- /**
- * Asynchronously writes `string` to the file referenced by the supplied file descriptor.
- * @param fd A file descriptor.
- * @param string A string to write. If something other than a string is supplied it will be coerced to a string.
- * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position.
- * @param encoding The expected string encoding.
- */
- export function write(fd: number, string: any, position: number | undefined | null, encoding: string | undefined | null, callback: (err: NodeJS.ErrnoException, written: number, str: string) => void): void;
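// --- Illustrative usage (editor's note, not part of the original typings): a minimal
// sketch of the callback-style write(fd, buffer, offset, length, position, callback)
// overload declared above. The file name "example.txt" is an assumption for the demo.
import * as fs from "fs";

const payload = Buffer.from("hello world\n", "utf8");
fs.open("example.txt", "w", (openErr, fd) => {
    if (openErr) throw openErr;
    // Write the whole buffer at the very start of the file.
    fs.write(fd, payload, 0, payload.length, 0, (writeErr, written) => {
        if (writeErr) throw writeErr;
        console.log(`wrote ${written} bytes`);
        fs.close(fd, (closeErr) => { if (closeErr) throw closeErr; });
    });
});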
-
- /**
- * Asynchronously writes `string` to the file referenced by the supplied file descriptor.
- * @param fd A file descriptor.
- * @param string A string to write. If something other than a string is supplied it will be coerced to a string.
- * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position.
- */
- export function write(fd: number, string: any, position: number | undefined | null, callback: (err: NodeJS.ErrnoException, written: number, str: string) => void): void;
-
- /**
- * Asynchronously writes `string` to the file referenced by the supplied file descriptor.
- * @param fd A file descriptor.
- * @param string A string to write. If something other than a string is supplied it will be coerced to a string.
- */
- export function write(fd: number, string: any, callback: (err: NodeJS.ErrnoException, written: number, str: string) => void): void;
-
- // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime.
- export namespace write {
- /**
- * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor.
- * @param fd A file descriptor.
- * @param offset The part of the buffer to be written. If not supplied, defaults to `0`.
- * @param length The number of bytes to write. If not supplied, defaults to `buffer.length - offset`.
- * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position.
- */
- export function __promisify__<TBuffer extends Buffer | Uint8Array>(fd: number, buffer?: TBuffer, offset?: number, length?: number, position?: number | null): Promise<{ bytesWritten: number, buffer: TBuffer }>;
-
- /**
- * Asynchronously writes `string` to the file referenced by the supplied file descriptor.
- * @param fd A file descriptor.
- * @param string A string to write. If something other than a string is supplied it will be coerced to a string.
- * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position.
- * @param encoding The expected string encoding.
- */
- export function __promisify__(fd: number, string: any, position?: number | null, encoding?: string | null): Promise<{ bytesWritten: number, buffer: string }>;
- }
-
- /**
- * Synchronously writes `buffer` to the file referenced by the supplied file descriptor, returning the number of bytes written.
- * @param fd A file descriptor.
- * @param offset The part of the buffer to be written. If not supplied, defaults to `0`.
- * @param length The number of bytes to write. If not supplied, defaults to `buffer.length - offset`.
- * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position.
- */
- export function writeSync(fd: number, buffer: Buffer | Uint8Array, offset?: number | null, length?: number | null, position?: number | null): number;
-
- /**
- * Synchronously writes `string` to the file referenced by the supplied file descriptor, returning the number of bytes written.
- * @param fd A file descriptor.
- * @param string A string to write. If something other than a string is supplied it will be coerced to a string.
- * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position.
- * @param encoding The expected string encoding.
- */
- export function writeSync(fd: number, string: any, position?: number | null, encoding?: string | null): number;
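// --- Illustrative usage (editor's note, not part of the original typings): the
// __promisify__ declarations above describe the custom promisified form that
// util.promisify(fs.write) is documented to return, i.e. a promise of
// { bytesWritten, buffer }. "example.txt" is an assumption for the demo.
import * as fs from "fs";
import { promisify } from "util";

const writeAsync = promisify(fs.write);

async function appendGreeting(): Promise<void> {
    const fd = fs.openSync("example.txt", "a");
    try {
        const { bytesWritten } = await writeAsync(fd, Buffer.from("hi\n"));
        console.log(`appended ${bytesWritten} bytes`);
    } finally {
        fs.closeSync(fd);
    }
}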
-
- /**
- * Asynchronously reads data from the file referenced by the supplied file descriptor.
- * @param fd A file descriptor.
- * @param buffer The buffer that the data will be written to.
- * @param offset The offset in the buffer at which to start writing.
- * @param length The number of bytes to read.
- * @param position The offset from the beginning of the file from which data should be read. If `null`, data will be read from the current position.
- */
- export function read<TBuffer extends Buffer | Uint8Array>(fd: number, buffer: TBuffer, offset: number, length: number, position: number | null, callback?: (err: NodeJS.ErrnoException, bytesRead: number, buffer: TBuffer) => void): void;
-
- // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime.
- export namespace read {
- /**
- * @param fd A file descriptor.
- * @param buffer The buffer that the data will be written to.
- * @param offset The offset in the buffer at which to start writing.
- * @param length The number of bytes to read.
- * @param position The offset from the beginning of the file from which data should be read. If `null`, data will be read from the current position.
- */
- export function __promisify__<TBuffer extends Buffer | Uint8Array>(fd: number, buffer: TBuffer, offset: number, length: number, position: number | null): Promise<{ bytesRead: number, buffer: TBuffer }>;
- }
-
- /**
- * Synchronously reads data from the file referenced by the supplied file descriptor, returning the number of bytes read.
- * @param fd A file descriptor.
- * @param buffer The buffer that the data will be written to.
- * @param offset The offset in the buffer at which to start writing.
- * @param length The number of bytes to read.
- * @param position The offset from the beginning of the file from which data should be read. If `null`, data will be read from the current position.
- */
- export function readSync(fd: number, buffer: Buffer | Uint8Array, offset: number, length: number, position: number | null): number;
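// --- Illustrative usage (editor's note, not part of the original typings): reading a
// fixed-size chunk with the readSync overload above. "example.txt" is an assumption.
import * as fs from "fs";

const fd = fs.openSync("example.txt", "r");
const chunk = Buffer.alloc(64);
// Read up to 64 bytes from the start of the file into `chunk`.
const bytesRead = fs.readSync(fd, chunk, 0, chunk.length, 0);
console.log(chunk.slice(0, bytesRead).toString("utf8"));
fs.closeSync(fd);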
-
- /**
- * Asynchronously reads the entire contents of a file.
- * @param path A path to a file. If a URL is provided, it must use the `file:` protocol.
- * If a file descriptor is provided, the underlying file will _not_ be closed automatically.
- * @param options An object that may contain an optional flag.
- * If a flag is not provided, it defaults to `'r'`.
- */
- export function readFile(path: PathLike | number, options: { encoding?: null; flag?: string; } | undefined | null, callback: (err: NodeJS.ErrnoException, data: Buffer) => void): void;
-
- /**
- * Asynchronously reads the entire contents of a file.
- * @param path A path to a file. If a URL is provided, it must use the `file:` protocol.
- * URL support is _experimental_.
- * If a file descriptor is provided, the underlying file will _not_ be closed automatically.
- * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag.
- * If a flag is not provided, it defaults to `'r'`.
- */
- export function readFile(path: PathLike | number, options: { encoding: string; flag?: string; } | string, callback: (err: NodeJS.ErrnoException, data: string) => void): void;
-
- /**
- * Asynchronously reads the entire contents of a file.
- * @param path A path to a file. If a URL is provided, it must use the `file:` protocol.
- * URL support is _experimental_.
- * If a file descriptor is provided, the underlying file will _not_ be closed automatically.
- * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag.
- * If a flag is not provided, it defaults to `'r'`.
- */
- export function readFile(path: PathLike | number, options: { encoding?: string | null; flag?: string; } | string | undefined | null, callback: (err: NodeJS.ErrnoException, data: string | Buffer) => void): void;
-
- /**
- * Asynchronously reads the entire contents of a file.
- * @param path A path to a file. If a URL is provided, it must use the `file:` protocol.
- * If a file descriptor is provided, the underlying file will _not_ be closed automatically.
- */
- export function readFile(path: PathLike | number, callback: (err: NodeJS.ErrnoException, data: Buffer) => void): void;
-
- // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime.
- export namespace readFile {
- /**
- * Asynchronously reads the entire contents of a file.
- * @param path A path to a file. If a URL is provided, it must use the `file:` protocol.
- * If a file descriptor is provided, the underlying file will _not_ be closed automatically.
- * @param options An object that may contain an optional flag.
- * If a flag is not provided, it defaults to `'r'`.
- */
- export function __promisify__(path: PathLike | number, options?: { encoding?: null; flag?: string; } | null): Promise<Buffer>;
-
- /**
- * Asynchronously reads the entire contents of a file.
- * @param path A path to a file. If a URL is provided, it must use the `file:` protocol.
- * URL support is _experimental_.
- * If a file descriptor is provided, the underlying file will _not_ be closed automatically.
- * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag.
- * If a flag is not provided, it defaults to `'r'`.
- */
- export function __promisify__(path: PathLike | number, options: { encoding: string; flag?: string; } | string): Promise<string>;
-
- /**
- * Asynchronously reads the entire contents of a file.
- * @param path A path to a file. If a URL is provided, it must use the `file:` protocol.
- * URL support is _experimental_.
- * If a file descriptor is provided, the underlying file will _not_ be closed automatically.
- * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag.
- * If a flag is not provided, it defaults to `'r'`.
- */
- export function __promisify__(path: PathLike | number, options?: { encoding?: string | null; flag?: string; } | string | null): Promise<string | Buffer>;
- }
-
- /**
- * Synchronously reads the entire contents of a file.
- * @param path A path to a file. If a URL is provided, it must use the `file:` protocol.
- * URL support is _experimental_.
- * If a file descriptor is provided, the underlying file will _not_ be closed automatically.
- * @param options An object that may contain an optional flag. If a flag is not provided, it defaults to `'r'`.
- */
- export function readFileSync(path: PathLike | number, options?: { encoding?: null; flag?: string; } | null): Buffer;
-
- /**
- * Synchronously reads the entire contents of a file.
- * @param path A path to a file. If a URL is provided, it must use the `file:` protocol.
- * URL support is _experimental_.
- * If a file descriptor is provided, the underlying file will _not_ be closed automatically.
- * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag.
- * If a flag is not provided, it defaults to `'r'`.
- */ - export function readFileSync(path: PathLike | number, options: { encoding: string; flag?: string; } | string): string; - - /** - * Synchronously reads the entire contents of a file. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - * If a file descriptor is provided, the underlying file will _not_ be closed automatically. - * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. - * If a flag is not provided, it defaults to `'r'`. - */ - export function readFileSync(path: PathLike | number, options?: { encoding?: string | null; flag?: string; } | string | null): string | Buffer; - - /** - * Asynchronously writes data to a file, replacing the file if it already exists. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - * If a file descriptor is provided, the underlying file will _not_ be closed automatically. - * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. - * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. - * If `encoding` is not supplied, the default of `'utf8'` is used. - * If `mode` is not supplied, the default of `0o666` is used. - * If `mode` is a string, it is parsed as an octal integer. - * If `flag` is not supplied, the default of `'w'` is used. - */ - export function writeFile(path: PathLike | number, data: any, options: { encoding?: string | null; mode?: number | string; flag?: string; } | string | undefined | null, callback: (err: NodeJS.ErrnoException) => void): void; - - /** - * Asynchronously writes data to a file, replacing the file if it already exists. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - * If a file descriptor is provided, the underlying file will _not_ be closed automatically. - * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. - */ - export function writeFile(path: PathLike | number, data: any, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace writeFile { - /** - * Asynchronously writes data to a file, replacing the file if it already exists. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - * If a file descriptor is provided, the underlying file will _not_ be closed automatically. - * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. - * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. - * If `encoding` is not supplied, the default of `'utf8'` is used. - * If `mode` is not supplied, the default of `0o666` is used. - * If `mode` is a string, it is parsed as an octal integer. - * If `flag` is not supplied, the default of `'w'` is used. 
- */ - export function __promisify__(path: PathLike | number, data: any, options?: { encoding?: string | null; mode?: number | string; flag?: string; } | string | null): Promise; - } - - /** - * Synchronously writes data to a file, replacing the file if it already exists. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - * If a file descriptor is provided, the underlying file will _not_ be closed automatically. - * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. - * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. - * If `encoding` is not supplied, the default of `'utf8'` is used. - * If `mode` is not supplied, the default of `0o666` is used. - * If `mode` is a string, it is parsed as an octal integer. - * If `flag` is not supplied, the default of `'w'` is used. - */ - export function writeFileSync(path: PathLike | number, data: any, options?: { encoding?: string | null; mode?: number | string; flag?: string; } | string | null): void; - - /** - * Asynchronously append data to a file, creating the file if it does not exist. - * @param file A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - * If a file descriptor is provided, the underlying file will _not_ be closed automatically. - * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. - * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. - * If `encoding` is not supplied, the default of `'utf8'` is used. - * If `mode` is not supplied, the default of `0o666` is used. - * If `mode` is a string, it is parsed as an octal integer. - * If `flag` is not supplied, the default of `'a'` is used. - */ - export function appendFile(file: PathLike | number, data: any, options: { encoding?: string | null, mode?: string | number, flag?: string } | string | undefined | null, callback: (err: NodeJS.ErrnoException) => void): void; - - /** - * Asynchronously append data to a file, creating the file if it does not exist. - * @param file A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - * If a file descriptor is provided, the underlying file will _not_ be closed automatically. - * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. - */ - export function appendFile(file: PathLike | number, data: any, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace appendFile { - /** - * Asynchronously append data to a file, creating the file if it does not exist. - * @param file A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - * If a file descriptor is provided, the underlying file will _not_ be closed automatically. - * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. - * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. 
- * If `encoding` is not supplied, the default of `'utf8'` is used. - * If `mode` is not supplied, the default of `0o666` is used. - * If `mode` is a string, it is parsed as an octal integer. - * If `flag` is not supplied, the default of `'a'` is used. - */ - export function __promisify__(file: PathLike | number, data: any, options?: { encoding?: string | null, mode?: string | number, flag?: string } | string | null): Promise; - } - - /** - * Synchronously append data to a file, creating the file if it does not exist. - * @param file A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - * If a file descriptor is provided, the underlying file will _not_ be closed automatically. - * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. - * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. - * If `encoding` is not supplied, the default of `'utf8'` is used. - * If `mode` is not supplied, the default of `0o666` is used. - * If `mode` is a string, it is parsed as an octal integer. - * If `flag` is not supplied, the default of `'a'` is used. - */ - export function appendFileSync(file: PathLike | number, data: any, options?: { encoding?: string | null; mode?: number | string; flag?: string; } | string | null): void; - - /** - * Watch for changes on `filename`. The callback `listener` will be called each time the file is accessed. - */ - export function watchFile(filename: PathLike, options: { persistent?: boolean; interval?: number; } | undefined, listener: (curr: Stats, prev: Stats) => void): void; - - /** - * Watch for changes on `filename`. The callback `listener` will be called each time the file is accessed. - * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - */ - export function watchFile(filename: PathLike, listener: (curr: Stats, prev: Stats) => void): void; - - /** - * Stop watching for changes on `filename`. - * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - */ - export function unwatchFile(filename: PathLike, listener?: (curr: Stats, prev: Stats) => void): void; - - /** - * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. - * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. - * If `encoding` is not supplied, the default of `'utf8'` is used. - * If `persistent` is not supplied, the default of `true` is used. - * If `recursive` is not supplied, the default of `false` is used. - */ - export function watch(filename: PathLike, options: { encoding?: BufferEncoding | null, persistent?: boolean, recursive?: boolean } | BufferEncoding | undefined | null, listener?: (event: string, filename: string) => void): FSWatcher; - - /** - * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. - * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. 
- * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. - * If `encoding` is not supplied, the default of `'utf8'` is used. - * If `persistent` is not supplied, the default of `true` is used. - * If `recursive` is not supplied, the default of `false` is used. - */ - export function watch(filename: PathLike, options: { encoding: "buffer", persistent?: boolean, recursive?: boolean } | "buffer", listener?: (event: string, filename: Buffer) => void): FSWatcher; - - /** - * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. - * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. - * If `encoding` is not supplied, the default of `'utf8'` is used. - * If `persistent` is not supplied, the default of `true` is used. - * If `recursive` is not supplied, the default of `false` is used. - */ - export function watch(filename: PathLike, options: { encoding?: string | null, persistent?: boolean, recursive?: boolean } | string | null, listener?: (event: string, filename: string | Buffer) => void): FSWatcher; - - /** - * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. - * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - */ - export function watch(filename: PathLike, listener?: (event: string, filename: string) => any): FSWatcher; - - /** - * Asynchronously tests whether or not the given path exists by checking with the file system. - * @deprecated - * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - */ - export function exists(path: PathLike, callback: (exists: boolean) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace exists { - /** - * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - */ - function __promisify__(path: PathLike): Promise; - } - - /** - * Synchronously tests whether or not the given path exists by checking with the file system. - * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - */ - export function existsSync(path: PathLike): boolean; - - export namespace constants { - // File Access Constants - - /** Constant for fs.access(). File is visible to the calling process. */ - export const F_OK: number; - - /** Constant for fs.access(). File can be read by the calling process. */ - export const R_OK: number; - - /** Constant for fs.access(). File can be written by the calling process. */ - export const W_OK: number; - - /** Constant for fs.access(). File can be executed by the calling process. */ - export const X_OK: number; - - // File Copy Constants - - /** Constant for fs.copyFile. Flag indicating the destination file should not be overwritten if it already exists. */ - export const COPYFILE_EXCL: number; - - /** Constant for fs.copyFile. 
copy operation will attempt to create a copy-on-write reflink. If the underlying platform does not support copy-on-write, then a fallback copy mechanism is used. */ - export const COPYFILE_FICLONE: number; - - /** Constant for fs.copyFile. Copy operation will attempt to create a copy-on-write reflink. If the underlying platform does not support copy-on-write, then the operation will fail with an error. */ - export const COPYFILE_FICLONE_FORCE: number; - - // File Open Constants - - /** Constant for fs.open(). Flag indicating to open a file for read-only access. */ - export const O_RDONLY: number; - - /** Constant for fs.open(). Flag indicating to open a file for write-only access. */ - export const O_WRONLY: number; - - /** Constant for fs.open(). Flag indicating to open a file for read-write access. */ - export const O_RDWR: number; - - /** Constant for fs.open(). Flag indicating to create the file if it does not already exist. */ - export const O_CREAT: number; - - /** Constant for fs.open(). Flag indicating that opening a file should fail if the O_CREAT flag is set and the file already exists. */ - export const O_EXCL: number; - - /** Constant for fs.open(). Flag indicating that if path identifies a terminal device, opening the path shall not cause that terminal to become the controlling terminal for the process (if the process does not already have one). */ - export const O_NOCTTY: number; - - /** Constant for fs.open(). Flag indicating that if the file exists and is a regular file, and the file is opened successfully for write access, its length shall be truncated to zero. */ - export const O_TRUNC: number; - - /** Constant for fs.open(). Flag indicating that data will be appended to the end of the file. */ - export const O_APPEND: number; - - /** Constant for fs.open(). Flag indicating that the open should fail if the path is not a directory. */ - export const O_DIRECTORY: number; - - /** Constant for fs.open(). Flag indicating reading accesses to the file system will no longer result in an update to the atime information associated with the file. This flag is available on Linux operating systems only. */ - export const O_NOATIME: number; - - /** Constant for fs.open(). Flag indicating that the open should fail if the path is a symbolic link. */ - export const O_NOFOLLOW: number; - - /** Constant for fs.open(). Flag indicating that the file is opened for synchronous I/O. */ - export const O_SYNC: number; - - /** Constant for fs.open(). Flag indicating that the file is opened for synchronous I/O with write operations waiting for data integrity. */ - export const O_DSYNC: number; - - /** Constant for fs.open(). Flag indicating to open the symbolic link itself rather than the resource it is pointing to. */ - export const O_SYMLINK: number; - - /** Constant for fs.open(). When set, an attempt will be made to minimize caching effects of file I/O. */ - export const O_DIRECT: number; - - /** Constant for fs.open(). Flag indicating to open the file in nonblocking mode when possible. */ - export const O_NONBLOCK: number; - - // File Type Constants - - /** Constant for fs.Stats mode property for determining a file's type. Bit mask used to extract the file type code. */ - export const S_IFMT: number; - - /** Constant for fs.Stats mode property for determining a file's type. File type constant for a regular file. */ - export const S_IFREG: number; - - /** Constant for fs.Stats mode property for determining a file's type. File type constant for a directory. 
*/ - export const S_IFDIR: number; - - /** Constant for fs.Stats mode property for determining a file's type. File type constant for a character-oriented device file. */ - export const S_IFCHR: number; - - /** Constant for fs.Stats mode property for determining a file's type. File type constant for a block-oriented device file. */ - export const S_IFBLK: number; - - /** Constant for fs.Stats mode property for determining a file's type. File type constant for a FIFO/pipe. */ - export const S_IFIFO: number; - - /** Constant for fs.Stats mode property for determining a file's type. File type constant for a symbolic link. */ - export const S_IFLNK: number; - - /** Constant for fs.Stats mode property for determining a file's type. File type constant for a socket. */ - export const S_IFSOCK: number; - - // File Mode Constants - - /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by owner. */ - export const S_IRWXU: number; - - /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by owner. */ - export const S_IRUSR: number; - - /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by owner. */ - export const S_IWUSR: number; - - /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by owner. */ - export const S_IXUSR: number; - - /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by group. */ - export const S_IRWXG: number; - - /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by group. */ - export const S_IRGRP: number; - - /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by group. */ - export const S_IWGRP: number; - - /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by group. */ - export const S_IXGRP: number; - - /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by others. */ - export const S_IRWXO: number; - - /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by others. */ - export const S_IROTH: number; - - /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by others. */ - export const S_IWOTH: number; - - /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by others. */ - export const S_IXOTH: number; - } - - /** - * Asynchronously tests a user's permissions for the file specified by path. - * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - */ - export function access(path: PathLike, mode: number | undefined, callback: (err: NodeJS.ErrnoException) => void): void; - - /** - * Asynchronously tests a user's permissions for the file specified by path. - * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. 
- */ - export function access(path: PathLike, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace access { - /** - * Asynchronously tests a user's permissions for the file specified by path. - * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - */ - export function __promisify__(path: PathLike, mode?: number): Promise; - } - - /** - * Synchronously tests a user's permissions for the file specified by path. - * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - */ - export function accessSync(path: PathLike, mode?: number): void; - - /** - * Returns a new `ReadStream` object. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - */ - export function createReadStream(path: PathLike, options?: string | { - flags?: string; - encoding?: string; - fd?: number; - mode?: number; - autoClose?: boolean; - start?: number; - end?: number; - highWaterMark?: number; - }): ReadStream; - - /** - * Returns a new `WriteStream` object. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - */ - export function createWriteStream(path: PathLike, options?: string | { - flags?: string; - encoding?: string; - fd?: number; - mode?: number; - autoClose?: boolean; - start?: number; - }): WriteStream; - - /** - * Asynchronous fdatasync(2) - synchronize a file's in-core state with storage device. - * @param fd A file descriptor. - */ - export function fdatasync(fd: number, callback: (err: NodeJS.ErrnoException) => void): void; - - // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime. - export namespace fdatasync { - /** - * Asynchronous fdatasync(2) - synchronize a file's in-core state with storage device. - * @param fd A file descriptor. - */ - export function __promisify__(fd: number): Promise; - } - - /** - * Synchronous fdatasync(2) - synchronize a file's in-core state with storage device. - * @param fd A file descriptor. - */ - export function fdatasyncSync(fd: number): void; - - /** - * Asynchronously copies src to dest. By default, dest is overwritten if it already exists. - * No arguments other than a possible exception are given to the callback function. - * Node.js makes no guarantees about the atomicity of the copy operation. - * If an error occurs after the destination file has been opened for writing, Node.js will attempt - * to remove the destination. - * @param src A path to the source file. - * @param dest A path to the destination file. - */ - export function copyFile(src: PathLike, dest: PathLike, callback: (err: NodeJS.ErrnoException) => void): void; - /** - * Asynchronously copies src to dest. By default, dest is overwritten if it already exists. - * No arguments other than a possible exception are given to the callback function. - * Node.js makes no guarantees about the atomicity of the copy operation. - * If an error occurs after the destination file has been opened for writing, Node.js will attempt - * to remove the destination. - * @param src A path to the source file. - * @param dest A path to the destination file. 
- * @param flags An integer that specifies the behavior of the copy operation. The only supported flag is fs.constants.COPYFILE_EXCL, which causes the copy operation to fail if dest already exists.
- */
- export function copyFile(src: PathLike, dest: PathLike, flags: number, callback: (err: NodeJS.ErrnoException) => void): void;
-
- // NOTE: This namespace provides design-time support for util.promisify. Exported members do not exist at runtime.
- export namespace copyFile {
- /**
- * Asynchronously copies src to dest. By default, dest is overwritten if it already exists.
- * No arguments other than a possible exception are given to the callback function.
- * Node.js makes no guarantees about the atomicity of the copy operation.
- * If an error occurs after the destination file has been opened for writing, Node.js will attempt
- * to remove the destination.
- * @param src A path to the source file.
- * @param dest A path to the destination file.
- * @param flags An optional integer that specifies the behavior of the copy operation. The only supported flag is fs.constants.COPYFILE_EXCL, which causes the copy operation to fail if dest already exists.
- */
- export function __promisify__(src: PathLike, dst: PathLike, flags?: number): Promise<void>;
- }
-
- /**
- * Synchronously copies src to dest. By default, dest is overwritten if it already exists.
- * Node.js makes no guarantees about the atomicity of the copy operation.
- * If an error occurs after the destination file has been opened for writing, Node.js will attempt
- * to remove the destination.
- * @param src A path to the source file.
- * @param dest A path to the destination file.
- * @param flags An optional integer that specifies the behavior of the copy operation. The only supported flag is fs.constants.COPYFILE_EXCL, which causes the copy operation to fail if dest already exists.
- */
- export function copyFileSync(src: PathLike, dest: PathLike, flags?: number): void;
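// --- Illustrative usage (editor's note, not part of the original typings): copyFile with
// the COPYFILE_EXCL flag described above, so the copy fails if the destination already
// exists. The file names are assumptions for the demo.
import * as fs from "fs";

fs.copyFile("source.txt", "dest.txt", fs.constants.COPYFILE_EXCL, (err) => {
    if (err) {
        console.error("copy failed (dest.txt may already exist):", err.message);
        return;
    }
    console.log("source.txt was copied to dest.txt");
});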
- */ - sync(): Promise<void>; - - /** - * Asynchronously reads data from the file. - * The `FileHandle` must have been opened for reading. - * @param buffer The buffer that the data will be written to. - * @param offset The offset in the buffer at which to start writing. - * @param length The number of bytes to read. - * @param position The offset from the beginning of the file from which data should be read. If `null`, data will be read from the current position. - */ - read<TBuffer extends Buffer | Uint8Array>(buffer: TBuffer, offset?: number | null, length?: number | null, position?: number | null): Promise<{ bytesRead: number, buffer: TBuffer }>; - - /** - * Asynchronously reads the entire contents of a file. The underlying file will _not_ be closed automatically. - * The `FileHandle` must have been opened for reading. - * @param options An object that may contain an optional flag. - * If a flag is not provided, it defaults to `'r'`. - */ - readFile(options?: { encoding?: null, flag?: string | number } | null): Promise<Buffer>; - - /** - * Asynchronously reads the entire contents of a file. The underlying file will _not_ be closed automatically. - * The `FileHandle` must have been opened for reading. - * @param options An object that may contain an optional flag. - * If a flag is not provided, it defaults to `'r'`. - */ - readFile(options: { encoding: BufferEncoding, flag?: string | number } | BufferEncoding): Promise<string>; - - /** - * Asynchronously reads the entire contents of a file. The underlying file will _not_ be closed automatically. - * The `FileHandle` must have been opened for reading. - * @param options An object that may contain an optional flag. - * If a flag is not provided, it defaults to `'r'`. - */ - readFile(options?: { encoding?: string | null, flag?: string | number } | string | null): Promise<string | Buffer>; - - /** - * Asynchronous fstat(2) - Get file status. - */ - stat(): Promise<Stats>; - - /** - * Asynchronous ftruncate(2) - Truncate a file to a specified length. - * @param len If not specified, defaults to `0`. - */ - truncate(len?: number): Promise<void>; - - /** - * Asynchronously change file timestamps of the file. - * @param atime The last access time. If a string is provided, it will be coerced to number. - * @param mtime The last modified time. If a string is provided, it will be coerced to number. - */ - utimes(atime: string | number | Date, mtime: string | number | Date): Promise<void>; - - /** - * Asynchronously writes `buffer` to the file. - * The `FileHandle` must have been opened for writing. - * @param buffer The buffer that the data will be written to. - * @param offset The part of the buffer to be written. If not supplied, defaults to `0`. - * @param length The number of bytes to write. If not supplied, defaults to `buffer.length - offset`. - * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. - */ - write<TBuffer extends Buffer | Uint8Array>(buffer: TBuffer, offset?: number | null, length?: number | null, position?: number | null): Promise<{ bytesWritten: number, buffer: TBuffer }>; - - /** - * Asynchronously writes `string` to the file. - * The `FileHandle` must have been opened for writing. - * It is unsafe to call `write()` multiple times on the same file without waiting for the `Promise` to be resolved (or rejected). For this scenario, `fs.createWriteStream` is strongly recommended. - * @param string A string to write. If something other than a string is supplied it will be coerced to a string.
- * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. - * @param encoding The expected string encoding. - */ - write(data: any, position?: number | null, encoding?: string | null): Promise<{ bytesWritten: number, buffer: string }>; - - /** - * Asynchronously writes data to a file, replacing the file if it already exists. The underlying file will _not_ be closed automatically. - * The `FileHandle` must have been opened for writing. - * It is unsafe to call `writeFile()` multiple times on the same file without waiting for the `Promise` to be resolved (or rejected). - * @param data The data to write. If something other than a `Buffer` or `Uint8Array` is provided, the value is coerced to a string. - * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. - * If `encoding` is not supplied, the default of `'utf8'` is used. - * If `mode` is not supplied, the default of `0o666` is used. - * If `mode` is a string, it is parsed as an octal integer. - * If `flag` is not supplied, the default of `'w'` is used. - */ - writeFile(data: any, options?: { encoding?: string | null, mode?: string | number, flag?: string | number } | string | null): Promise; - - /** - * Asynchronous close(2) - close a `FileHandle`. - */ - close(): Promise; - } - - /** - * Asynchronously tests a user's permissions for the file specified by path. - * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - */ - function access(path: PathLike, mode?: number): Promise; - - /** - * Asynchronously copies `src` to `dest`. By default, `dest` is overwritten if it already exists. - * Node.js makes no guarantees about the atomicity of the copy operation. - * If an error occurs after the destination file has been opened for writing, Node.js will attempt - * to remove the destination. - * @param src A path to the source file. - * @param dest A path to the destination file. - * @param flags An optional integer that specifies the behavior of the copy operation. The only - * supported flag is `fs.constants.COPYFILE_EXCL`, which causes the copy operation to fail if - * `dest` already exists. - */ - function copyFile(src: PathLike, dest: PathLike, flags?: number): Promise; - - /** - * Asynchronous open(2) - open and possibly create a file. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. If not - * supplied, defaults to `0o666`. - */ - function open(path: PathLike, flags: string | number, mode?: string | number): Promise; - - /** - * Asynchronously reads data from the file referenced by the supplied `FileHandle`. - * @param handle A `FileHandle`. - * @param buffer The buffer that the data will be written to. - * @param offset The offset in the buffer at which to start writing. - * @param length The number of bytes to read. - * @param position The offset from the beginning of the file from which data should be read. If - * `null`, data will be read from the current position. - */ - function read(handle: FileHandle, buffer: TBuffer, offset?: number | null, length?: number | null, position?: number | null): Promise<{ bytesRead: number, buffer: TBuffer }>; - - /** - * Asynchronously writes `buffer` to the file referenced by the supplied `FileHandle`. 
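// A minimal usage sketch, assuming Node.js >= 10 where the (then experimental)
// fs.promises API is available: open a FileHandle, read it as UTF-8, and close it
// explicitly, since FileHandle methods never close the underlying file themselves.
import { promises as fsPromises } from "fs";

async function readTextFile(file: string): Promise<string> {
    const handle = await fsPromises.open(file, "r");         // resolves to a FileHandle
    try {
        return await handle.readFile({ encoding: "utf8" });  // string-returning overload
    } finally {
        await handle.close();                                 // always release the descriptor
    }
}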
- * It is unsafe to call `fsPromises.write()` multiple times on the same file without waiting for the `Promise` to be resolved (or rejected). For this scenario, `fs.createWriteStream` is strongly recommended. - * @param handle A `FileHandle`. - * @param buffer The buffer that the data will be written to. - * @param offset The part of the buffer to be written. If not supplied, defaults to `0`. - * @param length The number of bytes to write. If not supplied, defaults to `buffer.length - offset`. - * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. - */ - function write(handle: FileHandle, buffer: TBuffer, offset?: number | null, length?: number | null, position?: number | null): Promise<{ bytesWritten: number, buffer: TBuffer }>; - - /** - * Asynchronously writes `string` to the file referenced by the supplied `FileHandle`. - * It is unsafe to call `fsPromises.write()` multiple times on the same file without waiting for the `Promise` to be resolved (or rejected). For this scenario, `fs.createWriteStream` is strongly recommended. - * @param handle A `FileHandle`. - * @param string A string to write. If something other than a string is supplied it will be coerced to a string. - * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. - * @param encoding The expected string encoding. - */ - function write(handle: FileHandle, string: any, position?: number | null, encoding?: string | null): Promise<{ bytesWritten: number, buffer: string }>; - - /** - * Asynchronous rename(2) - Change the name or location of a file or directory. - * @param oldPath A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - */ - function rename(oldPath: PathLike, newPath: PathLike): Promise; - - /** - * Asynchronous truncate(2) - Truncate a file to a specified length. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param len If not specified, defaults to `0`. - */ - function truncate(path: PathLike, len?: number): Promise; - - /** - * Asynchronous ftruncate(2) - Truncate a file to a specified length. - * @param handle A `FileHandle`. - * @param len If not specified, defaults to `0`. - */ - function ftruncate(handle: FileHandle, len?: number): Promise; - - /** - * Asynchronous rmdir(2) - delete a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - function rmdir(path: PathLike): Promise; - - /** - * Asynchronous fdatasync(2) - synchronize a file's in-core state with storage device. - * @param handle A `FileHandle`. - */ - function fdatasync(handle: FileHandle): Promise; - - /** - * Asynchronous fsync(2) - synchronize a file's in-core state with the underlying storage device. - * @param handle A `FileHandle`. - */ - function fsync(handle: FileHandle): Promise; - - /** - * Asynchronous mkdir(2) - create a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. - */ - function mkdir(path: PathLike, mode?: string | number): Promise; - - /** - * Asynchronous readdir(3) - read a directory. 
- * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - function readdir(path: PathLike, options?: { encoding?: BufferEncoding | null } | BufferEncoding | null): Promise<string[]>; - - /** - * Asynchronous readdir(3) - read a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - function readdir(path: PathLike, options: { encoding: "buffer" } | "buffer"): Promise<Buffer[]>; - - /** - * Asynchronous readdir(3) - read a directory. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - function readdir(path: PathLike, options?: { encoding?: string | null } | string | null): Promise<string[]>; - - /** - * Asynchronous readlink(2) - read value of a symbolic link. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - function readlink(path: PathLike, options?: { encoding?: BufferEncoding | null } | BufferEncoding | null): Promise<string>; - - /** - * Asynchronous readlink(2) - read value of a symbolic link. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - function readlink(path: PathLike, options: { encoding: "buffer" } | "buffer"): Promise<Buffer>; - - /** - * Asynchronous readlink(2) - read value of a symbolic link. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - function readlink(path: PathLike, options?: { encoding?: string | null } | string | null): Promise<string>; - - /** - * Asynchronous symlink(2) - Create a new symbolic link to an existing file. - * @param target A path to an existing file. If a URL is provided, it must use the `file:` protocol. - * @param path A path to the new symlink. If a URL is provided, it must use the `file:` protocol. - * @param type May be set to `'dir'`, `'file'`, or `'junction'` (default is `'file'`) and is only available on Windows (ignored on other platforms). - * When using `'junction'`, the `target` argument will automatically be normalized to an absolute path. - */ - function symlink(target: PathLike, path: PathLike, type?: string | null): Promise<void>; - - /** - * Asynchronous fstat(2) - Get file status. - * @param handle A `FileHandle`. - */ - function fstat(handle: FileHandle): Promise<Stats>; - - /** - * Asynchronous lstat(2) - Get file status. Does not dereference symbolic links. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - function lstat(path: PathLike): Promise<Stats>; - - /** - * Asynchronous stat(2) - Get file status. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol.
- */ - function stat(path: PathLike): Promise<Stats>; - - /** - * Asynchronous link(2) - Create a new link (also known as a hard link) to an existing file. - * @param existingPath A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - function link(existingPath: PathLike, newPath: PathLike): Promise<void>; - - /** - * Asynchronous unlink(2) - delete a name and possibly the file it refers to. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - function unlink(path: PathLike): Promise<void>; - - /** - * Asynchronous fchmod(2) - Change permissions of a file. - * @param handle A `FileHandle`. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. - */ - function fchmod(handle: FileHandle, mode: string | number): Promise<void>; - - /** - * Asynchronous chmod(2) - Change permissions of a file. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. - */ - function chmod(path: PathLike, mode: string | number): Promise<void>; - - /** - * Asynchronous lchmod(2) - Change permissions of a file. Does not dereference symbolic links. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param mode A file mode. If a string is passed, it is parsed as an octal integer. - */ - function lchmod(path: PathLike, mode: string | number): Promise<void>; - - /** - * Asynchronous lchown(2) - Change ownership of a file. Does not dereference symbolic links. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - function lchown(path: PathLike, uid: number, gid: number): Promise<void>; - - /** - * Asynchronous fchown(2) - Change ownership of a file. - * @param handle A `FileHandle`. - */ - function fchown(handle: FileHandle, uid: number, gid: number): Promise<void>; - - /** - * Asynchronous chown(2) - Change ownership of a file. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - */ - function chown(path: PathLike, uid: number, gid: number): Promise<void>; - - /** - * Asynchronously change file timestamps of the file referenced by the supplied path. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param atime The last access time. If a string is provided, it will be coerced to number. - * @param mtime The last modified time. If a string is provided, it will be coerced to number. - */ - function utimes(path: PathLike, atime: string | number | Date, mtime: string | number | Date): Promise<void>; - - /** - * Asynchronously change file timestamps of the file referenced by the supplied `FileHandle`. - * @param handle A `FileHandle`. - * @param atime The last access time. If a string is provided, it will be coerced to number. - * @param mtime The last modified time. If a string is provided, it will be coerced to number. - */ - function futimes(handle: FileHandle, atime: string | number | Date, mtime: string | number | Date): Promise<void>; - - /** - * Asynchronous realpath(3) - return the canonicalized absolute pathname. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used.
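// A minimal usage sketch, assuming Node.js >= 10: combine fsPromises.stat() with
// fsPromises.chmod() as declared above. The 0o600 mode is an arbitrary example value.
import { promises as fsPromises } from "fs";

async function makePrivate(file: string): Promise<void> {
    const stats = await fsPromises.stat(file);  // resolves to a Stats object
    if (stats.isFile()) {
        await fsPromises.chmod(file, 0o600);    // resolves with no value on success
    }
}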
- */ - function realpath(path: PathLike, options?: { encoding?: BufferEncoding | null } | BufferEncoding | null): Promise; - - /** - * Asynchronous realpath(3) - return the canonicalized absolute pathname. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - function realpath(path: PathLike, options: { encoding: "buffer" } | "buffer"): Promise; - - /** - * Asynchronous realpath(3) - return the canonicalized absolute pathname. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - function realpath(path: PathLike, options?: { encoding?: string | null } | string | null): Promise; - - /** - * Asynchronously creates a unique temporary directory. - * Generates six random characters to be appended behind a required `prefix` to create a unique temporary directory. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - function mkdtemp(prefix: string, options?: { encoding?: BufferEncoding | null } | BufferEncoding | null): Promise; - - /** - * Asynchronously creates a unique temporary directory. - * Generates six random characters to be appended behind a required `prefix` to create a unique temporary directory. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - function mkdtemp(prefix: string, options: { encoding: "buffer" } | "buffer"): Promise; - - /** - * Asynchronously creates a unique temporary directory. - * Generates six random characters to be appended behind a required `prefix` to create a unique temporary directory. - * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. - */ - function mkdtemp(prefix: string, options?: { encoding?: string | null } | string | null): Promise; - - /** - * Asynchronously writes data to a file, replacing the file if it already exists. - * It is unsafe to call `fsPromises.writeFile()` multiple times on the same file without waiting for the `Promise` to be resolved (or rejected). - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - * If a `FileHandle` is provided, the underlying file will _not_ be closed automatically. - * @param data The data to write. If something other than a `Buffer` or `Uint8Array` is provided, the value is coerced to a string. - * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. - * If `encoding` is not supplied, the default of `'utf8'` is used. - * If `mode` is not supplied, the default of `0o666` is used. - * If `mode` is a string, it is parsed as an octal integer. - * If `flag` is not supplied, the default of `'w'` is used. - */ - function writeFile(path: PathLike | FileHandle, data: any, options?: { encoding?: string | null, mode?: string | number, flag?: string | number } | string | null): Promise; - - /** - * Asynchronously append data to a file, creating the file if it does not exist. - * @param file A path to a file. 
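// A minimal usage sketch, assuming Node.js >= 10: mkdtemp() appends six random
// characters to the prefix, and writeFile() replaces the target if it already exists.
// The "npm-note-" prefix and "note.txt" name are arbitrary example values.
import { promises as fsPromises } from "fs";
import * as os from "os";
import * as path from "path";

async function writeTempNote(contents: string): Promise<string> {
    const dir = await fsPromises.mkdtemp(path.join(os.tmpdir(), "npm-note-"));
    const file = path.join(dir, "note.txt");
    await fsPromises.writeFile(file, contents, { encoding: "utf8" });
    return file;
}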
If a URL is provided, it must use the `file:` protocol. - * URL support is _experimental_. - * If a `FileHandle` is provided, the underlying file will _not_ be closed automatically. - * @param data The data to write. If something other than a `Buffer` or `Uint8Array` is provided, the value is coerced to a string. - * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. - * If `encoding` is not supplied, the default of `'utf8'` is used. - * If `mode` is not supplied, the default of `0o666` is used. - * If `mode` is a string, it is parsed as an octal integer. - * If `flag` is not supplied, the default of `'a'` is used. - */ - function appendFile(path: PathLike | FileHandle, data: any, options?: { encoding?: string | null, mode?: string | number, flag?: string | number } | string | null): Promise; - - /** - * Asynchronously reads the entire contents of a file. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * If a `FileHandle` is provided, the underlying file will _not_ be closed automatically. - * @param options An object that may contain an optional flag. - * If a flag is not provided, it defaults to `'r'`. - */ - function readFile(path: PathLike | FileHandle, options?: { encoding?: null, flag?: string | number } | null): Promise; - - /** - * Asynchronously reads the entire contents of a file. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * If a `FileHandle` is provided, the underlying file will _not_ be closed automatically. - * @param options An object that may contain an optional flag. - * If a flag is not provided, it defaults to `'r'`. - */ - function readFile(path: PathLike | FileHandle, options: { encoding: BufferEncoding, flag?: string | number } | BufferEncoding): Promise; - - /** - * Asynchronously reads the entire contents of a file. - * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. - * If a `FileHandle` is provided, the underlying file will _not_ be closed automatically. - * @param options An object that may contain an optional flag. - * If a flag is not provided, it defaults to `'r'`. - */ - function readFile(path: PathLike | FileHandle, options?: { encoding?: string | null, flag?: string | number } | string | null): Promise; - } -} - -declare module "path" { - /** - * A parsed path object generated by path.parse() or consumed by path.format(). - */ - export interface ParsedPath { - /** - * The root of the path such as '/' or 'c:\' - */ - root: string; - /** - * The full directory path such as '/home/user/dir' or 'c:\path\dir' - */ - dir: string; - /** - * The file name including extension (if any) such as 'index.html' - */ - base: string; - /** - * The file extension (if any) such as '.html' - */ - ext: string; - /** - * The file name without extension (if any) such as 'index' - */ - name: string; - } - export interface FormatInputPathObject { - /** - * The root of the path such as '/' or 'c:\' - */ - root?: string; - /** - * The full directory path such as '/home/user/dir' or 'c:\path\dir' - */ - dir?: string; - /** - * The file name including extension (if any) such as 'index.html' - */ - base?: string; - /** - * The file extension (if any) such as '.html' - */ - ext?: string; - /** - * The file name without extension (if any) such as 'index' - */ - name?: string; - } - - /** - * Normalize a string path, reducing '..' and '.' parts. 
- * When multiple slashes are found, they're replaced by a single one; when the path contains a trailing slash, it is preserved. On Windows backslashes are used. - * - * @param p string path to normalize. - */ - export function normalize(p: string): string; - /** - * Join all arguments together and normalize the resulting path. - * Arguments must be strings. In v0.8, non-string arguments were silently ignored. In v0.10 and up, an exception is thrown. - * - * @param paths paths to join. - */ - export function join(...paths: string[]): string; - /** - * The right-most parameter is considered {to}. Other parameters are considered an array of {from}. - * - * Starting from leftmost {from} paramter, resolves {to} to an absolute path. - * - * If {to} isn't already absolute, {from} arguments are prepended in right to left order, until an absolute path is found. If after using all {from} paths still no absolute path is found, the current working directory is used as well. The resulting path is normalized, and trailing slashes are removed unless the path gets resolved to the root directory. - * - * @param pathSegments string paths to join. Non-string arguments are ignored. - */ - export function resolve(...pathSegments: string[]): string; - /** - * Determines whether {path} is an absolute path. An absolute path will always resolve to the same location, regardless of the working directory. - * - * @param path path to test. - */ - export function isAbsolute(path: string): boolean; - /** - * Solve the relative path from {from} to {to}. - * At times we have two absolute paths, and we need to derive the relative path from one to the other. This is actually the reverse transform of path.resolve. - */ - export function relative(from: string, to: string): string; - /** - * Return the directory name of a path. Similar to the Unix dirname command. - * - * @param p the path to evaluate. - */ - export function dirname(p: string): string; - /** - * Return the last portion of a path. Similar to the Unix basename command. - * Often used to extract the file name from a fully qualified path. - * - * @param p the path to evaluate. - * @param ext optionally, an extension to remove from the result. - */ - export function basename(p: string, ext?: string): string; - /** - * Return the extension of the path, from the last '.' to end of string in the last portion of the path. - * If there is no '.' in the last portion of the path or the first character of it is '.', then it returns an empty string - * - * @param p the path to evaluate. - */ - export function extname(p: string): string; - /** - * The platform-specific file separator. '\\' or '/'. - */ - export var sep: '\\' | '/'; - /** - * The platform-specific file delimiter. ';' or ':'. - */ - export var delimiter: ';' | ':'; - /** - * Returns an object from a path string - the opposite of format(). - * - * @param pathString path to evaluate. - */ - export function parse(pathString: string): ParsedPath; - /** - * Returns a path string from an object - the opposite of parse(). - * - * @param pathString path to evaluate. 
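// A minimal usage sketch of the "path" declarations above; the literal results in the
// comments assume a POSIX platform, where sep is "/" and delimiter is ":".
import * as path from "path";

const full = path.join("/home", "user", "..", "user", "docs", "index.html"); // "/home/user/docs/index.html"
const parts = path.parse(full);     // { root: "/", dir: "/home/user/docs", base: "index.html", ext: ".html", name: "index" }
const rebuilt = path.format(parts); // "/home/user/docs/index.html" again; format() inverts parse()
const name = path.basename(full, ".html"); // "index"
console.log(rebuilt === full, name);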
- */ - export function format(pathObject: FormatInputPathObject): string; - - export module posix { - export function normalize(p: string): string; - export function join(...paths: any[]): string; - export function resolve(...pathSegments: any[]): string; - export function isAbsolute(p: string): boolean; - export function relative(from: string, to: string): string; - export function dirname(p: string): string; - export function basename(p: string, ext?: string): string; - export function extname(p: string): string; - export var sep: string; - export var delimiter: string; - export function parse(p: string): ParsedPath; - export function format(pP: FormatInputPathObject): string; - } - - export module win32 { - export function normalize(p: string): string; - export function join(...paths: any[]): string; - export function resolve(...pathSegments: any[]): string; - export function isAbsolute(p: string): boolean; - export function relative(from: string, to: string): string; - export function dirname(p: string): string; - export function basename(p: string, ext?: string): string; - export function extname(p: string): string; - export var sep: string; - export var delimiter: string; - export function parse(p: string): ParsedPath; - export function format(pP: FormatInputPathObject): string; - } -} - -declare module "string_decoder" { - export interface NodeStringDecoder { - write(buffer: Buffer): string; - end(buffer?: Buffer): string; - } - export var StringDecoder: { - new(encoding?: string): NodeStringDecoder; - }; -} - -declare module "tls" { - import * as crypto from "crypto"; - import * as dns from "dns"; - import * as net from "net"; - import * as stream from "stream"; - - var CLIENT_RENEG_LIMIT: number; - var CLIENT_RENEG_WINDOW: number; - - export interface Certificate { - /** - * Country code. - */ - C: string; - /** - * Street. - */ - ST: string; - /** - * Locality. - */ - L: string; - /** - * Organization. - */ - O: string; - /** - * Organizational unit. - */ - OU: string; - /** - * Common name. - */ - CN: string; - } - - export interface PeerCertificate { - subject: Certificate; - issuer: Certificate; - subjectaltname: string; - infoAccess: { [index: string]: string[] | undefined }; - modulus: string; - exponent: string; - valid_from: string; - valid_to: string; - fingerprint: string; - ext_key_usage: string[]; - serialNumber: string; - raw: Buffer; - } - - export interface DetailedPeerCertificate extends PeerCertificate { - issuerCertificate: DetailedPeerCertificate; - } - - export interface CipherNameAndProtocol { - /** - * The cipher name. - */ - name: string; - /** - * SSL/TLS protocol version. - */ - version: string; - } - - export class TLSSocket extends net.Socket { - /** - * Construct a new tls.TLSSocket object from an existing TCP socket. - */ - constructor(socket: net.Socket, options?: { - /** - * An optional TLS context object from tls.createSecureContext() - */ - secureContext?: SecureContext, - /** - * If true the TLS socket will be instantiated in server-mode. - * Defaults to false. - */ - isServer?: boolean, - /** - * An optional net.Server instance. - */ - server?: net.Server, - /** - * If true the server will request a certificate from clients that - * connect and attempt to verify that certificate. Defaults to - * false. - */ - requestCert?: boolean, - /** - * If true the server will reject any connection which is not - * authorized with the list of supplied CAs. This option only has an - * effect if requestCert is true. Defaults to false. 
- */ - rejectUnauthorized?: boolean, - /** - * An array of strings or a Buffer naming possible NPN protocols. - * (Protocols should be ordered by their priority.) - */ - NPNProtocols?: string[] | Buffer[] | Uint8Array[] | Buffer | Uint8Array, - /** - * An array of strings or a Buffer naming possible ALPN protocols. - * (Protocols should be ordered by their priority.) When the server - * receives both NPN and ALPN extensions from the client, ALPN takes - * precedence over NPN and the server does not send an NPN extension - * to the client. - */ - ALPNProtocols?: string[] | Buffer[] | Uint8Array[] | Buffer | Uint8Array, - /** - * SNICallback(servername, cb) A function that will be - * called if the client supports SNI TLS extension. Two arguments - * will be passed when called: servername and cb. SNICallback should - * invoke cb(null, ctx), where ctx is a SecureContext instance. - * (tls.createSecureContext(...) can be used to get a proper - * SecureContext.) If SNICallback wasn't provided the default callback - * with high-level API will be used (see below). - */ - SNICallback?: (servername: string, cb: (err: Error | null, ctx: SecureContext) => void) => void, - /** - * An optional Buffer instance containing a TLS session. - */ - session?: Buffer, - /** - * If true, specifies that the OCSP status request extension will be - * added to the client hello and an 'OCSPResponse' event will be - * emitted on the socket before establishing a secure communication - */ - requestOCSP?: boolean - }); - - /** - * A boolean that is true if the peer certificate was signed by one of the specified CAs, otherwise false. - */ - authorized: boolean; - /** - * The reason why the peer's certificate has not been verified. - * This property becomes available only when tlsSocket.authorized === false. - */ - authorizationError: Error; - /** - * Static boolean value, always true. - * May be used to distinguish TLS sockets from regular ones. - */ - encrypted: boolean; - /** - * Returns an object representing the cipher name and the SSL/TLS protocol version of the current connection. - * @returns Returns an object representing the cipher name - * and the SSL/TLS protocol version of the current connection. - */ - getCipher(): CipherNameAndProtocol; - /** - * Returns an object representing the peer's certificate. - * The returned object has some properties corresponding to the field of the certificate. - * If detailed argument is true the full chain with issuer property will be returned, - * if false only the top certificate without issuer property. - * If the peer does not provide a certificate, it returns null or an empty object. - * @param detailed - If true; the full chain with issuer property will be returned. - * @returns An object representing the peer's certificate. - */ - getPeerCertificate(detailed: true): DetailedPeerCertificate; - getPeerCertificate(detailed?: false): PeerCertificate; - getPeerCertificate(detailed?: boolean): PeerCertificate | DetailedPeerCertificate; - /** - * Returns a string containing the negotiated SSL/TLS protocol version of the current connection. - * The value `'unknown'` will be returned for connected sockets that have not completed the handshaking process. - * The value `null` will be returned for server sockets or disconnected client sockets. - * See https://www.openssl.org/docs/man1.0.2/ssl/SSL_get_version.html for more information. 
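// A minimal usage sketch of the TLSSocket accessors declared above, assuming an
// arbitrary example endpoint (example.com:443) and Node.js 8+.
import * as tls from "tls";

const socket = tls.connect({ host: "example.com", port: 443, servername: "example.com" }, () => {
    console.log("authorized:", socket.authorized);                  // signed by a trusted CA?
    console.log("cipher:", socket.getCipher().name);                // negotiated cipher suite
    console.log("protocol:", socket.getProtocol());                 // e.g. "TLSv1.2", or null
    console.log("peer CN:", socket.getPeerCertificate().subject.CN);
    socket.end();
});
socket.on("error", (err) => console.error("TLS error:", err));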
- * @returns negotiated SSL/TLS protocol version of the current connection - */ - getProtocol(): string | null; - /** - * Could be used to speed up handshake establishment when reconnecting to the server. - * @returns ASN.1 encoded TLS session or undefined if none was negotiated. - */ - getSession(): any; - /** - * NOTE: Works only with client TLS sockets. - * Useful only for debugging, for session reuse provide session option to tls.connect(). - * @returns TLS session ticket or undefined if none was negotiated. - */ - getTLSTicket(): any; - /** - * Initiate TLS renegotiation process. - * - * NOTE: Can be used to request peer's certificate after the secure connection has been established. - * ANOTHER NOTE: When running as the server, socket will be destroyed with an error after handshakeTimeout timeout. - * @param options - The options may contain the following fields: rejectUnauthorized, - * requestCert (See tls.createServer() for details). - * @param callback - callback(err) will be executed with null as err, once the renegotiation - * is successfully completed. - */ - renegotiate(options: { rejectUnauthorized?: boolean, requestCert?: boolean }, callback: (err: Error | null) => void): any; - /** - * Set maximum TLS fragment size (default and maximum value is: 16384, minimum is: 512). - * Smaller fragment size decreases buffering latency on the client: large fragments are buffered by - * the TLS layer until the entire fragment is received and its integrity is verified; - * large fragments can span multiple roundtrips, and their processing can be delayed due to packet - * loss or reordering. However, smaller fragments add extra TLS framing bytes and CPU overhead, - * which may decrease overall server throughput. - * @param size - TLS fragment size (default and maximum value is: 16384, minimum is: 512). - * @returns Returns true on success, false otherwise. - */ - setMaxSendFragment(size: number): boolean; - - /** - * events.EventEmitter - * 1. OCSPResponse - * 2. 
secureConnect - */ - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "OCSPResponse", listener: (response: Buffer) => void): this; - addListener(event: "secureConnect", listener: () => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "OCSPResponse", response: Buffer): boolean; - emit(event: "secureConnect"): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "OCSPResponse", listener: (response: Buffer) => void): this; - on(event: "secureConnect", listener: () => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "OCSPResponse", listener: (response: Buffer) => void): this; - once(event: "secureConnect", listener: () => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "OCSPResponse", listener: (response: Buffer) => void): this; - prependListener(event: "secureConnect", listener: () => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "OCSPResponse", listener: (response: Buffer) => void): this; - prependOnceListener(event: "secureConnect", listener: () => void): this; - } - - export interface TlsOptions extends SecureContextOptions { - handshakeTimeout?: number; - requestCert?: boolean; - rejectUnauthorized?: boolean; - NPNProtocols?: string[] | Buffer[] | Uint8Array[] | Buffer | Uint8Array; - ALPNProtocols?: string[] | Buffer[] | Uint8Array[] | Buffer | Uint8Array; - SNICallback?: (servername: string, cb: (err: Error | null, ctx: SecureContext) => void) => void; - sessionTimeout?: number; - ticketKeys?: Buffer; - } - - export interface ConnectionOptions extends SecureContextOptions { - host?: string; - port?: number; - path?: string; // Creates unix socket connection to path. If this option is specified, `host` and `port` are ignored. - socket?: net.Socket; // Establish secure connection on a given socket rather than creating a new socket - rejectUnauthorized?: boolean; // Defaults to true - NPNProtocols?: string[] | Buffer[] | Uint8Array[] | Buffer | Uint8Array; - ALPNProtocols?: string[] | Buffer[] | Uint8Array[] | Buffer | Uint8Array; - checkServerIdentity?: typeof checkServerIdentity; - servername?: string; // SNI TLS Extension - session?: Buffer; - minDHSize?: number; - secureContext?: SecureContext; // If not provided, the entire ConnectionOptions object will be passed to tls.createSecureContext() - lookup?: net.LookupFunction; - } - - export class Server extends net.Server { - addContext(hostName: string, credentials: { - key: string; - cert: string; - ca: string; - }): void; - - /** - * events.EventEmitter - * 1. tlsClientError - * 2. newSession - * 3. OCSPRequest - * 4. resumeSession - * 5. 
secureConnection - */ - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; - addListener(event: "newSession", listener: (sessionId: any, sessionData: any, callback: (err: Error, resp: Buffer) => void) => void): this; - addListener(event: "OCSPRequest", listener: (certificate: Buffer, issuer: Buffer, callback: Function) => void): this; - addListener(event: "resumeSession", listener: (sessionId: any, callback: (err: Error, sessionData: any) => void) => void): this; - addListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "tlsClientError", err: Error, tlsSocket: TLSSocket): boolean; - emit(event: "newSession", sessionId: any, sessionData: any, callback: (err: Error, resp: Buffer) => void): boolean; - emit(event: "OCSPRequest", certificate: Buffer, issuer: Buffer, callback: Function): boolean; - emit(event: "resumeSession", sessionId: any, callback: (err: Error, sessionData: any) => void): boolean; - emit(event: "secureConnection", tlsSocket: TLSSocket): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; - on(event: "newSession", listener: (sessionId: any, sessionData: any, callback: (err: Error, resp: Buffer) => void) => void): this; - on(event: "OCSPRequest", listener: (certificate: Buffer, issuer: Buffer, callback: Function) => void): this; - on(event: "resumeSession", listener: (sessionId: any, callback: (err: Error, sessionData: any) => void) => void): this; - on(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; - once(event: "newSession", listener: (sessionId: any, sessionData: any, callback: (err: Error, resp: Buffer) => void) => void): this; - once(event: "OCSPRequest", listener: (certificate: Buffer, issuer: Buffer, callback: Function) => void): this; - once(event: "resumeSession", listener: (sessionId: any, callback: (err: Error, sessionData: any) => void) => void): this; - once(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; - prependListener(event: "newSession", listener: (sessionId: any, sessionData: any, callback: (err: Error, resp: Buffer) => void) => void): this; - prependListener(event: "OCSPRequest", listener: (certificate: Buffer, issuer: Buffer, callback: Function) => void): this; - prependListener(event: "resumeSession", listener: (sessionId: any, callback: (err: Error, sessionData: any) => void) => void): this; - prependListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; - prependOnceListener(event: "newSession", listener: (sessionId: any, sessionData: any, callback: (err: Error, resp: Buffer) => void) => void): this; - prependOnceListener(event: "OCSPRequest", listener: (certificate: Buffer, issuer: Buffer, callback: Function) => 
void): this; - prependOnceListener(event: "resumeSession", listener: (sessionId: any, callback: (err: Error, sessionData: any) => void) => void): this; - prependOnceListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; - } - - export interface SecurePair { - encrypted: any; - cleartext: any; - } - - export interface SecureContextOptions { - pfx?: string | Buffer | Array; - key?: string | Buffer | Array; - passphrase?: string; - cert?: string | Buffer | Array; - ca?: string | Buffer | Array; - ciphers?: string; - honorCipherOrder?: boolean; - ecdhCurve?: string; - clientCertEngine?: string; - crl?: string | Buffer | Array; - dhparam?: string | Buffer; - secureOptions?: number; // Value is a numeric bitmask of the `SSL_OP_*` options - secureProtocol?: string; // SSL Method, e.g. SSLv23_method - sessionIdContext?: string; - } - - export interface SecureContext { - context: any; - } - - /* - * Verifies the certificate `cert` is issued to host `host`. - * @host The hostname to verify the certificate against - * @cert PeerCertificate representing the peer's certificate - * - * Returns Error object, populating it with the reason, host and cert on failure. On success, returns undefined. - */ - export function checkServerIdentity(host: string, cert: PeerCertificate): Error | undefined; - export function createServer(options: TlsOptions, secureConnectionListener?: (socket: TLSSocket) => void): Server; - export function connect(options: ConnectionOptions, secureConnectionListener?: () => void): TLSSocket; - export function connect(port: number, host?: string, options?: ConnectionOptions, secureConnectListener?: () => void): TLSSocket; - export function connect(port: number, options?: ConnectionOptions, secureConnectListener?: () => void): TLSSocket; - export function createSecurePair(credentials?: crypto.Credentials, isServer?: boolean, requestCert?: boolean, rejectUnauthorized?: boolean): SecurePair; - export function createSecureContext(details: SecureContextOptions): SecureContext; - export function getCiphers(): string[]; - - export var DEFAULT_ECDH_CURVE: string; -} - -declare module "crypto" { - import * as stream from "stream"; - - export interface Certificate { - exportChallenge(spkac: string | Buffer | NodeJS.TypedArray | DataView): Buffer; - exportPublicKey(spkac: string | Buffer | NodeJS.TypedArray | DataView): Buffer; - verifySpkac(spkac: Buffer | NodeJS.TypedArray | DataView): boolean; - } - export var Certificate: { - new(): Certificate; - (): Certificate; - }; - - /** @deprecated since v10.0.0 */ - export var fips: boolean; - - export interface CredentialDetails { - pfx: string; - key: string; - passphrase: string; - cert: string; - ca: string | string[]; - crl: string | string[]; - ciphers: string; - } - export interface Credentials { context?: any; } - export function createCredentials(details: CredentialDetails): Credentials; - export function createHash(algorithm: string, options?: stream.TransformOptions): Hash; - export function createHmac(algorithm: string, key: string | Buffer | NodeJS.TypedArray | DataView, options?: stream.TransformOptions): Hmac; - - type Utf8AsciiLatin1Encoding = "utf8" | "ascii" | "latin1"; - type HexBase64Latin1Encoding = "latin1" | "hex" | "base64"; - type Utf8AsciiBinaryEncoding = "utf8" | "ascii" | "binary"; - type HexBase64BinaryEncoding = "binary" | "base64" | "hex"; - type ECDHKeyFormat = "compressed" | "uncompressed" | "hybrid"; - - export interface Hash extends NodeJS.ReadWriteStream { - update(data: string | Buffer 
| NodeJS.TypedArray | DataView): Hash; - update(data: string, input_encoding: Utf8AsciiLatin1Encoding): Hash; - digest(): Buffer; - digest(encoding: HexBase64Latin1Encoding): string; - } - export interface Hmac extends NodeJS.ReadWriteStream { - update(data: string | Buffer | NodeJS.TypedArray | DataView): Hmac; - update(data: string, input_encoding: Utf8AsciiLatin1Encoding): Hmac; - digest(): Buffer; - digest(encoding: HexBase64Latin1Encoding): string; - } - export type CipherCCMTypes = 'aes-128-ccm' | 'aes-192-ccm' | 'aes-256-ccm'; - export type CipherGCMTypes = 'aes-128-gcm' | 'aes-192-gcm' | 'aes-256-gcm'; - export interface CipherCCMOptions extends stream.TransformOptions { - authTagLength: number; - } - export interface CipherGCMOptions extends stream.TransformOptions { - authTagLength?: number; - } - /** @deprecated since v10.0.0 use createCipheriv() */ - export function createCipher(algorithm: string, password: string | Buffer | NodeJS.TypedArray | DataView, options?: stream.TransformOptions): Cipher; - export function createCipher(algorithm: CipherCCMTypes, password: string | Buffer | NodeJS.TypedArray | DataView, options: CipherCCMOptions): CipherCCM; - export function createCipher(algorithm: CipherGCMTypes, password: string | Buffer | NodeJS.TypedArray | DataView, options: CipherGCMOptions): CipherGCM; - - export function createCipheriv(algorithm: string, key: string | Buffer | NodeJS.TypedArray | DataView, iv: string | Buffer | NodeJS.TypedArray | DataView, options?: stream.TransformOptions): Cipher; - export function createCipheriv(algorithm: CipherGCMTypes, key: string | Buffer | NodeJS.TypedArray | DataView, iv: string | Buffer | NodeJS.TypedArray | DataView, options: CipherCCMOptions): CipherCCM; - export function createCipheriv(algorithm: CipherGCMTypes, key: string | Buffer | NodeJS.TypedArray | DataView, iv: string | Buffer | NodeJS.TypedArray | DataView, options: CipherGCMOptions): CipherGCM; - - export interface Cipher extends NodeJS.ReadWriteStream { - update(data: string | Buffer | NodeJS.TypedArray | DataView): Buffer; - update(data: string, input_encoding: Utf8AsciiBinaryEncoding): Buffer; - update(data: Buffer | NodeJS.TypedArray | DataView, output_encoding: HexBase64BinaryEncoding): string; - update(data: Buffer | NodeJS.TypedArray | DataView, input_encoding: any, output_encoding: HexBase64BinaryEncoding): string; - // second arg ignored - update(data: string, input_encoding: Utf8AsciiBinaryEncoding, output_encoding: HexBase64BinaryEncoding): string; - final(): Buffer; - final(output_encoding: string): string; - setAutoPadding(auto_padding?: boolean): this; - // getAuthTag(): Buffer; - // setAAD(buffer: Buffer): this; // docs only say buffer - } - export interface CipherCCM extends Cipher { - setAAD(buffer: Buffer, options: { plainTextLength: number }): this; - getAuthTag(): Buffer; - } - export interface CipherGCM extends Cipher { - setAAD(buffer: Buffer, options?: { plainTextLength: number }): this; - getAuthTag(): Buffer; - } - /** @deprecated since v10.0.0 use createCipheriv() */ - export function createDecipher(algorithm: string, password: string | Buffer | NodeJS.TypedArray | DataView, options?: stream.TransformOptions): Decipher; - export function createDecipher(algorithm: CipherCCMTypes, password: string | Buffer | NodeJS.TypedArray | DataView, options: CipherCCMOptions): DecipherCCM; - export function createDecipher(algorithm: CipherGCMTypes, password: string | Buffer | NodeJS.TypedArray | DataView, options: CipherGCMOptions): DecipherGCM; - - export 
function createDecipheriv(algorithm: string, key: string | Buffer | NodeJS.TypedArray | DataView, iv: string | Buffer | NodeJS.TypedArray | DataView, options?: stream.TransformOptions): Decipher; - export function createDecipheriv(algorithm: CipherCCMTypes, key: string | Buffer | NodeJS.TypedArray | DataView, iv: string | Buffer | NodeJS.TypedArray | DataView, options: CipherCCMOptions): DecipherCCM; - export function createDecipheriv(algorithm: CipherGCMTypes, key: string | Buffer | NodeJS.TypedArray | DataView, iv: string | Buffer | NodeJS.TypedArray | DataView, options: CipherGCMOptions): DecipherGCM; - - export interface Decipher extends NodeJS.ReadWriteStream { - update(data: Buffer | NodeJS.TypedArray | DataView): Buffer; - update(data: string, input_encoding: HexBase64BinaryEncoding): Buffer; - update(data: Buffer | NodeJS.TypedArray | DataView, input_encoding: any, output_encoding: Utf8AsciiBinaryEncoding): string; - // second arg is ignored - update(data: string, input_encoding: HexBase64BinaryEncoding, output_encoding: Utf8AsciiBinaryEncoding): string; - final(): Buffer; - final(output_encoding: string): string; - setAutoPadding(auto_padding?: boolean): this; - // setAuthTag(tag: Buffer | NodeJS.TypedArray | DataView): this; - // setAAD(buffer: Buffer | NodeJS.TypedArray | DataView): this; - } - export interface DecipherCCM extends Decipher { - setAuthTag(buffer: Buffer | NodeJS.TypedArray | DataView, options: { plainTextLength: number }): this; - setAAD(buffer: Buffer | NodeJS.TypedArray | DataView): this; - } - export interface DecipherGCM extends Decipher { - setAuthTag(buffer: Buffer | NodeJS.TypedArray | DataView, options?: { plainTextLength: number }): this; - setAAD(buffer: Buffer | NodeJS.TypedArray | DataView): this; - } - - export function createSign(algorithm: string, options?: stream.WritableOptions): Signer; - export interface Signer extends NodeJS.WritableStream { - update(data: string | Buffer | NodeJS.TypedArray | DataView): Signer; - update(data: string, input_encoding: Utf8AsciiLatin1Encoding): Signer; - sign(private_key: string | { key: string; passphrase: string, padding?: number, saltLength?: number }): Buffer; - sign(private_key: string | { key: string; passphrase: string, padding?: number, saltLength?: number }, output_format: HexBase64Latin1Encoding): string; - } - export function createVerify(algorith: string, options?: stream.WritableOptions): Verify; - export interface Verify extends NodeJS.WritableStream { - update(data: string | Buffer | NodeJS.TypedArray | DataView): Verify; - update(data: string, input_encoding: Utf8AsciiLatin1Encoding): Verify; - verify(object: string | Object, signature: Buffer | NodeJS.TypedArray | DataView): boolean; - verify(object: string | Object, signature: string, signature_format: HexBase64Latin1Encoding): boolean; - // https://nodejs.org/api/crypto.html#crypto_verifier_verify_object_signature_signature_format - // The signature field accepts a TypedArray type, but it is only available starting ES2017 - } - export function createDiffieHellman(prime_length: number, generator?: number | Buffer | NodeJS.TypedArray | DataView): DiffieHellman; - export function createDiffieHellman(prime: Buffer | NodeJS.TypedArray | DataView): DiffieHellman; - export function createDiffieHellman(prime: string, prime_encoding: HexBase64Latin1Encoding): DiffieHellman; - export function createDiffieHellman(prime: string, prime_encoding: HexBase64Latin1Encoding, generator: number | Buffer | NodeJS.TypedArray | DataView): DiffieHellman; - export 
function createDiffieHellman(prime: string, prime_encoding: HexBase64Latin1Encoding, generator: string, generator_encoding: HexBase64Latin1Encoding): DiffieHellman; - export interface DiffieHellman { - generateKeys(): Buffer; - generateKeys(encoding: HexBase64Latin1Encoding): string; - computeSecret(other_public_key: Buffer | NodeJS.TypedArray | DataView): Buffer; - computeSecret(other_public_key: string, input_encoding: HexBase64Latin1Encoding): Buffer; - computeSecret(other_public_key: Buffer | NodeJS.TypedArray | DataView, output_encoding: HexBase64Latin1Encoding): string; - computeSecret(other_public_key: string, input_encoding: HexBase64Latin1Encoding, output_encoding: HexBase64Latin1Encoding): string; - getPrime(): Buffer; - getPrime(encoding: HexBase64Latin1Encoding): string; - getGenerator(): Buffer; - getGenerator(encoding: HexBase64Latin1Encoding): string; - getPublicKey(): Buffer; - getPublicKey(encoding: HexBase64Latin1Encoding): string; - getPrivateKey(): Buffer; - getPrivateKey(encoding: HexBase64Latin1Encoding): string; - setPublicKey(public_key: Buffer | NodeJS.TypedArray | DataView): void; - setPublicKey(public_key: string, encoding: string): void; - setPrivateKey(private_key: Buffer | NodeJS.TypedArray | DataView): void; - setPrivateKey(private_key: string, encoding: string): void; - verifyError: number; - } - export function getDiffieHellman(group_name: string): DiffieHellman; - export function pbkdf2(password: string | Buffer | NodeJS.TypedArray | DataView, salt: string | Buffer | NodeJS.TypedArray | DataView, iterations: number, keylen: number, digest: string, callback: (err: Error | null, derivedKey: Buffer) => any): void; - export function pbkdf2Sync(password: string | Buffer | NodeJS.TypedArray | DataView, salt: string | Buffer | NodeJS.TypedArray | DataView, iterations: number, keylen: number, digest: string): Buffer; - - export function randomBytes(size: number): Buffer; - export function randomBytes(size: number, callback: (err: Error | null, buf: Buffer) => void): void; - export function pseudoRandomBytes(size: number): Buffer; - export function pseudoRandomBytes(size: number, callback: (err: Error | null, buf: Buffer) => void): void; - - export function randomFillSync(buffer: T, offset?: number, size?: number): T; - export function randomFill(buffer: T, callback: (err: Error | null, buf: T) => void): void; - export function randomFill(buffer: T, offset: number, callback: (err: Error | null, buf: T) => void): void; - export function randomFill(buffer: T, offset: number, size: number, callback: (err: Error | null, buf: T) => void): void; - - export interface ScryptOptions { - N?: number; - r?: number; - p?: number; - maxmem?: number; - } - export function scrypt(password: string | Buffer | NodeJS.TypedArray | DataView, salt: string | Buffer | NodeJS.TypedArray | DataView, keylen: number, callback: (err: Error | null, derivedKey: Buffer) => void): void; - export function scrypt(password: string | Buffer | NodeJS.TypedArray | DataView, salt: string | Buffer | NodeJS.TypedArray | DataView, keylen: number, options: ScryptOptions, callback: (err: Error | null, derivedKey: Buffer) => void): void; - export function scryptSync(password: string | Buffer | NodeJS.TypedArray | DataView, salt: string | Buffer | NodeJS.TypedArray | DataView, keylen: number, options?: ScryptOptions): Buffer; - - export interface RsaPublicKey { - key: string; - padding?: number; - } - export interface RsaPrivateKey { - key: string; - passphrase?: string; - padding?: number; - } - export 
function publicEncrypt(public_key: string | RsaPublicKey, buffer: Buffer | NodeJS.TypedArray | DataView): Buffer; - export function privateDecrypt(private_key: string | RsaPrivateKey, buffer: Buffer | NodeJS.TypedArray | DataView): Buffer; - export function privateEncrypt(private_key: string | RsaPrivateKey, buffer: Buffer | NodeJS.TypedArray | DataView): Buffer; - export function publicDecrypt(public_key: string | RsaPublicKey, buffer: Buffer | NodeJS.TypedArray | DataView): Buffer; - export function getCiphers(): string[]; - export function getCurves(): string[]; - export function getHashes(): string[]; - export class ECDH { - static convertKey(key: string | Buffer | NodeJS.TypedArray | DataView, curve: string, inputEncoding?: HexBase64Latin1Encoding, outputEncoding?: "latin1" | "hex" | "base64", format?: "uncompressed" | "compressed" | "hybrid"): Buffer | string; - generateKeys(): Buffer; - generateKeys(encoding: HexBase64Latin1Encoding, format?: ECDHKeyFormat): string; - computeSecret(other_public_key: Buffer | NodeJS.TypedArray | DataView): Buffer; - computeSecret(other_public_key: string, input_encoding: HexBase64Latin1Encoding): Buffer; - computeSecret(other_public_key: Buffer | NodeJS.TypedArray | DataView, output_encoding: HexBase64Latin1Encoding): string; - computeSecret(other_public_key: string, input_encoding: HexBase64Latin1Encoding, output_encoding: HexBase64Latin1Encoding): string; - getPrivateKey(): Buffer; - getPrivateKey(encoding: HexBase64Latin1Encoding): string; - getPublicKey(): Buffer; - getPublicKey(encoding: HexBase64Latin1Encoding, format?: ECDHKeyFormat): string; - setPrivateKey(private_key: Buffer | NodeJS.TypedArray | DataView): void; - setPrivateKey(private_key: string, encoding: HexBase64Latin1Encoding): void; - } - export function createECDH(curve_name: string): ECDH; - export function timingSafeEqual(a: Buffer | NodeJS.TypedArray | DataView, b: Buffer | NodeJS.TypedArray | DataView): boolean; - /** @deprecated since v10.0.0 */ - export var DEFAULT_ENCODING: string; -} - -declare module "stream" { - import * as events from "events"; - - class internal extends events.EventEmitter { - pipe(destination: T, options?: { end?: boolean; }): T; - } - - namespace internal { - export class Stream extends internal { } - - export interface ReadableOptions { - highWaterMark?: number; - encoding?: string; - objectMode?: boolean; - read?(this: Readable, size: number): void; - destroy?(this: Readable, error: Error | null, callback: (error: Error | null) => void): void; - } - - export class Readable extends Stream implements NodeJS.ReadableStream { - readable: boolean; - readonly readableHighWaterMark: number; - readonly readableLength: number; - constructor(opts?: ReadableOptions); - _read(size: number): void; - read(size?: number): any; - setEncoding(encoding: string): this; - pause(): this; - resume(): this; - isPaused(): boolean; - unpipe(destination?: T): this; - unshift(chunk: any): void; - wrap(oldStream: NodeJS.ReadableStream): this; - push(chunk: any, encoding?: string): boolean; - _destroy(error: Error | null, callback: (error: Error | null) => void): void; - destroy(error?: Error): void; - - /** - * Event emitter - * The defined events on documents including: - * 1. close - * 2. data - * 3. end - * 4. readable - * 5. 
error - */ - addListener(event: "close", listener: () => void): this; - addListener(event: "data", listener: (chunk: any) => void): this; - addListener(event: "end", listener: () => void): this; - addListener(event: "readable", listener: () => void): this; - addListener(event: "error", listener: (err: Error) => void): this; - addListener(event: string | symbol, listener: (...args: any[]) => void): this; - - emit(event: "close"): boolean; - emit(event: "data", chunk: any): boolean; - emit(event: "end"): boolean; - emit(event: "readable"): boolean; - emit(event: "error", err: Error): boolean; - emit(event: string | symbol, ...args: any[]): boolean; - - on(event: "close", listener: () => void): this; - on(event: "data", listener: (chunk: any) => void): this; - on(event: "end", listener: () => void): this; - on(event: "readable", listener: () => void): this; - on(event: "error", listener: (err: Error) => void): this; - on(event: string | symbol, listener: (...args: any[]) => void): this; - - once(event: "close", listener: () => void): this; - once(event: "data", listener: (chunk: any) => void): this; - once(event: "end", listener: () => void): this; - once(event: "readable", listener: () => void): this; - once(event: "error", listener: (err: Error) => void): this; - once(event: string | symbol, listener: (...args: any[]) => void): this; - - prependListener(event: "close", listener: () => void): this; - prependListener(event: "data", listener: (chunk: any) => void): this; - prependListener(event: "end", listener: () => void): this; - prependListener(event: "readable", listener: () => void): this; - prependListener(event: "error", listener: (err: Error) => void): this; - prependListener(event: string | symbol, listener: (...args: any[]) => void): this; - - prependOnceListener(event: "close", listener: () => void): this; - prependOnceListener(event: "data", listener: (chunk: any) => void): this; - prependOnceListener(event: "end", listener: () => void): this; - prependOnceListener(event: "readable", listener: () => void): this; - prependOnceListener(event: "error", listener: (err: Error) => void): this; - prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; - - removeListener(event: "close", listener: () => void): this; - removeListener(event: "data", listener: (chunk: any) => void): this; - removeListener(event: "end", listener: () => void): this; - removeListener(event: "readable", listener: () => void): this; - removeListener(event: "error", listener: (err: Error) => void): this; - removeListener(event: string | symbol, listener: (...args: any[]) => void): this; - - [Symbol.asyncIterator](): AsyncIterableIterator; - } - - export interface WritableOptions { - highWaterMark?: number; - decodeStrings?: boolean; - objectMode?: boolean; - write?(this: Writable, chunk: any, encoding: string, callback: (error?: Error | null) => void): void; - writev?(this: Writable, chunks: Array<{ chunk: any, encoding: string }>, callback: (error?: Error | null) => void): void; - destroy?(this: Writable, error: Error | null, callback: (error: Error | null) => void): void; - final?(this: Writable, callback: (error?: Error | null) => void): void; - } - - export class Writable extends Stream implements NodeJS.WritableStream { - writable: boolean; - readonly writableHighWaterMark: number; - readonly writableLength: number; - constructor(opts?: WritableOptions); - _write(chunk: any, encoding: string, callback: (error?: Error | null) => void): void; - _writev?(chunks: Array<{ chunk: any, 
encoding: string }>, callback: (error?: Error | null) => void): void; - _destroy(error: Error | null, callback: (error: Error | null) => void): void; - _final(callback: (error?: Error | null) => void): void; - write(chunk: any, cb?: (error: Error | null | undefined) => void): boolean; - write(chunk: any, encoding?: string, cb?: (error: Error | null | undefined) => void): boolean; - setDefaultEncoding(encoding: string): this; - end(cb?: () => void): void; - end(chunk: any, cb?: () => void): void; - end(chunk: any, encoding?: string, cb?: () => void): void; - cork(): void; - uncork(): void; - destroy(error?: Error): void; - - /** - * Event emitter - * The defined events on documents including: - * 1. close - * 2. drain - * 3. error - * 4. finish - * 5. pipe - * 6. unpipe - */ - addListener(event: "close", listener: () => void): this; - addListener(event: "drain", listener: () => void): this; - addListener(event: "error", listener: (err: Error) => void): this; - addListener(event: "finish", listener: () => void): this; - addListener(event: "pipe", listener: (src: Readable) => void): this; - addListener(event: "unpipe", listener: (src: Readable) => void): this; - addListener(event: string | symbol, listener: (...args: any[]) => void): this; - - emit(event: "close"): boolean; - emit(event: "drain"): boolean; - emit(event: "error", err: Error): boolean; - emit(event: "finish"): boolean; - emit(event: "pipe", src: Readable): boolean; - emit(event: "unpipe", src: Readable): boolean; - emit(event: string | symbol, ...args: any[]): boolean; - - on(event: "close", listener: () => void): this; - on(event: "drain", listener: () => void): this; - on(event: "error", listener: (err: Error) => void): this; - on(event: "finish", listener: () => void): this; - on(event: "pipe", listener: (src: Readable) => void): this; - on(event: "unpipe", listener: (src: Readable) => void): this; - on(event: string | symbol, listener: (...args: any[]) => void): this; - - once(event: "close", listener: () => void): this; - once(event: "drain", listener: () => void): this; - once(event: "error", listener: (err: Error) => void): this; - once(event: "finish", listener: () => void): this; - once(event: "pipe", listener: (src: Readable) => void): this; - once(event: "unpipe", listener: (src: Readable) => void): this; - once(event: string | symbol, listener: (...args: any[]) => void): this; - - prependListener(event: "close", listener: () => void): this; - prependListener(event: "drain", listener: () => void): this; - prependListener(event: "error", listener: (err: Error) => void): this; - prependListener(event: "finish", listener: () => void): this; - prependListener(event: "pipe", listener: (src: Readable) => void): this; - prependListener(event: "unpipe", listener: (src: Readable) => void): this; - prependListener(event: string | symbol, listener: (...args: any[]) => void): this; - - prependOnceListener(event: "close", listener: () => void): this; - prependOnceListener(event: "drain", listener: () => void): this; - prependOnceListener(event: "error", listener: (err: Error) => void): this; - prependOnceListener(event: "finish", listener: () => void): this; - prependOnceListener(event: "pipe", listener: (src: Readable) => void): this; - prependOnceListener(event: "unpipe", listener: (src: Readable) => void): this; - prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; - - removeListener(event: "close", listener: () => void): this; - removeListener(event: "drain", listener: () => void): this; - 
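The Writable declarations above are easier to follow next to a concrete consumer. The sketch below is illustrative only and not part of the typings being removed; the `LineCollector` class name is invented. It subclasses `stream.Writable`, implements `_write`, and listens for the `finish` event exactly as typed here:

import { Writable } from "stream";

class LineCollector extends Writable {
    lines: string[] = [];

    // Each chunk lands here; invoking the callback tells the stream machinery the chunk is handled.
    _write(chunk: any, encoding: string, callback: (error?: Error | null) => void): void {
        this.lines.push(chunk.toString());
        callback();
    }
}

const sink = new LineCollector();
sink.on("finish", () => console.log(`collected ${sink.lines.length} chunks`));
sink.write("hello\n");
sink.end("world\n");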
removeListener(event: "error", listener: (err: Error) => void): this; - removeListener(event: "finish", listener: () => void): this; - removeListener(event: "pipe", listener: (src: Readable) => void): this; - removeListener(event: "unpipe", listener: (src: Readable) => void): this; - removeListener(event: string | symbol, listener: (...args: any[]) => void): this; - } - - export interface DuplexOptions extends ReadableOptions, WritableOptions { - allowHalfOpen?: boolean; - readableObjectMode?: boolean; - writableObjectMode?: boolean; - read?(this: Duplex, size: number): void; - write?(this: Duplex, chunk: any, encoding: string, callback: (error?: Error | null) => void): void; - writev?(this: Duplex, chunks: Array<{ chunk: any, encoding: string }>, callback: (error?: Error | null) => void): void; - final?(this: Duplex, callback: (error?: Error | null) => void): void; - destroy?(this: Duplex, error: Error | null, callback: (error: Error | null) => void): void; - } - - // Note: Duplex extends both Readable and Writable. - export class Duplex extends Readable implements Writable { - writable: boolean; - readonly writableHighWaterMark: number; - readonly writableLength: number; - constructor(opts?: DuplexOptions); - _write(chunk: any, encoding: string, callback: (error?: Error | null) => void): void; - _writev?(chunks: Array<{ chunk: any, encoding: string }>, callback: (error?: Error | null) => void): void; - _destroy(error: Error | null, callback: (error: Error | null) => void): void; - _final(callback: (error?: Error | null) => void): void; - write(chunk: any, cb?: (error: Error | null | undefined) => void): boolean; - write(chunk: any, encoding?: string, cb?: (error: Error | null | undefined) => void): boolean; - setDefaultEncoding(encoding: string): this; - end(cb?: () => void): void; - end(chunk: any, cb?: () => void): void; - end(chunk: any, encoding?: string, cb?: () => void): void; - cork(): void; - uncork(): void; - } - - type TransformCallback = (error?: Error, data?: any) => void; - - export interface TransformOptions extends DuplexOptions { - read?(this: Transform, size: number): void; - write?(this: Transform, chunk: any, encoding: string, callback: (error?: Error | null) => void): void; - writev?(this: Transform, chunks: Array<{ chunk: any, encoding: string }>, callback: (error?: Error | null) => void): void; - final?(this: Transform, callback: (error?: Error | null) => void): void; - destroy?(this: Transform, error: Error | null, callback: (error: Error | null) => void): void; - transform?(this: Transform, chunk: any, encoding: string, callback: TransformCallback): void; - flush?(this: Transform, callback: TransformCallback): void; - } - - export class Transform extends Duplex { - constructor(opts?: TransformOptions); - _transform(chunk: any, encoding: string, callback: TransformCallback): void; - _flush(callback: TransformCallback): void; - } - - export class PassThrough extends Transform { } - - export function pipeline(stream1: NodeJS.ReadableStream, stream2: T, callback?: (err: NodeJS.ErrnoException) => void): T; - export function pipeline(stream1: NodeJS.ReadableStream, stream2: NodeJS.ReadWriteStream, stream3: T, callback?: (err: NodeJS.ErrnoException) => void): T; - export function pipeline(stream1: NodeJS.ReadableStream, stream2: NodeJS.ReadWriteStream, stream3: NodeJS.ReadWriteStream, stream4: T, callback?: (err: NodeJS.ErrnoException) => void): T; - export function pipeline(stream1: NodeJS.ReadableStream, stream2: NodeJS.ReadWriteStream, stream3: NodeJS.ReadWriteStream, 
stream4: NodeJS.ReadWriteStream, stream5: T, callback?: (err: NodeJS.ErrnoException) => void): T; - export function pipeline(streams: Array, callback?: (err: NodeJS.ErrnoException) => void): NodeJS.WritableStream; - export function pipeline(stream1: NodeJS.ReadableStream, stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, ...streams: Array void)>): NodeJS.WritableStream; - export namespace pipeline { - export function __promisify__(stream1: NodeJS.ReadableStream, stream2: T): Promise; - export function __promisify__(stream1: NodeJS.ReadableStream, stream2: NodeJS.ReadWriteStream, stream3: T): Promise; - export function __promisify__(stream1: NodeJS.ReadableStream, stream2: NodeJS.ReadWriteStream, stream3: NodeJS.ReadWriteStream, stream4: T): Promise; - export function __promisify__(stream1: NodeJS.ReadableStream, stream2: NodeJS.ReadWriteStream, stream3: NodeJS.ReadWriteStream, stream4: NodeJS.ReadWriteStream, stream5: T): Promise; - export function __promisify__(streams: Array): Promise; - export function __promisify__(stream1: NodeJS.ReadableStream, stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, ...streams: Array): Promise; - } - } - - export = internal; -} - -declare module "util" { - export interface InspectOptions extends NodeJS.InspectOptions { } - export function format(format: any, ...param: any[]): string; - /** @deprecated since v0.11.3 - use `console.error()` instead. */ - export function debug(string: string): void; - /** @deprecated since v0.11.3 - use `console.error()` instead. */ - export function error(...param: any[]): void; - /** @deprecated since v0.11.3 - use `console.log()` instead. */ - export function puts(...param: any[]): void; - /** @deprecated since v0.11.3 - use `console.log()` instead. */ - export function print(...param: any[]): void; - /** @deprecated since v0.11.3 - use a third party module instead. */ - export function log(string: string): void; - export var inspect: { - (object: any, showHidden?: boolean, depth?: number | null, color?: boolean): string; - (object: any, options: InspectOptions): string; - colors: { - [color: string]: [number, number] | undefined - } - styles: { - [style: string]: string | undefined - } - defaultOptions: InspectOptions; - custom: symbol; - }; - /** @deprecated since v4.0.0 - use `Array.isArray()` instead. */ - export function isArray(object: any): object is any[]; - /** @deprecated since v4.0.0 - use `util.types.isRegExp()` instead. */ - export function isRegExp(object: any): object is RegExp; - /** @deprecated since v4.0.0 - use `util.types.isDate()` instead. */ - export function isDate(object: any): object is Date; - /** @deprecated since v4.0.0 - use `util.types.isNativeError()` instead. */ - export function isError(object: any): object is Error; - export function inherits(constructor: any, superConstructor: any): void; - export function debuglog(key: string): (msg: string, ...param: any[]) => void; - /** @deprecated since v4.0.0 - use `typeof value === 'boolean'` instead. */ - export function isBoolean(object: any): object is boolean; - /** @deprecated since v4.0.0 - use `Buffer.isBuffer()` instead. */ - export function isBuffer(object: any): object is Buffer; - /** @deprecated since v4.0.0 - use `typeof value === 'function'` instead. */ - export function isFunction(object: any): boolean; - /** @deprecated since v4.0.0 - use `value === null` instead. */ - export function isNull(object: any): object is null; - /** @deprecated since v4.0.0 - use `value === null || value === undefined` instead. 
*/ - export function isNullOrUndefined(object: any): object is null | undefined; - /** @deprecated since v4.0.0 - use `typeof value === 'number'` instead. */ - export function isNumber(object: any): object is number; - /** @deprecated since v4.0.0 - use `value !== null && typeof value === 'object'` instead. */ - export function isObject(object: any): boolean; - /** @deprecated since v4.0.0 - use `(typeof value !== 'object' && typeof value !== 'function') || value === null` instead. */ - export function isPrimitive(object: any): boolean; - /** @deprecated since v4.0.0 - use `typeof value === 'string'` instead. */ - export function isString(object: any): object is string; - /** @deprecated since v4.0.0 - use `typeof value === 'symbol'` instead. */ - export function isSymbol(object: any): object is symbol; - /** @deprecated since v4.0.0 - use `value === undefined` instead. */ - export function isUndefined(object: any): object is undefined; - export function deprecate(fn: T, message: string): T; - export function isDeepStrictEqual(val1: any, val2: any): boolean; - - export interface CustomPromisify extends Function { - __promisify__: TCustom; - } - - export function callbackify(fn: () => Promise): (callback: (err: NodeJS.ErrnoException) => void) => void; - export function callbackify(fn: () => Promise): (callback: (err: NodeJS.ErrnoException, result: TResult) => void) => void; - export function callbackify(fn: (arg1: T1) => Promise): (arg1: T1, callback: (err: NodeJS.ErrnoException) => void) => void; - export function callbackify(fn: (arg1: T1) => Promise): (arg1: T1, callback: (err: NodeJS.ErrnoException, result: TResult) => void) => void; - export function callbackify(fn: (arg1: T1, arg2: T2) => Promise): (arg1: T1, arg2: T2, callback: (err: NodeJS.ErrnoException) => void) => void; - export function callbackify(fn: (arg1: T1, arg2: T2) => Promise): (arg1: T1, arg2: T2, callback: (err: NodeJS.ErrnoException, result: TResult) => void) => void; - export function callbackify(fn: (arg1: T1, arg2: T2, arg3: T3) => Promise): (arg1: T1, arg2: T2, arg3: T3, callback: (err: NodeJS.ErrnoException) => void) => void; - export function callbackify(fn: (arg1: T1, arg2: T2, arg3: T3) => Promise): (arg1: T1, arg2: T2, arg3: T3, callback: (err: NodeJS.ErrnoException, result: TResult) => void) => void; - export function callbackify(fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, callback: (err: NodeJS.ErrnoException) => void) => void; - export function callbackify(fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, callback: (err: NodeJS.ErrnoException, result: TResult) => void) => void; - export function callbackify(fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => Promise): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, callback: (err: NodeJS.ErrnoException) => void) => void; - export function callbackify(fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => Promise): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, callback: (err: NodeJS.ErrnoException, result: TResult) => void) => void; - export function callbackify(fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6) => Promise): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, callback: (err: NodeJS.ErrnoException) => void) => void; - export function callbackify(fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6) => Promise): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, callback: (err: 
NodeJS.ErrnoException, result: TResult) => void) => void;
-
-    export function promisify<TCustom extends Function>(fn: CustomPromisify<TCustom>): TCustom;
-    export function promisify<TResult>(fn: (callback: (err: Error | null, result: TResult) => void) => void): () => Promise<TResult>;
-    export function promisify(fn: (callback: (err: Error | null) => void) => void): () => Promise<void>;
-    export function promisify<T1, TResult>(fn: (arg1: T1, callback: (err: Error | null, result: TResult) => void) => void): (arg1: T1) => Promise<TResult>;
-    export function promisify<T1>(fn: (arg1: T1, callback: (err: Error | null) => void) => void): (arg1: T1) => Promise<void>;
-    export function promisify<T1, T2, TResult>(fn: (arg1: T1, arg2: T2, callback: (err: Error | null, result: TResult) => void) => void): (arg1: T1, arg2: T2) => Promise<TResult>;
-    export function promisify<T1, T2>(fn: (arg1: T1, arg2: T2, callback: (err: Error | null) => void) => void): (arg1: T1, arg2: T2) => Promise<void>;
-    export function promisify<T1, T2, T3, TResult>(fn: (arg1: T1, arg2: T2, arg3: T3, callback: (err: Error | null, result: TResult) => void) => void): (arg1: T1, arg2: T2, arg3: T3) => Promise<TResult>;
-    export function promisify<T1, T2, T3>(fn: (arg1: T1, arg2: T2, arg3: T3, callback: (err: Error | null) => void) => void): (arg1: T1, arg2: T2, arg3: T3) => Promise<void>;
-    export function promisify<T1, T2, T3, T4, TResult>(fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, callback: (err: Error | null, result: TResult) => void) => void): (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise<TResult>;
-    export function promisify<T1, T2, T3, T4>(fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, callback: (err: Error | null) => void) => void): (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise<void>;
-    export function promisify<T1, T2, T3, T4, T5, TResult>(fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, callback: (err: Error | null, result: TResult) => void) => void): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => Promise<TResult>;
-    export function promisify<T1, T2, T3, T4, T5>(fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, callback: (err: Error | null) => void) => void): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => Promise<void>;
-    export function promisify(fn: Function): Function;
-    export namespace promisify {
-        const custom: symbol;
-    }
-
-    export namespace types {
-        export function isAnyArrayBuffer(object: any): boolean;
-        export function isArgumentsObject(object: any): object is IArguments;
-        export function isArrayBuffer(object: any): object is ArrayBuffer;
-        export function isAsyncFunction(object: any): boolean;
-        export function isBooleanObject(object: any): object is Boolean;
-        export function isDataView(object: any): object is DataView;
-        export function isDate(object: any): object is Date;
-        export function isExternal(object: any): boolean;
-        export function isFloat32Array(object: any): object is Float32Array;
-        export function isFloat64Array(object: any): object is Float64Array;
-        export function isGeneratorFunction(object: any): boolean;
-        export function isGeneratorObject(object: any): boolean;
-        export function isInt8Array(object: any): object is Int8Array;
-        export function isInt16Array(object: any): object is Int16Array;
-        export function isInt32Array(object: any): object is Int32Array;
-        export function isMap(object: any): boolean;
-        export function isMapIterator(object: any): boolean;
-        export function isNativeError(object: any): object is Error;
-        export function isNumberObject(object: any): object is Number;
-        export function isPromise(object: any): boolean;
-        export function isProxy(object: any): boolean;
-        export function isRegExp(object: any): object is RegExp;
-        export function isSet(object: any): boolean;
-        export function isSetIterator(object: any): boolean;
-
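As a usage sketch for the `promisify` and `callbackify` overloads earlier in this module (illustrative only; the file path and the `show`/`showCb` names are made up), `promisify` turns an error-first callback API into a Promise-returning one, and `callbackify` does the reverse:

import { callbackify, promisify } from "util";
import * as fs from "fs";

// promisify: fs.readFile(path, options, cb(err, data)) becomes a Promise-returning function.
const readFileAsync = promisify(fs.readFile);

async function show(path: string): Promise<void> {
    const text = await readFileAsync(path, "utf8");
    console.log(`${path}: ${text.length} characters`);
}

// callbackify: a Promise-returning function regains an error-first callback signature.
const showCb = callbackify(show);
showCb("package.json", (err) => {
    if (err) throw err;
});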
export function isSharedArrayBuffer(object: any): boolean; - export function isStringObject(object: any): boolean; - export function isSymbolObject(object: any): boolean; - export function isTypedArray(object: any): object is NodeJS.TypedArray; - export function isUint8Array(object: any): object is Uint8Array; - export function isUint8ClampedArray(object: any): object is Uint8ClampedArray; - export function isUint16Array(object: any): object is Uint16Array; - export function isUint32Array(object: any): object is Uint32Array; - export function isWeakMap(object: any): boolean; - export function isWeakSet(object: any): boolean; - export function isWebAssemblyCompiledModule(object: any): boolean; - } - - export class TextDecoder { - readonly encoding: string; - readonly fatal: boolean; - readonly ignoreBOM: boolean; - constructor( - encoding?: string, - options?: { fatal?: boolean; ignoreBOM?: boolean } - ); - decode( - input?: NodeJS.TypedArray | DataView | ArrayBuffer | null, - options?: { stream?: boolean } - ): string; - } - - export class TextEncoder { - readonly encoding: string; - constructor(); - encode(input?: string): Uint8Array; - } -} - -declare module "assert" { - function internal(value: any, message?: string | Error): void; - namespace internal { - export class AssertionError implements Error { - name: string; - message: string; - actual: any; - expected: any; - operator: string; - generatedMessage: boolean; - code: 'ERR_ASSERTION'; - - constructor(options?: { - message?: string; actual?: any; expected?: any; - operator?: string; stackStartFn?: Function - }); - } - - export function fail(message?: string | Error): never; - /** @deprecated since v10.0.0 - use fail([message]) or other assert functions instead. */ - export function fail(actual: any, expected: any, message?: string | Error, operator?: string, stackStartFn?: Function): never; - export function ok(value: any, message?: string | Error): void; - /** @deprecated since v9.9.0 - use strictEqual() instead. */ - export function equal(actual: any, expected: any, message?: string | Error): void; - /** @deprecated since v9.9.0 - use notStrictEqual() instead. */ - export function notEqual(actual: any, expected: any, message?: string | Error): void; - /** @deprecated since v9.9.0 - use deepStrictEqual() instead. */ - export function deepEqual(actual: any, expected: any, message?: string | Error): void; - /** @deprecated since v9.9.0 - use notDeepStrictEqual() instead. 
*/ - export function notDeepEqual(actual: any, expected: any, message?: string | Error): void; - export function strictEqual(actual: any, expected: any, message?: string | Error): void; - export function notStrictEqual(actual: any, expected: any, message?: string | Error): void; - export function deepStrictEqual(actual: any, expected: any, message?: string | Error): void; - export function notDeepStrictEqual(actual: any, expected: any, message?: string | Error): void; - - export function throws(block: Function, message?: string | Error): void; - export function throws(block: Function, error: RegExp | Function | Object | Error, message?: string | Error): void; - export function doesNotThrow(block: Function, message?: string | Error): void; - export function doesNotThrow(block: Function, error: RegExp | Function, message?: string | Error): void; - - export function ifError(value: any): void; - - export function rejects(block: Function | Promise, message?: string | Error): Promise; - export function rejects(block: Function | Promise, error: RegExp | Function | Object | Error, message?: string | Error): Promise; - export function doesNotReject(block: Function | Promise, message?: string | Error): Promise; - export function doesNotReject(block: Function | Promise, error: RegExp | Function, message?: string | Error): Promise; - - export var strict: typeof internal; - } - - export = internal; -} - -declare module "tty" { - import * as net from "net"; - - export function isatty(fd: number): boolean; - export class ReadStream extends net.Socket { - isRaw: boolean; - setRawMode(mode: boolean): void; - isTTY: boolean; - } - export class WriteStream extends net.Socket { - columns: number; - rows: number; - isTTY: boolean; - } -} - -declare module "domain" { - import * as events from "events"; - - export class Domain extends events.EventEmitter implements NodeJS.Domain { - run(fn: Function): void; - add(emitter: events.EventEmitter): void; - remove(emitter: events.EventEmitter): void; - bind(cb: (err: Error, data: any) => any): any; - intercept(cb: (data: any) => any): any; - members: any[]; - enter(): void; - exit(): void; - } - - export function create(): Domain; -} - -declare module "constants" { - export var E2BIG: number; - export var EACCES: number; - export var EADDRINUSE: number; - export var EADDRNOTAVAIL: number; - export var EAFNOSUPPORT: number; - export var EAGAIN: number; - export var EALREADY: number; - export var EBADF: number; - export var EBADMSG: number; - export var EBUSY: number; - export var ECANCELED: number; - export var ECHILD: number; - export var ECONNABORTED: number; - export var ECONNREFUSED: number; - export var ECONNRESET: number; - export var EDEADLK: number; - export var EDESTADDRREQ: number; - export var EDOM: number; - export var EEXIST: number; - export var EFAULT: number; - export var EFBIG: number; - export var EHOSTUNREACH: number; - export var EIDRM: number; - export var EILSEQ: number; - export var EINPROGRESS: number; - export var EINTR: number; - export var EINVAL: number; - export var EIO: number; - export var EISCONN: number; - export var EISDIR: number; - export var ELOOP: number; - export var EMFILE: number; - export var EMLINK: number; - export var EMSGSIZE: number; - export var ENAMETOOLONG: number; - export var ENETDOWN: number; - export var ENETRESET: number; - export var ENETUNREACH: number; - export var ENFILE: number; - export var ENOBUFS: number; - export var ENODATA: number; - export var ENODEV: number; - export var ENOENT: number; - export var 
ENOEXEC: number; - export var ENOLCK: number; - export var ENOLINK: number; - export var ENOMEM: number; - export var ENOMSG: number; - export var ENOPROTOOPT: number; - export var ENOSPC: number; - export var ENOSR: number; - export var ENOSTR: number; - export var ENOSYS: number; - export var ENOTCONN: number; - export var ENOTDIR: number; - export var ENOTEMPTY: number; - export var ENOTSOCK: number; - export var ENOTSUP: number; - export var ENOTTY: number; - export var ENXIO: number; - export var EOPNOTSUPP: number; - export var EOVERFLOW: number; - export var EPERM: number; - export var EPIPE: number; - export var EPROTO: number; - export var EPROTONOSUPPORT: number; - export var EPROTOTYPE: number; - export var ERANGE: number; - export var EROFS: number; - export var ESPIPE: number; - export var ESRCH: number; - export var ETIME: number; - export var ETIMEDOUT: number; - export var ETXTBSY: number; - export var EWOULDBLOCK: number; - export var EXDEV: number; - export var WSAEINTR: number; - export var WSAEBADF: number; - export var WSAEACCES: number; - export var WSAEFAULT: number; - export var WSAEINVAL: number; - export var WSAEMFILE: number; - export var WSAEWOULDBLOCK: number; - export var WSAEINPROGRESS: number; - export var WSAEALREADY: number; - export var WSAENOTSOCK: number; - export var WSAEDESTADDRREQ: number; - export var WSAEMSGSIZE: number; - export var WSAEPROTOTYPE: number; - export var WSAENOPROTOOPT: number; - export var WSAEPROTONOSUPPORT: number; - export var WSAESOCKTNOSUPPORT: number; - export var WSAEOPNOTSUPP: number; - export var WSAEPFNOSUPPORT: number; - export var WSAEAFNOSUPPORT: number; - export var WSAEADDRINUSE: number; - export var WSAEADDRNOTAVAIL: number; - export var WSAENETDOWN: number; - export var WSAENETUNREACH: number; - export var WSAENETRESET: number; - export var WSAECONNABORTED: number; - export var WSAECONNRESET: number; - export var WSAENOBUFS: number; - export var WSAEISCONN: number; - export var WSAENOTCONN: number; - export var WSAESHUTDOWN: number; - export var WSAETOOMANYREFS: number; - export var WSAETIMEDOUT: number; - export var WSAECONNREFUSED: number; - export var WSAELOOP: number; - export var WSAENAMETOOLONG: number; - export var WSAEHOSTDOWN: number; - export var WSAEHOSTUNREACH: number; - export var WSAENOTEMPTY: number; - export var WSAEPROCLIM: number; - export var WSAEUSERS: number; - export var WSAEDQUOT: number; - export var WSAESTALE: number; - export var WSAEREMOTE: number; - export var WSASYSNOTREADY: number; - export var WSAVERNOTSUPPORTED: number; - export var WSANOTINITIALISED: number; - export var WSAEDISCON: number; - export var WSAENOMORE: number; - export var WSAECANCELLED: number; - export var WSAEINVALIDPROCTABLE: number; - export var WSAEINVALIDPROVIDER: number; - export var WSAEPROVIDERFAILEDINIT: number; - export var WSASYSCALLFAILURE: number; - export var WSASERVICE_NOT_FOUND: number; - export var WSATYPE_NOT_FOUND: number; - export var WSA_E_NO_MORE: number; - export var WSA_E_CANCELLED: number; - export var WSAEREFUSED: number; - export var SIGHUP: number; - export var SIGINT: number; - export var SIGILL: number; - export var SIGABRT: number; - export var SIGFPE: number; - export var SIGKILL: number; - export var SIGSEGV: number; - export var SIGTERM: number; - export var SIGBREAK: number; - export var SIGWINCH: number; - export var SSL_OP_ALL: number; - export var SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION: number; - export var SSL_OP_CIPHER_SERVER_PREFERENCE: number; - export var 
SSL_OP_CISCO_ANYCONNECT: number; - export var SSL_OP_COOKIE_EXCHANGE: number; - export var SSL_OP_CRYPTOPRO_TLSEXT_BUG: number; - export var SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS: number; - export var SSL_OP_EPHEMERAL_RSA: number; - export var SSL_OP_LEGACY_SERVER_CONNECT: number; - export var SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER: number; - export var SSL_OP_MICROSOFT_SESS_ID_BUG: number; - export var SSL_OP_MSIE_SSLV2_RSA_PADDING: number; - export var SSL_OP_NETSCAPE_CA_DN_BUG: number; - export var SSL_OP_NETSCAPE_CHALLENGE_BUG: number; - export var SSL_OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG: number; - export var SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG: number; - export var SSL_OP_NO_COMPRESSION: number; - export var SSL_OP_NO_QUERY_MTU: number; - export var SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION: number; - export var SSL_OP_NO_SSLv2: number; - export var SSL_OP_NO_SSLv3: number; - export var SSL_OP_NO_TICKET: number; - export var SSL_OP_NO_TLSv1: number; - export var SSL_OP_NO_TLSv1_1: number; - export var SSL_OP_NO_TLSv1_2: number; - export var SSL_OP_PKCS1_CHECK_1: number; - export var SSL_OP_PKCS1_CHECK_2: number; - export var SSL_OP_SINGLE_DH_USE: number; - export var SSL_OP_SINGLE_ECDH_USE: number; - export var SSL_OP_SSLEAY_080_CLIENT_DH_BUG: number; - export var SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG: number; - export var SSL_OP_TLS_BLOCK_PADDING_BUG: number; - export var SSL_OP_TLS_D5_BUG: number; - export var SSL_OP_TLS_ROLLBACK_BUG: number; - export var ENGINE_METHOD_DSA: number; - export var ENGINE_METHOD_DH: number; - export var ENGINE_METHOD_RAND: number; - export var ENGINE_METHOD_ECDH: number; - export var ENGINE_METHOD_ECDSA: number; - export var ENGINE_METHOD_CIPHERS: number; - export var ENGINE_METHOD_DIGESTS: number; - export var ENGINE_METHOD_STORE: number; - export var ENGINE_METHOD_PKEY_METHS: number; - export var ENGINE_METHOD_PKEY_ASN1_METHS: number; - export var ENGINE_METHOD_ALL: number; - export var ENGINE_METHOD_NONE: number; - export var DH_CHECK_P_NOT_SAFE_PRIME: number; - export var DH_CHECK_P_NOT_PRIME: number; - export var DH_UNABLE_TO_CHECK_GENERATOR: number; - export var DH_NOT_SUITABLE_GENERATOR: number; - export var NPN_ENABLED: number; - export var RSA_PKCS1_PADDING: number; - export var RSA_SSLV23_PADDING: number; - export var RSA_NO_PADDING: number; - export var RSA_PKCS1_OAEP_PADDING: number; - export var RSA_X931_PADDING: number; - export var RSA_PKCS1_PSS_PADDING: number; - export var POINT_CONVERSION_COMPRESSED: number; - export var POINT_CONVERSION_UNCOMPRESSED: number; - export var POINT_CONVERSION_HYBRID: number; - export var O_RDONLY: number; - export var O_WRONLY: number; - export var O_RDWR: number; - export var S_IFMT: number; - export var S_IFREG: number; - export var S_IFDIR: number; - export var S_IFCHR: number; - export var S_IFBLK: number; - export var S_IFIFO: number; - export var S_IFSOCK: number; - export var S_IRWXU: number; - export var S_IRUSR: number; - export var S_IWUSR: number; - export var S_IXUSR: number; - export var S_IRWXG: number; - export var S_IRGRP: number; - export var S_IWGRP: number; - export var S_IXGRP: number; - export var S_IRWXO: number; - export var S_IROTH: number; - export var S_IWOTH: number; - export var S_IXOTH: number; - export var S_IFLNK: number; - export var O_CREAT: number; - export var O_EXCL: number; - export var O_NOCTTY: number; - export var O_DIRECTORY: number; - export var O_NOATIME: number; - export var O_NOFOLLOW: number; - export var O_SYNC: number; - export var O_DSYNC: number; - export var 
O_SYMLINK: number; - export var O_DIRECT: number; - export var O_NONBLOCK: number; - export var O_TRUNC: number; - export var O_APPEND: number; - export var F_OK: number; - export var R_OK: number; - export var W_OK: number; - export var X_OK: number; - export var COPYFILE_EXCL: number; - export var COPYFILE_FICLONE: number; - export var COPYFILE_FICLONE_FORCE: number; - export var UV_UDP_REUSEADDR: number; - export var SIGQUIT: number; - export var SIGTRAP: number; - export var SIGIOT: number; - export var SIGBUS: number; - export var SIGUSR1: number; - export var SIGUSR2: number; - export var SIGPIPE: number; - export var SIGALRM: number; - export var SIGCHLD: number; - export var SIGSTKFLT: number; - export var SIGCONT: number; - export var SIGSTOP: number; - export var SIGTSTP: number; - export var SIGTTIN: number; - export var SIGTTOU: number; - export var SIGURG: number; - export var SIGXCPU: number; - export var SIGXFSZ: number; - export var SIGVTALRM: number; - export var SIGPROF: number; - export var SIGIO: number; - export var SIGPOLL: number; - export var SIGPWR: number; - export var SIGSYS: number; - export var SIGUNUSED: number; - export var defaultCoreCipherList: string; - export var defaultCipherList: string; - export var ENGINE_METHOD_RSA: number; - export var ALPN_ENABLED: number; -} - -declare module "module" { - export = NodeJS.Module; -} - -declare module "process" { - export = process; -} - -declare module "v8" { - interface HeapSpaceInfo { - space_name: string; - space_size: number; - space_used_size: number; - space_available_size: number; - physical_space_size: number; - } - - // ** Signifies if the --zap_code_space option is enabled or not. 1 == enabled, 0 == disabled. */ - type DoesZapCodeSpaceFlag = 0 | 1; - - interface HeapInfo { - total_heap_size: number; - total_heap_size_executable: number; - total_physical_size: number; - total_available_size: number; - used_heap_size: number; - heap_size_limit: number; - malloced_memory: number; - peak_malloced_memory: number; - does_zap_garbage: DoesZapCodeSpaceFlag; - } - - export function getHeapStatistics(): HeapInfo; - export function getHeapSpaceStatistics(): HeapSpaceInfo[]; - export function setFlagsFromString(flags: string): void; -} - -declare module "timers" { - export function setTimeout(callback: (...args: any[]) => void, ms: number, ...args: any[]): NodeJS.Timer; - export namespace setTimeout { - export function __promisify__(ms: number): Promise; - export function __promisify__(ms: number, value: T): Promise; - } - export function clearTimeout(timeoutId: NodeJS.Timer): void; - export function setInterval(callback: (...args: any[]) => void, ms: number, ...args: any[]): NodeJS.Timer; - export function clearInterval(intervalId: NodeJS.Timer): void; - export function setImmediate(callback: (...args: any[]) => void, ...args: any[]): any; - export namespace setImmediate { - export function __promisify__(): Promise; - export function __promisify__(value: T): Promise; - } - export function clearImmediate(immediateId: any): void; -} - -declare module "console" { - export = console; -} - -/** - * Async Hooks module: https://nodejs.org/api/async_hooks.html - */ -declare module "async_hooks" { - /** - * Returns the asyncId of the current execution context. - */ - export function executionAsyncId(): number; - - /** - * Returns the ID of the resource responsible for calling the callback that is currently being executed. 
- */ - export function triggerAsyncId(): number; - - export interface HookCallbacks { - /** - * Called when a class is constructed that has the possibility to emit an asynchronous event. - * @param asyncId a unique ID for the async resource - * @param type the type of the async resource - * @param triggerAsyncId the unique ID of the async resource in whose execution context this async resource was created - * @param resource reference to the resource representing the async operation, needs to be released during destroy - */ - init?(asyncId: number, type: string, triggerAsyncId: number, resource: Object): void; - - /** - * When an asynchronous operation is initiated or completes a callback is called to notify the user. - * The before callback is called just before said callback is executed. - * @param asyncId the unique identifier assigned to the resource about to execute the callback. - */ - before?(asyncId: number): void; - - /** - * Called immediately after the callback specified in before is completed. - * @param asyncId the unique identifier assigned to the resource which has executed the callback. - */ - after?(asyncId: number): void; - - /** - * Called when a promise has resolve() called. This may not be in the same execution id - * as the promise itself. - * @param asyncId the unique id for the promise that was resolve()d. - */ - promiseResolve?(asyncId: number): void; - - /** - * Called after the resource corresponding to asyncId is destroyed - * @param asyncId a unique ID for the async resource - */ - destroy?(asyncId: number): void; - } - - export interface AsyncHook { - /** - * Enable the callbacks for a given AsyncHook instance. If no callbacks are provided enabling is a noop. - */ - enable(): this; - - /** - * Disable the callbacks for a given AsyncHook instance from the global pool of AsyncHook callbacks to be executed. Once a hook has been disabled it will not be called again until enabled. - */ - disable(): this; - } - - /** - * Registers functions to be called for different lifetime events of each async operation. - * @param options the callbacks to register - * @return an AsyncHooks instance used for disabling and enabling hooks - */ - export function createHook(options: HookCallbacks): AsyncHook; - - export interface AsyncResourceOptions { - /** - * The ID of the execution context that created this async event. - * Default: `executionAsyncId()` - */ - triggerAsyncId?: number; - - /** - * Disables automatic `emitDestroy` when the object is garbage collected. - * This usually does not need to be set (even if `emitDestroy` is called - * manually), unless the resource's `asyncId` is retrieved and the - * sensitive API's `emitDestroy` is called with it. - * Default: `false` - */ - requireManualDestroy?: boolean; - } - - /** - * The class AsyncResource was designed to be extended by the embedder's async resources. - * Using this users can easily trigger the lifetime events of their own resources. - */ - export class AsyncResource { - /** - * AsyncResource() is meant to be extended. Instantiating a - * new AsyncResource() also triggers init. If triggerAsyncId is omitted then - * async_hook.executionAsyncId() is used. - * @param type The type of async event. - * @param triggerAsyncId The ID of the execution context that created - * this async event (default: `executionAsyncId()`), or an - * AsyncResourceOptions object (since 9.3) - */ - constructor(type: string, triggerAsyncId?: number|AsyncResourceOptions); - - /** - * Call AsyncHooks before callbacks. 
- * @deprecated since 9.6 - Use asyncResource.runInAsyncScope() instead. - */ - emitBefore(): void; - - /** - * Call AsyncHooks after callbacks. - * @deprecated since 9.6 - Use asyncResource.runInAsyncScope() instead. - */ - emitAfter(): void; - - /** - * Call the provided function with the provided arguments in the - * execution context of the async resource. This will establish the - * context, trigger the AsyncHooks before callbacks, call the function, - * trigger the AsyncHooks after callbacks, and then restore the original - * execution context. - * @param fn The function to call in the execution context of this - * async resource. - * @param thisArg The receiver to be used for the function call. - * @param args Optional arguments to pass to the function. - */ - runInAsyncScope(fn: (this: This, ...args: any[]) => Result, thisArg?: This, ...args: any[]): Result; - - /** - * Call AsyncHooks destroy callbacks. - */ - emitDestroy(): void; - - /** - * @return the unique ID assigned to this AsyncResource instance. - */ - asyncId(): number; - - /** - * @return the trigger ID for this AsyncResource instance. - */ - triggerAsyncId(): number; - } -} - -declare module "http2" { - import * as events from "events"; - import * as fs from "fs"; - import * as net from "net"; - import * as stream from "stream"; - import * as tls from "tls"; - import * as url from "url"; - - import { IncomingHttpHeaders as Http1IncomingHttpHeaders, OutgoingHttpHeaders } from "http"; - export { OutgoingHttpHeaders } from "http"; - - export interface IncomingHttpStatusHeader { - ":status"?: number; - } - - export interface IncomingHttpHeaders extends Http1IncomingHttpHeaders { - ":path"?: string; - ":method"?: string; - ":authority"?: string; - ":scheme"?: string; - } - - // Http2Stream - - export interface StreamPriorityOptions { - exclusive?: boolean; - parent?: number; - weight?: number; - silent?: boolean; - } - - export interface StreamState { - localWindowSize?: number; - state?: number; - streamLocalClose?: number; - streamRemoteClose?: number; - sumDependencyWeight?: number; - weight?: number; - } - - export interface ServerStreamResponseOptions { - endStream?: boolean; - getTrailers?: (trailers: OutgoingHttpHeaders) => void; - } - - export interface StatOptions { - offset: number; - length: number; - } - - export interface ServerStreamFileResponseOptions { - statCheck?: (stats: fs.Stats, headers: OutgoingHttpHeaders, statOptions: StatOptions) => void | boolean; - getTrailers?: (trailers: OutgoingHttpHeaders) => void; - offset?: number; - length?: number; - } - - export interface ServerStreamFileResponseOptionsWithError extends ServerStreamFileResponseOptions { - onError?: (err: NodeJS.ErrnoException) => void; - } - - export interface Http2Stream extends stream.Duplex { - readonly aborted: boolean; - close(code?: number, callback?: () => void): void; - readonly closed: boolean; - readonly destroyed: boolean; - readonly pending: boolean; - priority(options: StreamPriorityOptions): void; - readonly rstCode: number; - readonly session: Http2Session; - setTimeout(msecs: number, callback?: () => void): void; - readonly state: StreamState; - - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "aborted", listener: () => void): this; - addListener(event: "close", listener: () => void): this; - addListener(event: "data", listener: (chunk: Buffer | string) => void): this; - addListener(event: "drain", listener: () => void): this; - addListener(event: "end", listener: () => void): 
this; - addListener(event: "error", listener: (err: Error) => void): this; - addListener(event: "finish", listener: () => void): this; - addListener(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; - addListener(event: "pipe", listener: (src: stream.Readable) => void): this; - addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; - addListener(event: "streamClosed", listener: (code: number) => void): this; - addListener(event: "timeout", listener: () => void): this; - addListener(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "aborted"): boolean; - emit(event: "close"): boolean; - emit(event: "data", chunk: Buffer | string): boolean; - emit(event: "drain"): boolean; - emit(event: "end"): boolean; - emit(event: "error", err: Error): boolean; - emit(event: "finish"): boolean; - emit(event: "frameError", frameType: number, errorCode: number): boolean; - emit(event: "pipe", src: stream.Readable): boolean; - emit(event: "unpipe", src: stream.Readable): boolean; - emit(event: "streamClosed", code: number): boolean; - emit(event: "timeout"): boolean; - emit(event: "trailers", trailers: IncomingHttpHeaders, flags: number): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "aborted", listener: () => void): this; - on(event: "close", listener: () => void): this; - on(event: "data", listener: (chunk: Buffer | string) => void): this; - on(event: "drain", listener: () => void): this; - on(event: "end", listener: () => void): this; - on(event: "error", listener: (err: Error) => void): this; - on(event: "finish", listener: () => void): this; - on(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; - on(event: "pipe", listener: (src: stream.Readable) => void): this; - on(event: "unpipe", listener: (src: stream.Readable) => void): this; - on(event: "streamClosed", listener: (code: number) => void): this; - on(event: "timeout", listener: () => void): this; - on(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "aborted", listener: () => void): this; - once(event: "close", listener: () => void): this; - once(event: "data", listener: (chunk: Buffer | string) => void): this; - once(event: "drain", listener: () => void): this; - once(event: "end", listener: () => void): this; - once(event: "error", listener: (err: Error) => void): this; - once(event: "finish", listener: () => void): this; - once(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; - once(event: "pipe", listener: (src: stream.Readable) => void): this; - once(event: "unpipe", listener: (src: stream.Readable) => void): this; - once(event: "streamClosed", listener: (code: number) => void): this; - once(event: "timeout", listener: () => void): this; - once(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "aborted", listener: () => void): this; - prependListener(event: "close", listener: () => void): this; - prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; - prependListener(event: "drain", listener: () => void): this; - prependListener(event: "end", listener: () => void): this; 
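For orientation, a minimal HTTP/2 server wired against these stream types might look as follows. This is an illustrative sketch, not part of the typings: `createServer` and `ServerHttp2Stream.respond()` are declared further down in this module, and the port number is arbitrary.

import * as http2 from "http2";

const server = http2.createServer();

// Every request surfaces as a ServerHttp2Stream plus its incoming header block.
server.on("stream", (stream, headers) => {
    stream.respond({ ":status": 200, "content-type": "text/plain" });
    stream.end(`you asked for ${headers[":path"]}\n`);
});

server.listen(8080);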
- prependListener(event: "error", listener: (err: Error) => void): this; - prependListener(event: "finish", listener: () => void): this; - prependListener(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; - prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; - prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; - prependListener(event: "streamClosed", listener: (code: number) => void): this; - prependListener(event: "timeout", listener: () => void): this; - prependListener(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "aborted", listener: () => void): this; - prependOnceListener(event: "close", listener: () => void): this; - prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this; - prependOnceListener(event: "drain", listener: () => void): this; - prependOnceListener(event: "end", listener: () => void): this; - prependOnceListener(event: "error", listener: (err: Error) => void): this; - prependOnceListener(event: "finish", listener: () => void): this; - prependOnceListener(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; - prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; - prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this; - prependOnceListener(event: "streamClosed", listener: (code: number) => void): this; - prependOnceListener(event: "timeout", listener: () => void): this; - prependOnceListener(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; - } - - export interface ClientHttp2Stream extends Http2Stream { - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "headers", listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; - addListener(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; - addListener(event: "response", listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "headers", headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number): boolean; - emit(event: "push", headers: IncomingHttpHeaders, flags: number): boolean; - emit(event: "response", headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "headers", listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; - on(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; - on(event: "response", listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "headers", listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; - once(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; - once(event: "response", listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - 
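The client side of the same exchange exercises the `ClientHttp2Stream` events typed here ("response", "data", "end"). Again an illustrative sketch only: `connect` and `ClientHttp2Session.request()` are declared later in this module, `Http2Session.close()` just below, and the URL is arbitrary.

import * as http2 from "http2";

const session = http2.connect("http://localhost:8080");
const req = session.request({ ":path": "/" });

// "response" delivers IncomingHttpHeaders & IncomingHttpStatusHeader, as declared above.
req.on("response", (headers) => console.log("status:", headers[":status"]));

let body = "";
req.setEncoding("utf8");
req.on("data", (chunk) => { body += chunk; });
req.on("end", () => {
    console.log(body);
    session.close();
});
req.end();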
prependListener(event: "headers", listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; - prependListener(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; - prependListener(event: "response", listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "headers", listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; - prependOnceListener(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; - prependOnceListener(event: "response", listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; - } - - export interface ServerHttp2Stream extends Http2Stream { - additionalHeaders(headers: OutgoingHttpHeaders): void; - readonly headersSent: boolean; - readonly pushAllowed: boolean; - pushStream(headers: OutgoingHttpHeaders, callback?: (err: Error | null, pushStream: ServerHttp2Stream, headers: OutgoingHttpHeaders) => void): void; - pushStream(headers: OutgoingHttpHeaders, options?: StreamPriorityOptions, callback?: (err: Error | null, pushStream: ServerHttp2Stream, headers: OutgoingHttpHeaders) => void): void; - respond(headers?: OutgoingHttpHeaders, options?: ServerStreamResponseOptions): void; - respondWithFD(fd: number, headers?: OutgoingHttpHeaders, options?: ServerStreamFileResponseOptions): void; - respondWithFile(path: string, headers?: OutgoingHttpHeaders, options?: ServerStreamFileResponseOptionsWithError): void; - } - - // Http2Session - - export interface Settings { - headerTableSize?: number; - enablePush?: boolean; - initialWindowSize?: number; - maxFrameSize?: number; - maxConcurrentStreams?: number; - maxHeaderListSize?: number; - } - - export interface ClientSessionRequestOptions { - endStream?: boolean; - exclusive?: boolean; - parent?: number; - weight?: number; - getTrailers?: (trailers: OutgoingHttpHeaders, flags: number) => void; - } - - export interface SessionState { - effectiveLocalWindowSize?: number; - effectiveRecvDataLength?: number; - nextStreamID?: number; - localWindowSize?: number; - lastProcStreamID?: number; - remoteWindowSize?: number; - outboundQueueSize?: number; - deflateDynamicTableSize?: number; - inflateDynamicTableSize?: number; - } - - export interface Http2Session extends events.EventEmitter { - readonly alpnProtocol?: string; - close(callback?: () => void): void; - readonly closed: boolean; - readonly connecting: boolean; - destroy(error?: Error, code?: number): void; - readonly destroyed: boolean; - readonly encrypted?: boolean; - goaway(code?: number, lastStreamID?: number, opaqueData?: Buffer | DataView | NodeJS.TypedArray): void; - readonly localSettings: Settings; - readonly originSet?: string[]; - readonly pendingSettingsAck: boolean; - ping(callback: (err: Error | null, duration: number, payload: Buffer) => void): boolean; - ping(payload: Buffer | DataView | NodeJS.TypedArray , callback: (err: Error | null, duration: number, payload: Buffer) => void): boolean; - ref(): void; - readonly remoteSettings: Settings; - rstStream(stream: Http2Stream, code?: number): void; - setTimeout(msecs: number, callback?: () => void): void; - readonly socket: net.Socket | tls.TLSSocket; - readonly state: SessionState; - priority(stream: Http2Stream, options: StreamPriorityOptions): void; - settings(settings: Settings): 
void; - readonly type: number; - unref(): void; - - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "close", listener: () => void): this; - addListener(event: "error", listener: (err: Error) => void): this; - addListener(event: "frameError", listener: (frameType: number, errorCode: number, streamID: number) => void): this; - addListener(event: "goaway", listener: (errorCode: number, lastStreamID: number, opaqueData: Buffer) => void): this; - addListener(event: "localSettings", listener: (settings: Settings) => void): this; - addListener(event: "remoteSettings", listener: (settings: Settings) => void): this; - addListener(event: "timeout", listener: () => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "close"): boolean; - emit(event: "error", err: Error): boolean; - emit(event: "frameError", frameType: number, errorCode: number, streamID: number): boolean; - emit(event: "goaway", errorCode: number, lastStreamID: number, opaqueData: Buffer): boolean; - emit(event: "localSettings", settings: Settings): boolean; - emit(event: "remoteSettings", settings: Settings): boolean; - emit(event: "timeout"): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "close", listener: () => void): this; - on(event: "error", listener: (err: Error) => void): this; - on(event: "frameError", listener: (frameType: number, errorCode: number, streamID: number) => void): this; - on(event: "goaway", listener: (errorCode: number, lastStreamID: number, opaqueData: Buffer) => void): this; - on(event: "localSettings", listener: (settings: Settings) => void): this; - on(event: "remoteSettings", listener: (settings: Settings) => void): this; - on(event: "timeout", listener: () => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "close", listener: () => void): this; - once(event: "error", listener: (err: Error) => void): this; - once(event: "frameError", listener: (frameType: number, errorCode: number, streamID: number) => void): this; - once(event: "goaway", listener: (errorCode: number, lastStreamID: number, opaqueData: Buffer) => void): this; - once(event: "localSettings", listener: (settings: Settings) => void): this; - once(event: "remoteSettings", listener: (settings: Settings) => void): this; - once(event: "timeout", listener: () => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "close", listener: () => void): this; - prependListener(event: "error", listener: (err: Error) => void): this; - prependListener(event: "frameError", listener: (frameType: number, errorCode: number, streamID: number) => void): this; - prependListener(event: "goaway", listener: (errorCode: number, lastStreamID: number, opaqueData: Buffer) => void): this; - prependListener(event: "localSettings", listener: (settings: Settings) => void): this; - prependListener(event: "remoteSettings", listener: (settings: Settings) => void): this; - prependListener(event: "timeout", listener: () => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "close", listener: () => void): this; - prependOnceListener(event: "error", listener: (err: Error) => void): this; - prependOnceListener(event: "frameError", listener: (frameType: number, errorCode: number, streamID: number) => void): this; - prependOnceListener(event: "goaway", listener: (errorCode: number, lastStreamID: 
number, opaqueData: Buffer) => void): this; - prependOnceListener(event: "localSettings", listener: (settings: Settings) => void): this; - prependOnceListener(event: "remoteSettings", listener: (settings: Settings) => void): this; - prependOnceListener(event: "timeout", listener: () => void): this; - } - - export interface ClientHttp2Session extends Http2Session { - request(headers?: OutgoingHttpHeaders, options?: ClientSessionRequestOptions): ClientHttp2Stream; - - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; - addListener(event: "connect", listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; - addListener(event: "stream", listener: (stream: ClientHttp2Stream, headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "altsvc", alt: string, origin: string, stream: number): boolean; - emit(event: "connect", session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket): boolean; - emit(event: "stream", stream: ClientHttp2Stream, headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; - on(event: "connect", listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; - on(event: "stream", listener: (stream: ClientHttp2Stream, headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; - once(event: "connect", listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; - once(event: "stream", listener: (stream: ClientHttp2Stream, headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; - prependListener(event: "connect", listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; - prependListener(event: "stream", listener: (stream: ClientHttp2Stream, headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; - prependOnceListener(event: "connect", listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; - prependOnceListener(event: "stream", listener: (stream: ClientHttp2Stream, headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; - } - - export interface AlternativeServiceOptions { - origin: number | string | url.URL; - } - - export interface ServerHttp2Session extends Http2Session { - altsvc(alt: string, originOrStream: number | string | url.URL | AlternativeServiceOptions): void; - readonly server: Http2Server | Http2SecureServer; - - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "connect", listener: (session: 
ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; - addListener(event: "stream", listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "connect", session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket): boolean; - emit(event: "stream", stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "connect", listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; - on(event: "stream", listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "connect", listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; - once(event: "stream", listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "connect", listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; - prependListener(event: "stream", listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "connect", listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; - prependOnceListener(event: "stream", listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; - } - - // Http2Server - - export interface SessionOptions { - maxDeflateDynamicTableSize?: number; - maxReservedRemoteStreams?: number; - maxSendHeaderBlockLength?: number; - paddingStrategy?: number; - peerMaxConcurrentStreams?: number; - selectPadding?: (frameLen: number, maxFrameLen: number) => number; - settings?: Settings; - } - - export type ClientSessionOptions = SessionOptions; - export type ServerSessionOptions = SessionOptions; - - export interface SecureClientSessionOptions extends ClientSessionOptions, tls.ConnectionOptions { } - export interface SecureServerSessionOptions extends ServerSessionOptions, tls.TlsOptions { } - - export interface ServerOptions extends ServerSessionOptions { - allowHTTP1?: boolean; - } - - export interface SecureServerOptions extends SecureServerSessionOptions { - allowHTTP1?: boolean; - } - - export interface Http2Server extends net.Server { - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "checkContinue", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - addListener(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - addListener(event: "sessionError", listener: (err: Error) => void): this; - addListener(event: "stream", listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; - addListener(event: "timeout", listener: () => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "checkContinue", request: Http2ServerRequest, response: Http2ServerResponse): boolean; - emit(event: "request", request: Http2ServerRequest, response: Http2ServerResponse): boolean; - emit(event: "sessionError", err: Error): boolean; - 
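For reference, a hedged sketch of how the Http2Server "stream" overloads are typically consumed with createServer; the port is a placeholder:

```ts
import * as http2 from "http2";

// Sketch: the 'stream' listener receives a ServerHttp2Stream plus the
// decoded request headers, matching the overloads declared here.
const server = http2.createServer();

server.on("stream", (stream, headers) => {
  stream.respond({ ":status": 200, "content-type": "text/plain" });
  stream.end(`you asked for ${headers[":path"]}\n`);
});

server.listen(8080); // placeholder port
```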
emit(event: "stream", stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; - emit(event: "timeout"): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "checkContinue", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - on(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - on(event: "sessionError", listener: (err: Error) => void): this; - on(event: "stream", listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; - on(event: "timeout", listener: () => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "checkContinue", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - once(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - once(event: "sessionError", listener: (err: Error) => void): this; - once(event: "stream", listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; - once(event: "timeout", listener: () => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "checkContinue", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - prependListener(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - prependListener(event: "sessionError", listener: (err: Error) => void): this; - prependListener(event: "stream", listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; - prependListener(event: "timeout", listener: () => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "checkContinue", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - prependOnceListener(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - prependOnceListener(event: "sessionError", listener: (err: Error) => void): this; - prependOnceListener(event: "stream", listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; - prependOnceListener(event: "timeout", listener: () => void): this; - } - - export interface Http2SecureServer extends tls.Server { - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "checkContinue", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - addListener(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - addListener(event: "sessionError", listener: (err: Error) => void): this; - addListener(event: "stream", listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; - addListener(event: "timeout", listener: () => void): this; - addListener(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "checkContinue", request: Http2ServerRequest, response: Http2ServerResponse): boolean; - emit(event: "request", request: Http2ServerRequest, response: Http2ServerResponse): boolean; - emit(event: "sessionError", err: Error): boolean; - emit(event: "stream", stream: ServerHttp2Stream, 
headers: IncomingHttpHeaders, flags: number): boolean; - emit(event: "timeout"): boolean; - emit(event: "unknownProtocol", socket: tls.TLSSocket): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "checkContinue", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - on(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - on(event: "sessionError", listener: (err: Error) => void): this; - on(event: "stream", listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; - on(event: "timeout", listener: () => void): this; - on(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "checkContinue", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - once(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - once(event: "sessionError", listener: (err: Error) => void): this; - once(event: "stream", listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; - once(event: "timeout", listener: () => void): this; - once(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "checkContinue", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - prependListener(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - prependListener(event: "sessionError", listener: (err: Error) => void): this; - prependListener(event: "stream", listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; - prependListener(event: "timeout", listener: () => void): this; - prependListener(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "checkContinue", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - prependOnceListener(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; - prependOnceListener(event: "sessionError", listener: (err: Error) => void): this; - prependOnceListener(event: "stream", listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; - prependOnceListener(event: "timeout", listener: () => void): this; - prependOnceListener(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; - } - - export interface Http2ServerRequest extends stream.Readable { - headers: IncomingHttpHeaders; - httpVersion: string; - method: string; - rawHeaders: string[]; - rawTrailers: string[]; - setTimeout(msecs: number, callback?: () => void): void; - socket: net.Socket | tls.TLSSocket; - stream: ServerHttp2Stream; - trailers: IncomingHttpHeaders; - url: string; - - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "aborted", listener: (hadError: boolean, code: number) => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "aborted", hadError: boolean, code: number): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - 
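Http2ServerRequest and Http2ServerResponse describe the http1-style compatibility layer. A small sketch of that API, with the port as a placeholder:

```ts
import * as http2 from "http2";

// Sketch: the compatibility API mirrors http.IncomingMessage/ServerResponse,
// while req.stream still exposes the underlying ServerHttp2Stream.
const server = http2.createServer((req, res) => {
  res.setHeader("content-type", "application/json");
  res.writeHead(200);
  res.end(JSON.stringify({ path: req.url, method: req.method }));
});

server.listen(8080); // placeholder port
```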
on(event: "aborted", listener: (hadError: boolean, code: number) => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "aborted", listener: (hadError: boolean, code: number) => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "aborted", listener: (hadError: boolean, code: number) => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "aborted", listener: (hadError: boolean, code: number) => void): this; - } - - export interface Http2ServerResponse extends events.EventEmitter { - addTrailers(trailers: OutgoingHttpHeaders): void; - connection: net.Socket | tls.TLSSocket; - end(callback?: () => void): void; - end(data?: string | Buffer, callback?: () => void): void; - end(data?: string | Buffer, encoding?: string, callback?: () => void): void; - readonly finished: boolean; - getHeader(name: string): string; - getHeaderNames(): string[]; - getHeaders(): OutgoingHttpHeaders; - hasHeader(name: string): boolean; - readonly headersSent: boolean; - removeHeader(name: string): void; - sendDate: boolean; - setHeader(name: string, value: number | string | string[]): void; - setTimeout(msecs: number, callback?: () => void): void; - socket: net.Socket | tls.TLSSocket; - statusCode: number; - statusMessage: ''; - stream: ServerHttp2Stream; - write(chunk: string | Buffer, callback?: (err: Error) => void): boolean; - write(chunk: string | Buffer, encoding?: string, callback?: (err: Error) => void): boolean; - writeContinue(): void; - writeHead(statusCode: number, headers?: OutgoingHttpHeaders): void; - writeHead(statusCode: number, statusMessage?: string, headers?: OutgoingHttpHeaders): void; - createPushResponse(headers: OutgoingHttpHeaders, callback: (err: Error | null, res: Http2ServerResponse) => void): void; - - addListener(event: string, listener: (...args: any[]) => void): this; - addListener(event: "aborted", listener: (hadError: boolean, code: number) => void): this; - addListener(event: "close", listener: () => void): this; - addListener(event: "drain", listener: () => void): this; - addListener(event: "error", listener: (error: Error) => void): this; - addListener(event: "finish", listener: () => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "aborted", hadError: boolean, code: number): boolean; - emit(event: "close"): boolean; - emit(event: "drain"): boolean; - emit(event: "error", error: Error): boolean; - emit(event: "finish"): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: "aborted", listener: (hadError: boolean, code: number) => void): this; - on(event: "close", listener: () => void): this; - on(event: "drain", listener: () => void): this; - on(event: "error", listener: (error: Error) => void): this; - on(event: "finish", listener: () => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - once(event: "aborted", listener: (hadError: boolean, code: number) => void): this; - once(event: "close", listener: () => void): this; - once(event: "drain", listener: () => void): this; - once(event: "error", listener: (error: Error) => void): this; - once(event: "finish", listener: () => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - prependListener(event: "aborted", listener: (hadError: boolean, code: number) => void): this; - prependListener(event: "close", listener: () => 
void): this; - prependListener(event: "drain", listener: () => void): this; - prependListener(event: "error", listener: (error: Error) => void): this; - prependListener(event: "finish", listener: () => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - prependOnceListener(event: "aborted", listener: (hadError: boolean, code: number) => void): this; - prependOnceListener(event: "close", listener: () => void): this; - prependOnceListener(event: "drain", listener: () => void): this; - prependOnceListener(event: "error", listener: (error: Error) => void): this; - prependOnceListener(event: "finish", listener: () => void): this; - } - - // Public API - - export namespace constants { - export const NGHTTP2_SESSION_SERVER: number; - export const NGHTTP2_SESSION_CLIENT: number; - export const NGHTTP2_STREAM_STATE_IDLE: number; - export const NGHTTP2_STREAM_STATE_OPEN: number; - export const NGHTTP2_STREAM_STATE_RESERVED_LOCAL: number; - export const NGHTTP2_STREAM_STATE_RESERVED_REMOTE: number; - export const NGHTTP2_STREAM_STATE_HALF_CLOSED_LOCAL: number; - export const NGHTTP2_STREAM_STATE_HALF_CLOSED_REMOTE: number; - export const NGHTTP2_STREAM_STATE_CLOSED: number; - export const NGHTTP2_NO_ERROR: number; - export const NGHTTP2_PROTOCOL_ERROR: number; - export const NGHTTP2_INTERNAL_ERROR: number; - export const NGHTTP2_FLOW_CONTROL_ERROR: number; - export const NGHTTP2_SETTINGS_TIMEOUT: number; - export const NGHTTP2_STREAM_CLOSED: number; - export const NGHTTP2_FRAME_SIZE_ERROR: number; - export const NGHTTP2_REFUSED_STREAM: number; - export const NGHTTP2_CANCEL: number; - export const NGHTTP2_COMPRESSION_ERROR: number; - export const NGHTTP2_CONNECT_ERROR: number; - export const NGHTTP2_ENHANCE_YOUR_CALM: number; - export const NGHTTP2_INADEQUATE_SECURITY: number; - export const NGHTTP2_HTTP_1_1_REQUIRED: number; - export const NGHTTP2_ERR_FRAME_SIZE_ERROR: number; - export const NGHTTP2_FLAG_NONE: number; - export const NGHTTP2_FLAG_END_STREAM: number; - export const NGHTTP2_FLAG_END_HEADERS: number; - export const NGHTTP2_FLAG_ACK: number; - export const NGHTTP2_FLAG_PADDED: number; - export const NGHTTP2_FLAG_PRIORITY: number; - export const DEFAULT_SETTINGS_HEADER_TABLE_SIZE: number; - export const DEFAULT_SETTINGS_ENABLE_PUSH: number; - export const DEFAULT_SETTINGS_INITIAL_WINDOW_SIZE: number; - export const DEFAULT_SETTINGS_MAX_FRAME_SIZE: number; - export const MAX_MAX_FRAME_SIZE: number; - export const MIN_MAX_FRAME_SIZE: number; - export const MAX_INITIAL_WINDOW_SIZE: number; - export const NGHTTP2_DEFAULT_WEIGHT: number; - export const NGHTTP2_SETTINGS_HEADER_TABLE_SIZE: number; - export const NGHTTP2_SETTINGS_ENABLE_PUSH: number; - export const NGHTTP2_SETTINGS_MAX_CONCURRENT_STREAMS: number; - export const NGHTTP2_SETTINGS_INITIAL_WINDOW_SIZE: number; - export const NGHTTP2_SETTINGS_MAX_FRAME_SIZE: number; - export const NGHTTP2_SETTINGS_MAX_HEADER_LIST_SIZE: number; - export const PADDING_STRATEGY_NONE: number; - export const PADDING_STRATEGY_MAX: number; - export const PADDING_STRATEGY_CALLBACK: number; - export const HTTP2_HEADER_STATUS: string; - export const HTTP2_HEADER_METHOD: string; - export const HTTP2_HEADER_AUTHORITY: string; - export const HTTP2_HEADER_SCHEME: string; - export const HTTP2_HEADER_PATH: string; - export const HTTP2_HEADER_ACCEPT_CHARSET: string; - export const HTTP2_HEADER_ACCEPT_ENCODING: string; - export const HTTP2_HEADER_ACCEPT_LANGUAGE: string; - export const HTTP2_HEADER_ACCEPT_RANGES: string; - 
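The named constants exported here can stand in for raw header strings and status numbers. A hedged sketch of a "stream" handler written that way; the port is a placeholder:

```ts
import * as http2 from "http2";

const {
  HTTP2_HEADER_METHOD,
  HTTP2_HEADER_PATH,
  HTTP2_HEADER_STATUS,
  HTTP2_METHOD_GET,
  HTTP_STATUS_OK,
  HTTP_STATUS_METHOD_NOT_ALLOWED,
} = http2.constants;

// Sketch: read and write headers via the named constants instead of literals.
function handleStream(stream: http2.ServerHttp2Stream, headers: http2.IncomingHttpHeaders) {
  if (headers[HTTP2_HEADER_METHOD] !== HTTP2_METHOD_GET) {
    stream.respond({ [HTTP2_HEADER_STATUS]: HTTP_STATUS_METHOD_NOT_ALLOWED });
    return stream.end();
  }
  stream.respond({ [HTTP2_HEADER_STATUS]: HTTP_STATUS_OK });
  stream.end(`hello from ${headers[HTTP2_HEADER_PATH]}\n`);
}

http2.createServer().on("stream", handleStream).listen(8080); // placeholder port
```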
export const HTTP2_HEADER_ACCEPT: string; - export const HTTP2_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN: string; - export const HTTP2_HEADER_AGE: string; - export const HTTP2_HEADER_ALLOW: string; - export const HTTP2_HEADER_AUTHORIZATION: string; - export const HTTP2_HEADER_CACHE_CONTROL: string; - export const HTTP2_HEADER_CONNECTION: string; - export const HTTP2_HEADER_CONTENT_DISPOSITION: string; - export const HTTP2_HEADER_CONTENT_ENCODING: string; - export const HTTP2_HEADER_CONTENT_LANGUAGE: string; - export const HTTP2_HEADER_CONTENT_LENGTH: string; - export const HTTP2_HEADER_CONTENT_LOCATION: string; - export const HTTP2_HEADER_CONTENT_MD5: string; - export const HTTP2_HEADER_CONTENT_RANGE: string; - export const HTTP2_HEADER_CONTENT_TYPE: string; - export const HTTP2_HEADER_COOKIE: string; - export const HTTP2_HEADER_DATE: string; - export const HTTP2_HEADER_ETAG: string; - export const HTTP2_HEADER_EXPECT: string; - export const HTTP2_HEADER_EXPIRES: string; - export const HTTP2_HEADER_FROM: string; - export const HTTP2_HEADER_HOST: string; - export const HTTP2_HEADER_IF_MATCH: string; - export const HTTP2_HEADER_IF_MODIFIED_SINCE: string; - export const HTTP2_HEADER_IF_NONE_MATCH: string; - export const HTTP2_HEADER_IF_RANGE: string; - export const HTTP2_HEADER_IF_UNMODIFIED_SINCE: string; - export const HTTP2_HEADER_LAST_MODIFIED: string; - export const HTTP2_HEADER_LINK: string; - export const HTTP2_HEADER_LOCATION: string; - export const HTTP2_HEADER_MAX_FORWARDS: string; - export const HTTP2_HEADER_PREFER: string; - export const HTTP2_HEADER_PROXY_AUTHENTICATE: string; - export const HTTP2_HEADER_PROXY_AUTHORIZATION: string; - export const HTTP2_HEADER_RANGE: string; - export const HTTP2_HEADER_REFERER: string; - export const HTTP2_HEADER_REFRESH: string; - export const HTTP2_HEADER_RETRY_AFTER: string; - export const HTTP2_HEADER_SERVER: string; - export const HTTP2_HEADER_SET_COOKIE: string; - export const HTTP2_HEADER_STRICT_TRANSPORT_SECURITY: string; - export const HTTP2_HEADER_TRANSFER_ENCODING: string; - export const HTTP2_HEADER_TE: string; - export const HTTP2_HEADER_UPGRADE: string; - export const HTTP2_HEADER_USER_AGENT: string; - export const HTTP2_HEADER_VARY: string; - export const HTTP2_HEADER_VIA: string; - export const HTTP2_HEADER_WWW_AUTHENTICATE: string; - export const HTTP2_HEADER_HTTP2_SETTINGS: string; - export const HTTP2_HEADER_KEEP_ALIVE: string; - export const HTTP2_HEADER_PROXY_CONNECTION: string; - export const HTTP2_METHOD_ACL: string; - export const HTTP2_METHOD_BASELINE_CONTROL: string; - export const HTTP2_METHOD_BIND: string; - export const HTTP2_METHOD_CHECKIN: string; - export const HTTP2_METHOD_CHECKOUT: string; - export const HTTP2_METHOD_CONNECT: string; - export const HTTP2_METHOD_COPY: string; - export const HTTP2_METHOD_DELETE: string; - export const HTTP2_METHOD_GET: string; - export const HTTP2_METHOD_HEAD: string; - export const HTTP2_METHOD_LABEL: string; - export const HTTP2_METHOD_LINK: string; - export const HTTP2_METHOD_LOCK: string; - export const HTTP2_METHOD_MERGE: string; - export const HTTP2_METHOD_MKACTIVITY: string; - export const HTTP2_METHOD_MKCALENDAR: string; - export const HTTP2_METHOD_MKCOL: string; - export const HTTP2_METHOD_MKREDIRECTREF: string; - export const HTTP2_METHOD_MKWORKSPACE: string; - export const HTTP2_METHOD_MOVE: string; - export const HTTP2_METHOD_OPTIONS: string; - export const HTTP2_METHOD_ORDERPATCH: string; - export const HTTP2_METHOD_PATCH: string; - export const HTTP2_METHOD_POST: string; - 
export const HTTP2_METHOD_PRI: string; - export const HTTP2_METHOD_PROPFIND: string; - export const HTTP2_METHOD_PROPPATCH: string; - export const HTTP2_METHOD_PUT: string; - export const HTTP2_METHOD_REBIND: string; - export const HTTP2_METHOD_REPORT: string; - export const HTTP2_METHOD_SEARCH: string; - export const HTTP2_METHOD_TRACE: string; - export const HTTP2_METHOD_UNBIND: string; - export const HTTP2_METHOD_UNCHECKOUT: string; - export const HTTP2_METHOD_UNLINK: string; - export const HTTP2_METHOD_UNLOCK: string; - export const HTTP2_METHOD_UPDATE: string; - export const HTTP2_METHOD_UPDATEREDIRECTREF: string; - export const HTTP2_METHOD_VERSION_CONTROL: string; - export const HTTP_STATUS_CONTINUE: number; - export const HTTP_STATUS_SWITCHING_PROTOCOLS: number; - export const HTTP_STATUS_PROCESSING: number; - export const HTTP_STATUS_OK: number; - export const HTTP_STATUS_CREATED: number; - export const HTTP_STATUS_ACCEPTED: number; - export const HTTP_STATUS_NON_AUTHORITATIVE_INFORMATION: number; - export const HTTP_STATUS_NO_CONTENT: number; - export const HTTP_STATUS_RESET_CONTENT: number; - export const HTTP_STATUS_PARTIAL_CONTENT: number; - export const HTTP_STATUS_MULTI_STATUS: number; - export const HTTP_STATUS_ALREADY_REPORTED: number; - export const HTTP_STATUS_IM_USED: number; - export const HTTP_STATUS_MULTIPLE_CHOICES: number; - export const HTTP_STATUS_MOVED_PERMANENTLY: number; - export const HTTP_STATUS_FOUND: number; - export const HTTP_STATUS_SEE_OTHER: number; - export const HTTP_STATUS_NOT_MODIFIED: number; - export const HTTP_STATUS_USE_PROXY: number; - export const HTTP_STATUS_TEMPORARY_REDIRECT: number; - export const HTTP_STATUS_PERMANENT_REDIRECT: number; - export const HTTP_STATUS_BAD_REQUEST: number; - export const HTTP_STATUS_UNAUTHORIZED: number; - export const HTTP_STATUS_PAYMENT_REQUIRED: number; - export const HTTP_STATUS_FORBIDDEN: number; - export const HTTP_STATUS_NOT_FOUND: number; - export const HTTP_STATUS_METHOD_NOT_ALLOWED: number; - export const HTTP_STATUS_NOT_ACCEPTABLE: number; - export const HTTP_STATUS_PROXY_AUTHENTICATION_REQUIRED: number; - export const HTTP_STATUS_REQUEST_TIMEOUT: number; - export const HTTP_STATUS_CONFLICT: number; - export const HTTP_STATUS_GONE: number; - export const HTTP_STATUS_LENGTH_REQUIRED: number; - export const HTTP_STATUS_PRECONDITION_FAILED: number; - export const HTTP_STATUS_PAYLOAD_TOO_LARGE: number; - export const HTTP_STATUS_URI_TOO_LONG: number; - export const HTTP_STATUS_UNSUPPORTED_MEDIA_TYPE: number; - export const HTTP_STATUS_RANGE_NOT_SATISFIABLE: number; - export const HTTP_STATUS_EXPECTATION_FAILED: number; - export const HTTP_STATUS_TEAPOT: number; - export const HTTP_STATUS_MISDIRECTED_REQUEST: number; - export const HTTP_STATUS_UNPROCESSABLE_ENTITY: number; - export const HTTP_STATUS_LOCKED: number; - export const HTTP_STATUS_FAILED_DEPENDENCY: number; - export const HTTP_STATUS_UNORDERED_COLLECTION: number; - export const HTTP_STATUS_UPGRADE_REQUIRED: number; - export const HTTP_STATUS_PRECONDITION_REQUIRED: number; - export const HTTP_STATUS_TOO_MANY_REQUESTS: number; - export const HTTP_STATUS_REQUEST_HEADER_FIELDS_TOO_LARGE: number; - export const HTTP_STATUS_UNAVAILABLE_FOR_LEGAL_REASONS: number; - export const HTTP_STATUS_INTERNAL_SERVER_ERROR: number; - export const HTTP_STATUS_NOT_IMPLEMENTED: number; - export const HTTP_STATUS_BAD_GATEWAY: number; - export const HTTP_STATUS_SERVICE_UNAVAILABLE: number; - export const HTTP_STATUS_GATEWAY_TIMEOUT: number; - export const 
HTTP_STATUS_HTTP_VERSION_NOT_SUPPORTED: number; - export const HTTP_STATUS_VARIANT_ALSO_NEGOTIATES: number; - export const HTTP_STATUS_INSUFFICIENT_STORAGE: number; - export const HTTP_STATUS_LOOP_DETECTED: number; - export const HTTP_STATUS_BANDWIDTH_LIMIT_EXCEEDED: number; - export const HTTP_STATUS_NOT_EXTENDED: number; - export const HTTP_STATUS_NETWORK_AUTHENTICATION_REQUIRED: number; - } - - export function getDefaultSettings(): Settings; - export function getPackedSettings(settings: Settings): Settings; - export function getUnpackedSettings(buf: Buffer | Uint8Array): Settings; - - export function createServer(onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void): Http2Server; - export function createServer(options: ServerOptions, onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void): Http2Server; - - export function createSecureServer(onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void): Http2SecureServer; - export function createSecureServer(options: SecureServerOptions, onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void): Http2SecureServer; - - export function connect(authority: string | url.URL, listener?: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): ClientHttp2Session; - export function connect(authority: string | url.URL, options?: ClientSessionOptions | SecureClientSessionOptions, listener?: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): ClientHttp2Session; -} - -declare module "perf_hooks" { - import { AsyncResource } from "async_hooks"; - - export interface PerformanceEntry { - /** - * The total number of milliseconds elapsed for this entry. - * This value will not be meaningful for all Performance Entry types. - */ - readonly duration: number; - - /** - * The name of the performance entry. - */ - readonly name: string; - - /** - * The high resolution millisecond timestamp marking the starting time of the Performance Entry. - */ - readonly startTime: number; - - /** - * The type of the performance entry. - * Currently it may be one of: 'node', 'mark', 'measure', 'gc', or 'function'. - */ - readonly entryType: string; - - /** - * When performanceEntry.entryType is equal to 'gc', the performance.kind property identifies - * the type of garbage collection operation that occurred. - * The value may be one of perf_hooks.constants. - */ - readonly kind?: number; - } - - export interface PerformanceNodeTiming extends PerformanceEntry { - /** - * The high resolution millisecond timestamp at which the Node.js process completed bootstrap. - */ - readonly bootstrapComplete: number; - - /** - * The high resolution millisecond timestamp at which cluster processing ended. - */ - readonly clusterSetupEnd: number; - - /** - * The high resolution millisecond timestamp at which cluster processing started. - */ - readonly clusterSetupStart: number; - - /** - * The high resolution millisecond timestamp at which the Node.js event loop exited. - */ - readonly loopExit: number; - - /** - * The high resolution millisecond timestamp at which the Node.js event loop started. - */ - readonly loopStart: number; - - /** - * The high resolution millisecond timestamp at which main module load ended. - */ - readonly moduleLoadEnd: number; - - /** - * The high resolution millisecond timestamp at which main module load started. 
- */ - readonly moduleLoadStart: number; - - /** - * The high resolution millisecond timestamp at which the Node.js process was initialized. - */ - readonly nodeStart: number; - - /** - * The high resolution millisecond timestamp at which preload module load ended. - */ - readonly preloadModuleLoadEnd: number; - - /** - * The high resolution millisecond timestamp at which preload module load started. - */ - readonly preloadModuleLoadStart: number; - - /** - * The high resolution millisecond timestamp at which third_party_main processing ended. - */ - readonly thirdPartyMainEnd: number; - - /** - * The high resolution millisecond timestamp at which third_party_main processing started. - */ - readonly thirdPartyMainStart: number; - - /** - * The high resolution millisecond timestamp at which the V8 platform was initialized. - */ - readonly v8Start: number; - } - - export interface Performance { - /** - * If name is not provided, removes all PerformanceFunction objects from the Performance Timeline. - * If name is provided, removes entries with name. - * @param name - */ - clearFunctions(name?: string): void; - - /** - * If name is not provided, removes all PerformanceMark objects from the Performance Timeline. - * If name is provided, removes only the named mark. - * @param name - */ - clearMarks(name?: string): void; - - /** - * If name is not provided, removes all PerformanceMeasure objects from the Performance Timeline. - * If name is provided, removes only objects whose performanceEntry.name matches name. - */ - clearMeasures(name?: string): void; - - /** - * Returns a list of all PerformanceEntry objects in chronological order with respect to performanceEntry.startTime. - * @return list of all PerformanceEntry objects - */ - getEntries(): PerformanceEntry[]; - - /** - * Returns a list of all PerformanceEntry objects in chronological order with respect to performanceEntry.startTime - * whose performanceEntry.name is equal to name, and optionally, whose performanceEntry.entryType is equal to type. - * @param name - * @param type - * @return list of all PerformanceEntry objects - */ - getEntriesByName(name: string, type?: string): PerformanceEntry[]; - - /** - * Returns a list of all PerformanceEntry objects in chronological order with respect to performanceEntry.startTime - * whose performanceEntry.entryType is equal to type. - * @param type - * @return list of all PerformanceEntry objects - */ - getEntriesByType(type: string): PerformanceEntry[]; - - /** - * Creates a new PerformanceMark entry in the Performance Timeline. - * A PerformanceMark is a subclass of PerformanceEntry whose performanceEntry.entryType is always 'mark', - * and whose performanceEntry.duration is always 0. - * Performance marks are used to mark specific significant moments in the Performance Timeline. - * @param name - */ - mark(name?: string): void; - - /** - * Creates a new PerformanceMeasure entry in the Performance Timeline. - * A PerformanceMeasure is a subclass of PerformanceEntry whose performanceEntry.entryType is always 'measure', - * and whose performanceEntry.duration measures the number of milliseconds elapsed since startMark and endMark. - * - * The startMark argument may identify any existing PerformanceMark in the the Performance Timeline, or may identify - * any of the timestamp properties provided by the PerformanceNodeTiming class. If the named startMark does not exist, - * then startMark is set to timeOrigin by default. 
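The measure() documentation here describes the startMark/endMark flow; a minimal sketch that ties it to PerformanceObserver (declared further below), with arbitrary mark and measure names:

```ts
import { performance, PerformanceObserver } from "perf_hooks";

// Sketch: observe 'measure' entries, then create one from two marks.
const obs = new PerformanceObserver((list, observer) => {
  for (const entry of list.getEntries()) {
    console.log(`${entry.name}: ${entry.duration}ms`);
  }
  observer.disconnect();
});
obs.observe({ entryTypes: ["measure"] });

performance.mark("work-start");
// ... some work being timed ...
performance.mark("work-end");
performance.measure("work", "work-start", "work-end");
```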
- * - * The endMark argument must identify any existing PerformanceMark in the the Performance Timeline or any of the timestamp - * properties provided by the PerformanceNodeTiming class. If the named endMark does not exist, an error will be thrown. - * @param name - * @param startMark - * @param endMark - */ - measure(name: string, startMark: string, endMark: string): void; - - /** - * An instance of the PerformanceNodeTiming class that provides performance metrics for specific Node.js operational milestones. - */ - readonly nodeTiming: PerformanceNodeTiming; - - /** - * @return the current high resolution millisecond timestamp - */ - now(): number; - - /** - * The timeOrigin specifies the high resolution millisecond timestamp from which all performance metric durations are measured. - */ - readonly timeOrigin: number; - - /** - * Wraps a function within a new function that measures the running time of the wrapped function. - * A PerformanceObserver must be subscribed to the 'function' event type in order for the timing details to be accessed. - * @param fn - */ - timerify<T extends (...args: any[]) => any>(fn: T): T; - } - - export interface PerformanceObserverEntryList { - /** - * @return a list of PerformanceEntry objects in chronological order with respect to performanceEntry.startTime. - */ - getEntries(): PerformanceEntry[]; - - /** - * @return a list of PerformanceEntry objects in chronological order with respect to performanceEntry.startTime - * whose performanceEntry.name is equal to name, and optionally, whose performanceEntry.entryType is equal to type. - */ - getEntriesByName(name: string, type?: string): PerformanceEntry[]; - - /** - * @return Returns a list of PerformanceEntry objects in chronological order with respect to performanceEntry.startTime - * whose performanceEntry.entryType is equal to type. - */ - getEntriesByType(type: string): PerformanceEntry[]; - } - - export type PerformanceObserverCallback = (list: PerformanceObserverEntryList, observer: PerformanceObserver) => void; - - export class PerformanceObserver extends AsyncResource { - constructor(callback: PerformanceObserverCallback); - - /** - * Disconnects the PerformanceObserver instance from all notifications. - */ - disconnect(): void; - - /** - * Subscribes the PerformanceObserver instance to notifications of new PerformanceEntry instances identified by options.entryTypes. - * When options.buffered is false, the callback will be invoked once for every PerformanceEntry instance. - * Property buffered defaults to false. - * @param options - */ - observe(options: { entryTypes: string[], buffered?: boolean }): void; - } - - export namespace constants { - export const NODE_PERFORMANCE_GC_MAJOR: number; - export const NODE_PERFORMANCE_GC_MINOR: number; - export const NODE_PERFORMANCE_GC_INCREMENTAL: number; - export const NODE_PERFORMANCE_GC_WEAKCB: number; - } - - const performance: Performance; -} diff --git a/node_modules/@types/node/inspector.d.ts b/node_modules/@types/node/inspector.d.ts deleted file mode 100644 index 955239b486aa9..0000000000000 --- a/node_modules/@types/node/inspector.d.ts +++ /dev/null @@ -1,2488 +0,0 @@ -// Type definitions for inspector - -// These definitions are auto-generated. -- Please see https://github.com/DefinitelyTyped/DefinitelyTyped/pull/19330 -// for more information. - -/** - * The inspector module provides an API for interacting with the V8 inspector.
- */ -declare module "inspector" { - import { EventEmitter } from 'events'; - - export interface InspectorNotification<T> { - method: string; - params: T; - } - - export namespace Schema { - /** - * Description of the protocol domain. - */ - export interface Domain { - /** - * Domain name. - */ - name: string; - /** - * Domain version. - */ - version: string; - } - - export interface GetDomainsReturnType { - /** - * List of supported domains. - */ - domains: Schema.Domain[]; - } - } - - export namespace Runtime { - /** - * Unique script identifier. - */ - export type ScriptId = string; - - /** - * Unique object identifier. - */ - export type RemoteObjectId = string; - - /** - * Primitive value which cannot be JSON-stringified. - */ - export type UnserializableValue = string; - - /** - * Mirror object referencing original JavaScript object. - */ - export interface RemoteObject { - /** - * Object type. - */ - type: string; - /** - * Object subtype hint. Specified for object type values only. - */ - subtype?: string; - /** - * Object class (constructor) name. Specified for object type values only. - */ - className?: string; - /** - * Remote object value in case of primitive values or JSON values (if it was requested). - */ - value?: any; - /** - * Primitive value which can not be JSON-stringified does not have value, but gets this property. - */ - unserializableValue?: Runtime.UnserializableValue; - /** - * String representation of the object. - */ - description?: string; - /** - * Unique object identifier (for non-primitive values). - */ - objectId?: Runtime.RemoteObjectId; - /** - * Preview containing abbreviated property values. Specified for object type values only. - * @experimental - */ - preview?: Runtime.ObjectPreview; - /** - * @experimental - */ - customPreview?: Runtime.CustomPreview; - } - - /** - * @experimental - */ - export interface CustomPreview { - header: string; - hasBody: boolean; - formatterObjectId: Runtime.RemoteObjectId; - bindRemoteObjectFunctionId: Runtime.RemoteObjectId; - configObjectId?: Runtime.RemoteObjectId; - } - - /** - * Object containing abbreviated remote object value. - * @experimental - */ - export interface ObjectPreview { - /** - * Object type. - */ - type: string; - /** - * Object subtype hint. Specified for object type values only. - */ - subtype?: string; - /** - * String representation of the object. - */ - description?: string; - /** - * True iff some of the properties or entries of the original object did not fit. - */ - overflow: boolean; - /** - * List of the properties. - */ - properties: Runtime.PropertyPreview[]; - /** - * List of the entries. Specified for map and set subtype values only. - */ - entries?: Runtime.EntryPreview[]; - } - - /** - * @experimental - */ - export interface PropertyPreview { - /** - * Property name. - */ - name: string; - /** - * Object type. Accessor means that the property itself is an accessor property. - */ - type: string; - /** - * User-friendly property value string. - */ - value?: string; - /** - * Nested value preview. - */ - valuePreview?: Runtime.ObjectPreview; - /** - * Object subtype hint. Specified for object type values only. - */ - subtype?: string; - } - - /** - * @experimental - */ - export interface EntryPreview { - /** - * Preview of the key. Specified for map-like collection entries. - */ - key?: Runtime.ObjectPreview; - /** - * Preview of the value. - */ - value: Runtime.ObjectPreview; - } - - /** - * Object property descriptor.
- */ - export interface PropertyDescriptor { - /** - * Property name or symbol description. - */ - name: string; - /** - * The value associated with the property. - */ - value?: Runtime.RemoteObject; - /** - * True if the value associated with the property may be changed (data descriptors only). - */ - writable?: boolean; - /** - * A function which serves as a getter for the property, or undefined if there is no getter (accessor descriptors only). - */ - get?: Runtime.RemoteObject; - /** - * A function which serves as a setter for the property, or undefined if there is no setter (accessor descriptors only). - */ - set?: Runtime.RemoteObject; - /** - * True if the type of this property descriptor may be changed and if the property may be deleted from the corresponding object. - */ - configurable: boolean; - /** - * True if this property shows up during enumeration of the properties on the corresponding object. - */ - enumerable: boolean; - /** - * True if the result was thrown during the evaluation. - */ - wasThrown?: boolean; - /** - * True if the property is owned for the object. - */ - isOwn?: boolean; - /** - * Property symbol object, if the property is of the symbol type. - */ - symbol?: Runtime.RemoteObject; - } - - /** - * Object internal property descriptor. This property isn't normally visible in JavaScript code. - */ - export interface InternalPropertyDescriptor { - /** - * Conventional property name. - */ - name: string; - /** - * The value associated with the property. - */ - value?: Runtime.RemoteObject; - } - - /** - * Represents function call argument. Either remote object id objectId, primitive value, unserializable primitive value or neither of (for undefined) them should be specified. - */ - export interface CallArgument { - /** - * Primitive value. - */ - value?: any; - /** - * Primitive value which can not be JSON-stringified. - */ - unserializableValue?: Runtime.UnserializableValue; - /** - * Remote object handle. - */ - objectId?: Runtime.RemoteObjectId; - } - - /** - * Id of an execution context. - */ - export type ExecutionContextId = number; - - /** - * Description of an isolated world. - */ - export interface ExecutionContextDescription { - /** - * Unique id of the execution context. It can be used to specify in which execution context script evaluation should be performed. - */ - id: Runtime.ExecutionContextId; - /** - * Execution context origin. - */ - origin: string; - /** - * Human readable name describing given context. - */ - name: string; - /** - * Embedder-specific auxiliary data. - */ - auxData?: {}; - } - - /** - * Detailed information about exception (or error) that was thrown during script compilation or execution. - */ - export interface ExceptionDetails { - /** - * Exception id. - */ - exceptionId: number; - /** - * Exception text, which should be used together with exception object when available. - */ - text: string; - /** - * Line number of the exception location (0-based). - */ - lineNumber: number; - /** - * Column number of the exception location (0-based). - */ - columnNumber: number; - /** - * Script ID of the exception location. - */ - scriptId?: Runtime.ScriptId; - /** - * URL of the exception location, to be used when the script was not reported. - */ - url?: string; - /** - * JavaScript stack trace if available. - */ - stackTrace?: Runtime.StackTrace; - /** - * Exception object if available. - */ - exception?: Runtime.RemoteObject; - /** - * Identifier of the context where exception happened. 
- */ - executionContextId?: Runtime.ExecutionContextId; - } - - /** - * Number of milliseconds since epoch. - */ - export type Timestamp = number; - - /** - * Stack entry for runtime errors and assertions. - */ - export interface CallFrame { - /** - * JavaScript function name. - */ - functionName: string; - /** - * JavaScript script id. - */ - scriptId: Runtime.ScriptId; - /** - * JavaScript script name or url. - */ - url: string; - /** - * JavaScript script line number (0-based). - */ - lineNumber: number; - /** - * JavaScript script column number (0-based). - */ - columnNumber: number; - } - - /** - * Call frames for assertions or error messages. - */ - export interface StackTrace { - /** - * String label of this stack trace. For async traces this may be a name of the function that initiated the async call. - */ - description?: string; - /** - * JavaScript function name. - */ - callFrames: Runtime.CallFrame[]; - /** - * Asynchronous JavaScript stack trace that preceded this stack, if available. - */ - parent?: Runtime.StackTrace; - /** - * Creation frame of the Promise which produced the next synchronous trace when resolved, if available. - * @experimental - */ - promiseCreationFrame?: Runtime.CallFrame; - } - - export interface EvaluateParameterType { - /** - * Expression to evaluate. - */ - expression: string; - /** - * Symbolic group name that can be used to release multiple objects. - */ - objectGroup?: string; - /** - * Determines whether Command Line API should be available during the evaluation. - */ - includeCommandLineAPI?: boolean; - /** - * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. - */ - silent?: boolean; - /** - * Specifies in which execution context to perform evaluation. If the parameter is omitted the evaluation will be performed in the context of the inspected page. - */ - contextId?: Runtime.ExecutionContextId; - /** - * Whether the result is expected to be a JSON object that should be sent by value. - */ - returnByValue?: boolean; - /** - * Whether preview should be generated for the result. - * @experimental - */ - generatePreview?: boolean; - /** - * Whether execution should be treated as initiated by user in the UI. - * @experimental - */ - userGesture?: boolean; - /** - * Whether execution should wait for promise to be resolved. If the result of evaluation is not a Promise, it's considered to be an error. - */ - awaitPromise?: boolean; - } - - export interface AwaitPromiseParameterType { - /** - * Identifier of the promise. - */ - promiseObjectId: Runtime.RemoteObjectId; - /** - * Whether the result is expected to be a JSON object that should be sent by value. - */ - returnByValue?: boolean; - /** - * Whether preview should be generated for the result. - */ - generatePreview?: boolean; - } - - export interface CallFunctionOnParameterType { - /** - * Identifier of the object to call function on. - */ - objectId: Runtime.RemoteObjectId; - /** - * Declaration of the function to call. - */ - functionDeclaration: string; - /** - * Call arguments. All call arguments must belong to the same JavaScript world as the target object. - */ - arguments?: Runtime.CallArgument[]; - /** - * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. - */ - silent?: boolean; - /** - * Whether the result is expected to be a JSON object which should be sent by value. 
- */ - returnByValue?: boolean; - /** - * Whether preview should be generated for the result. - * @experimental - */ - generatePreview?: boolean; - /** - * Whether execution should be treated as initiated by user in the UI. - * @experimental - */ - userGesture?: boolean; - /** - * Whether execution should wait for promise to be resolved. If the result of evaluation is not a Promise, it's considered to be an error. - */ - awaitPromise?: boolean; - } - - export interface GetPropertiesParameterType { - /** - * Identifier of the object to return properties for. - */ - objectId: Runtime.RemoteObjectId; - /** - * If true, returns properties belonging only to the element itself, not to its prototype chain. - */ - ownProperties?: boolean; - /** - * If true, returns accessor properties (with getter/setter) only; internal properties are not returned either. - * @experimental - */ - accessorPropertiesOnly?: boolean; - /** - * Whether preview should be generated for the results. - * @experimental - */ - generatePreview?: boolean; - } - - export interface ReleaseObjectParameterType { - /** - * Identifier of the object to release. - */ - objectId: Runtime.RemoteObjectId; - } - - export interface ReleaseObjectGroupParameterType { - /** - * Symbolic object group name. - */ - objectGroup: string; - } - - export interface SetCustomObjectFormatterEnabledParameterType { - enabled: boolean; - } - - export interface CompileScriptParameterType { - /** - * Expression to compile. - */ - expression: string; - /** - * Source url to be set for the script. - */ - sourceURL: string; - /** - * Specifies whether the compiled script should be persisted. - */ - persistScript: boolean; - /** - * Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page. - */ - executionContextId?: Runtime.ExecutionContextId; - } - - export interface RunScriptParameterType { - /** - * Id of the script to run. - */ - scriptId: Runtime.ScriptId; - /** - * Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page. - */ - executionContextId?: Runtime.ExecutionContextId; - /** - * Symbolic group name that can be used to release multiple objects. - */ - objectGroup?: string; - /** - * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. - */ - silent?: boolean; - /** - * Determines whether Command Line API should be available during the evaluation. - */ - includeCommandLineAPI?: boolean; - /** - * Whether the result is expected to be a JSON object which should be sent by value. - */ - returnByValue?: boolean; - /** - * Whether preview should be generated for the result. - */ - generatePreview?: boolean; - /** - * Whether execution should wait for promise to be resolved. If the result of evaluation is not a Promise, it's considered to be an error. - */ - awaitPromise?: boolean; - } - - export interface EvaluateReturnType { - /** - * Evaluation result. - */ - result: Runtime.RemoteObject; - /** - * Exception details. - */ - exceptionDetails?: Runtime.ExceptionDetails; - } - - export interface AwaitPromiseReturnType { - /** - * Promise result. Will contain rejected value if promise was rejected. - */ - result: Runtime.RemoteObject; - /** - * Exception details if stack strace is available. 
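As a rough illustration of how EvaluateParameterType and EvaluateReturnType are exchanged over the protocol, a hedged sketch using the in-process inspector Session; the expression is arbitrary:

```ts
import * as inspector from "inspector";

// Sketch: evaluate an expression in-process via the inspector protocol.
const session = new inspector.Session();
session.connect();

session.post(
  "Runtime.evaluate",
  { expression: "1 + 1", returnByValue: true },
  (err, result) => {
    if (err) throw err;
    // result.result is a Runtime.RemoteObject; exceptionDetails is set on throw.
    console.log(result.result.value); // 2
    session.disconnect();
  }
);
```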
- */ - exceptionDetails?: Runtime.ExceptionDetails; - } - - export interface CallFunctionOnReturnType { - /** - * Call result. - */ - result: Runtime.RemoteObject; - /** - * Exception details. - */ - exceptionDetails?: Runtime.ExceptionDetails; - } - - export interface GetPropertiesReturnType { - /** - * Object properties. - */ - result: Runtime.PropertyDescriptor[]; - /** - * Internal object properties (only of the element itself). - */ - internalProperties?: Runtime.InternalPropertyDescriptor[]; - /** - * Exception details. - */ - exceptionDetails?: Runtime.ExceptionDetails; - } - - export interface CompileScriptReturnType { - /** - * Id of the script. - */ - scriptId?: Runtime.ScriptId; - /** - * Exception details. - */ - exceptionDetails?: Runtime.ExceptionDetails; - } - - export interface RunScriptReturnType { - /** - * Run result. - */ - result: Runtime.RemoteObject; - /** - * Exception details. - */ - exceptionDetails?: Runtime.ExceptionDetails; - } - - export interface ExecutionContextCreatedEventDataType { - /** - * A newly created execution context. - */ - context: Runtime.ExecutionContextDescription; - } - - export interface ExecutionContextDestroyedEventDataType { - /** - * Id of the destroyed context - */ - executionContextId: Runtime.ExecutionContextId; - } - - export interface ExceptionThrownEventDataType { - /** - * Timestamp of the exception. - */ - timestamp: Runtime.Timestamp; - exceptionDetails: Runtime.ExceptionDetails; - } - - export interface ExceptionRevokedEventDataType { - /** - * Reason describing why exception was revoked. - */ - reason: string; - /** - * The id of revoked exception, as reported in exceptionUnhandled. - */ - exceptionId: number; - } - - export interface ConsoleAPICalledEventDataType { - /** - * Type of the call. - */ - type: string; - /** - * Call arguments. - */ - args: Runtime.RemoteObject[]; - /** - * Identifier of the context where the call was made. - */ - executionContextId: Runtime.ExecutionContextId; - /** - * Call timestamp. - */ - timestamp: Runtime.Timestamp; - /** - * Stack trace captured when the call was made. - */ - stackTrace?: Runtime.StackTrace; - /** - * Console context descriptor for calls on non-default console context (not console.*): 'anonymous#unique-logger-id' for call on unnamed context, 'name#unique-logger-id' for call on named context. - * @experimental - */ - context?: string; - } - - export interface InspectRequestedEventDataType { - object: Runtime.RemoteObject; - hints: {}; - } - } - - export namespace Debugger { - /** - * Breakpoint identifier. - */ - export type BreakpointId = string; - - /** - * Call frame identifier. - */ - export type CallFrameId = string; - - /** - * Location in the source code. - */ - export interface Location { - /** - * Script identifier as reported in the Debugger.scriptParsed. - */ - scriptId: Runtime.ScriptId; - /** - * Line number in the script (0-based). - */ - lineNumber: number; - /** - * Column number in the script (0-based). - */ - columnNumber?: number; - } - - /** - * Location in the source code. - * @experimental - */ - export interface ScriptPosition { - lineNumber: number; - columnNumber: number; - } - - /** - * JavaScript call frame. Array of call frames form the call stack. - */ - export interface CallFrame { - /** - * Call frame identifier. This identifier is only valid while the virtual machine is paused. - */ - callFrameId: Debugger.CallFrameId; - /** - * Name of the JavaScript function called on this call frame. 
- */ - functionName: string; - /** - * Location in the source code. - * @experimental - */ - functionLocation?: Debugger.Location; - /** - * Location in the source code. - */ - location: Debugger.Location; - /** - * Scope chain for this call frame. - */ - scopeChain: Debugger.Scope[]; - /** - * this object for this call frame. - */ - this: Runtime.RemoteObject; - /** - * The value being returned, if the function is at return point. - */ - returnValue?: Runtime.RemoteObject; - } - - /** - * Scope description. - */ - export interface Scope { - /** - * Scope type. - */ - type: string; - /** - * Object representing the scope. For global and with scopes it represents the actual object; for the rest of the scopes, it is artificial transient object enumerating scope variables as its properties. - */ - object: Runtime.RemoteObject; - name?: string; - /** - * Location in the source code where scope starts - */ - startLocation?: Debugger.Location; - /** - * Location in the source code where scope ends - */ - endLocation?: Debugger.Location; - } - - /** - * Search match for resource. - * @experimental - */ - export interface SearchMatch { - /** - * Line number in resource content. - */ - lineNumber: number; - /** - * Line with match content. - */ - lineContent: string; - } - - /** - * @experimental - */ - export interface BreakLocation { - /** - * Script identifier as reported in the Debugger.scriptParsed. - */ - scriptId: Runtime.ScriptId; - /** - * Line number in the script (0-based). - */ - lineNumber: number; - /** - * Column number in the script (0-based). - */ - columnNumber?: number; - type?: string; - } - - export interface SetBreakpointsActiveParameterType { - /** - * New value for breakpoints active state. - */ - active: boolean; - } - - export interface SetSkipAllPausesParameterType { - /** - * New value for skip pauses state. - */ - skip: boolean; - } - - export interface SetBreakpointByUrlParameterType { - /** - * Line number to set breakpoint at. - */ - lineNumber: number; - /** - * URL of the resources to set breakpoint on. - */ - url?: string; - /** - * Regex pattern for the URLs of the resources to set breakpoints on. Either url or urlRegex must be specified. - */ - urlRegex?: string; - /** - * Offset in the line to set breakpoint at. - */ - columnNumber?: number; - /** - * Expression to use as a breakpoint condition. When specified, debugger will only stop on the breakpoint if this expression evaluates to true. - */ - condition?: string; - } - - export interface SetBreakpointParameterType { - /** - * Location to set breakpoint in. - */ - location: Debugger.Location; - /** - * Expression to use as a breakpoint condition. When specified, debugger will only stop on the breakpoint if this expression evaluates to true. - */ - condition?: string; - } - - export interface RemoveBreakpointParameterType { - breakpointId: Debugger.BreakpointId; - } - - export interface GetPossibleBreakpointsParameterType { - /** - * Start of range to search possible breakpoint locations in. - */ - start: Debugger.Location; - /** - * End of range to search possible breakpoint locations in (excluding). When not specified, end of scripts is used as end of range. - */ - end?: Debugger.Location; - /** - * Only consider locations which are in the same (non-nested) function as start. - */ - restrictToFunction?: boolean; - } - - export interface ContinueToLocationParameterType { - /** - * Location to continue to. 
- */ - location: Debugger.Location; - /** - * @experimental - */ - targetCallFrames?: string; - } - - export interface SearchInContentParameterType { - /** - * Id of the script to search in. - */ - scriptId: Runtime.ScriptId; - /** - * String to search for. - */ - query: string; - /** - * If true, search is case sensitive. - */ - caseSensitive?: boolean; - /** - * If true, treats string parameter as regex. - */ - isRegex?: boolean; - } - - export interface SetScriptSourceParameterType { - /** - * Id of the script to edit. - */ - scriptId: Runtime.ScriptId; - /** - * New content of the script. - */ - scriptSource: string; - /** - * If true the change will not actually be applied. Dry run may be used to get result description without actually modifying the code. - */ - dryRun?: boolean; - } - - export interface RestartFrameParameterType { - /** - * Call frame identifier to evaluate on. - */ - callFrameId: Debugger.CallFrameId; - } - - export interface GetScriptSourceParameterType { - /** - * Id of the script to get source for. - */ - scriptId: Runtime.ScriptId; - } - - export interface SetPauseOnExceptionsParameterType { - /** - * Pause on exceptions mode. - */ - state: string; - } - - export interface EvaluateOnCallFrameParameterType { - /** - * Call frame identifier to evaluate on. - */ - callFrameId: Debugger.CallFrameId; - /** - * Expression to evaluate. - */ - expression: string; - /** - * String object group name to put result into (allows rapid releasing resulting object handles using releaseObjectGroup). - */ - objectGroup?: string; - /** - * Specifies whether command line API should be available to the evaluated expression, defaults to false. - */ - includeCommandLineAPI?: boolean; - /** - * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. - */ - silent?: boolean; - /** - * Whether the result is expected to be a JSON object that should be sent by value. - */ - returnByValue?: boolean; - /** - * Whether preview should be generated for the result. - * @experimental - */ - generatePreview?: boolean; - /** - * Whether to throw an exception if side effect cannot be ruled out during evaluation. - * @experimental - */ - throwOnSideEffect?: boolean; - } - - export interface SetVariableValueParameterType { - /** - * 0-based number of scope as was listed in scope chain. Only 'local', 'closure' and 'catch' scope types are allowed. Other scopes could be manipulated manually. - */ - scopeNumber: number; - /** - * Variable name. - */ - variableName: string; - /** - * New variable value. - */ - newValue: Runtime.CallArgument; - /** - * Id of callframe that holds variable. - */ - callFrameId: Debugger.CallFrameId; - } - - export interface SetAsyncCallStackDepthParameterType { - /** - * Maximum depth of async call stacks. Setting to 0 will effectively disable collecting async call stacks (default). - */ - maxDepth: number; - } - - export interface SetBlackboxPatternsParameterType { - /** - * Array of regexps that will be used to check script url for blackbox state. - */ - patterns: string[]; - } - - export interface SetBlackboxedRangesParameterType { - /** - * Id of the script. - */ - scriptId: Runtime.ScriptId; - positions: Debugger.ScriptPosition[]; - } - - export interface SetBreakpointByUrlReturnType { - /** - * Id of the created breakpoint for further reference. - */ - breakpointId: Debugger.BreakpointId; - /** - * List of the locations this breakpoint resolved into upon addition. 
- */ - locations: Debugger.Location[]; - } - - export interface SetBreakpointReturnType { - /** - * Id of the created breakpoint for further reference. - */ - breakpointId: Debugger.BreakpointId; - /** - * Location this breakpoint resolved into. - */ - actualLocation: Debugger.Location; - } - - export interface GetPossibleBreakpointsReturnType { - /** - * List of the possible breakpoint locations. - */ - locations: Debugger.BreakLocation[]; - } - - export interface SearchInContentReturnType { - /** - * List of search matches. - */ - result: Debugger.SearchMatch[]; - } - - export interface SetScriptSourceReturnType { - /** - * New stack trace in case editing has happened while VM was stopped. - */ - callFrames?: Debugger.CallFrame[]; - /** - * Whether current call stack was modified after applying the changes. - */ - stackChanged?: boolean; - /** - * Async stack trace, if any. - */ - asyncStackTrace?: Runtime.StackTrace; - /** - * Exception details if any. - */ - exceptionDetails?: Runtime.ExceptionDetails; - } - - export interface RestartFrameReturnType { - /** - * New stack trace. - */ - callFrames: Debugger.CallFrame[]; - /** - * Async stack trace, if any. - */ - asyncStackTrace?: Runtime.StackTrace; - } - - export interface GetScriptSourceReturnType { - /** - * Script source. - */ - scriptSource: string; - } - - export interface EvaluateOnCallFrameReturnType { - /** - * Object wrapper for the evaluation result. - */ - result: Runtime.RemoteObject; - /** - * Exception details. - */ - exceptionDetails?: Runtime.ExceptionDetails; - } - - export interface ScriptParsedEventDataType { - /** - * Identifier of the script parsed. - */ - scriptId: Runtime.ScriptId; - /** - * URL or name of the script parsed (if any). - */ - url: string; - /** - * Line offset of the script within the resource with given URL (for script tags). - */ - startLine: number; - /** - * Column offset of the script within the resource with given URL. - */ - startColumn: number; - /** - * Last line of the script. - */ - endLine: number; - /** - * Length of the last line of the script. - */ - endColumn: number; - /** - * Specifies script creation context. - */ - executionContextId: Runtime.ExecutionContextId; - /** - * Content hash of the script. - */ - hash: string; - /** - * Embedder-specific auxiliary data. - */ - executionContextAuxData?: {}; - /** - * True, if this script is generated as a result of the live edit operation. - * @experimental - */ - isLiveEdit?: boolean; - /** - * URL of source map associated with script (if any). - */ - sourceMapURL?: string; - /** - * True, if this script has sourceURL. - * @experimental - */ - hasSourceURL?: boolean; - /** - * True, if this script is ES6 module. - * @experimental - */ - isModule?: boolean; - /** - * This script length. - * @experimental - */ - length?: number; - /** - * JavaScript top stack frame of where the script parsed event was triggered if available. - * @experimental - */ - stackTrace?: Runtime.StackTrace; - } - - export interface ScriptFailedToParseEventDataType { - /** - * Identifier of the script parsed. - */ - scriptId: Runtime.ScriptId; - /** - * URL or name of the script parsed (if any). - */ - url: string; - /** - * Line offset of the script within the resource with given URL (for script tags). - */ - startLine: number; - /** - * Column offset of the script within the resource with given URL. - */ - startColumn: number; - /** - * Last line of the script. - */ - endLine: number; - /** - * Length of the last line of the script. 
- */ - endColumn: number; - /** - * Specifies script creation context. - */ - executionContextId: Runtime.ExecutionContextId; - /** - * Content hash of the script. - */ - hash: string; - /** - * Embedder-specific auxiliary data. - */ - executionContextAuxData?: {}; - /** - * URL of source map associated with script (if any). - */ - sourceMapURL?: string; - /** - * True, if this script has sourceURL. - * @experimental - */ - hasSourceURL?: boolean; - /** - * True, if this script is ES6 module. - * @experimental - */ - isModule?: boolean; - /** - * This script length. - * @experimental - */ - length?: number; - /** - * JavaScript top stack frame of where the script parsed event was triggered if available. - * @experimental - */ - stackTrace?: Runtime.StackTrace; - } - - export interface BreakpointResolvedEventDataType { - /** - * Breakpoint unique identifier. - */ - breakpointId: Debugger.BreakpointId; - /** - * Actual breakpoint location. - */ - location: Debugger.Location; - } - - export interface PausedEventDataType { - /** - * Call stack the virtual machine stopped on. - */ - callFrames: Debugger.CallFrame[]; - /** - * Pause reason. - */ - reason: string; - /** - * Object containing break-specific auxiliary properties. - */ - data?: {}; - /** - * Hit breakpoints IDs - */ - hitBreakpoints?: string[]; - /** - * Async stack trace, if any. - */ - asyncStackTrace?: Runtime.StackTrace; - } - } - - export namespace Console { - /** - * Console message. - */ - export interface ConsoleMessage { - /** - * Message source. - */ - source: string; - /** - * Message severity. - */ - level: string; - /** - * Message text. - */ - text: string; - /** - * URL of the message origin. - */ - url?: string; - /** - * Line number in the resource that generated this message (1-based). - */ - line?: number; - /** - * Column number in the resource that generated this message (1-based). - */ - column?: number; - } - - export interface MessageAddedEventDataType { - /** - * Console message that has been added. - */ - message: Console.ConsoleMessage; - } - } - - export namespace Profiler { - /** - * Profile node. Holds callsite information, execution statistics and child nodes. - */ - export interface ProfileNode { - /** - * Unique id of the node. - */ - id: number; - /** - * Function location. - */ - callFrame: Runtime.CallFrame; - /** - * Number of samples where this node was on top of the call stack. - * @experimental - */ - hitCount?: number; - /** - * Child node ids. - */ - children?: number[]; - /** - * The reason of being not optimized. The function may be deoptimized or marked as don't optimize. - */ - deoptReason?: string; - /** - * An array of source position ticks. - * @experimental - */ - positionTicks?: Profiler.PositionTickInfo[]; - } - - /** - * Profile. - */ - export interface Profile { - /** - * The list of profile nodes. First item is the root node. - */ - nodes: Profiler.ProfileNode[]; - /** - * Profiling start timestamp in microseconds. - */ - startTime: number; - /** - * Profiling end timestamp in microseconds. - */ - endTime: number; - /** - * Ids of samples top nodes. - */ - samples?: number[]; - /** - * Time intervals between adjacent samples in microseconds. The first delta is relative to the profile startTime. - */ - timeDeltas?: number[]; - } - - /** - * Specifies a number of samples attributed to a certain source position. - * @experimental - */ - export interface PositionTickInfo { - /** - * Source line number (1-based). 
- */ - line: number; - /** - * Number of samples attributed to the source line. - */ - ticks: number; - } - - /** - * Coverage data for a source range. - * @experimental - */ - export interface CoverageRange { - /** - * JavaScript script source offset for the range start. - */ - startOffset: number; - /** - * JavaScript script source offset for the range end. - */ - endOffset: number; - /** - * Collected execution count of the source range. - */ - count: number; - } - - /** - * Coverage data for a JavaScript function. - * @experimental - */ - export interface FunctionCoverage { - /** - * JavaScript function name. - */ - functionName: string; - /** - * Source ranges inside the function with coverage data. - */ - ranges: Profiler.CoverageRange[]; - /** - * Whether coverage data for this function has block granularity. - */ - isBlockCoverage: boolean; - } - - /** - * Coverage data for a JavaScript script. - * @experimental - */ - export interface ScriptCoverage { - /** - * JavaScript script id. - */ - scriptId: Runtime.ScriptId; - /** - * JavaScript script name or url. - */ - url: string; - /** - * Functions contained in the script that has coverage data. - */ - functions: Profiler.FunctionCoverage[]; - } - - export interface SetSamplingIntervalParameterType { - /** - * New sampling interval in microseconds. - */ - interval: number; - } - - export interface StartPreciseCoverageParameterType { - /** - * Collect accurate call counts beyond simple 'covered' or 'not covered'. - */ - callCount?: boolean; - } - - export interface StopReturnType { - /** - * Recorded profile. - */ - profile: Profiler.Profile; - } - - export interface TakePreciseCoverageReturnType { - /** - * Coverage data for the current isolate. - */ - result: Profiler.ScriptCoverage[]; - } - - export interface GetBestEffortCoverageReturnType { - /** - * Coverage data for the current isolate. - */ - result: Profiler.ScriptCoverage[]; - } - - export interface ConsoleProfileStartedEventDataType { - id: string; - /** - * Location of console.profile(). - */ - location: Debugger.Location; - /** - * Profile title passed as an argument to console.profile(). - */ - title?: string; - } - - export interface ConsoleProfileFinishedEventDataType { - id: string; - /** - * Location of console.profileEnd(). - */ - location: Debugger.Location; - profile: Profiler.Profile; - /** - * Profile title passed as an argument to console.profile(). - */ - title?: string; - } - } - - export namespace HeapProfiler { - /** - * Heap snapshot object id. - */ - export type HeapSnapshotObjectId = string; - - /** - * Sampling Heap Profile node. Holds callsite information, allocation statistics and child nodes. - */ - export interface SamplingHeapProfileNode { - /** - * Function location. - */ - callFrame: Runtime.CallFrame; - /** - * Allocations size in bytes for the node excluding children. - */ - selfSize: number; - /** - * Child nodes. - */ - children: HeapProfiler.SamplingHeapProfileNode[]; - } - - /** - * Profile. - */ - export interface SamplingHeapProfile { - head: HeapProfiler.SamplingHeapProfileNode; - } - - export interface StartTrackingHeapObjectsParameterType { - trackAllocations?: boolean; - } - - export interface StopTrackingHeapObjectsParameterType { - /** - * If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken when the tracking is stopped. 
- */ - reportProgress?: boolean; - } - - export interface TakeHeapSnapshotParameterType { - /** - * If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken. - */ - reportProgress?: boolean; - } - - export interface GetObjectByHeapObjectIdParameterType { - objectId: HeapProfiler.HeapSnapshotObjectId; - /** - * Symbolic group name that can be used to release multiple objects. - */ - objectGroup?: string; - } - - export interface AddInspectedHeapObjectParameterType { - /** - * Heap snapshot object id to be accessible by means of $x command line API. - */ - heapObjectId: HeapProfiler.HeapSnapshotObjectId; - } - - export interface GetHeapObjectIdParameterType { - /** - * Identifier of the object to get heap object id for. - */ - objectId: Runtime.RemoteObjectId; - } - - export interface StartSamplingParameterType { - /** - * Average sample interval in bytes. Poisson distribution is used for the intervals. The default value is 32768 bytes. - */ - samplingInterval?: number; - } - - export interface GetObjectByHeapObjectIdReturnType { - /** - * Evaluation result. - */ - result: Runtime.RemoteObject; - } - - export interface GetHeapObjectIdReturnType { - /** - * Id of the heap snapshot object corresponding to the passed remote object id. - */ - heapSnapshotObjectId: HeapProfiler.HeapSnapshotObjectId; - } - - export interface StopSamplingReturnType { - /** - * Recorded sampling heap profile. - */ - profile: HeapProfiler.SamplingHeapProfile; - } - - export interface AddHeapSnapshotChunkEventDataType { - chunk: string; - } - - export interface ReportHeapSnapshotProgressEventDataType { - done: number; - total: number; - finished?: boolean; - } - - export interface LastSeenObjectIdEventDataType { - lastSeenObjectId: number; - timestamp: number; - } - - export interface HeapStatsUpdateEventDataType { - /** - * An array of triplets. Each triplet describes a fragment. The first integer is the fragment index, the second integer is a total count of objects for the fragment, the third integer is a total size of the objects for the fragment. - */ - statsUpdate: number[]; - } - } - - /** - * The inspector.Session is used for dispatching messages to the V8 inspector back-end and receiving message responses and notifications. - */ - export class Session extends EventEmitter { - /** - * Create a new instance of the inspector.Session class. The inspector session needs to be connected through session.connect() before the messages can be dispatched to the inspector backend. - */ - constructor(); - - /** - * Connects a session to the inspector back-end. An exception will be thrown if there is already a connected session established either through the API or by a front-end connected to the Inspector WebSocket port. - */ - connect(): void; - - /** - * Immediately close the session. All pending message callbacks will be called with an error. session.connect() will need to be called to be able to send messages again. Reconnected session will lose all inspector state, such as enabled agents or configured breakpoints. - */ - disconnect(): void; - - /** - * Posts a message to the inspector back-end. callback will be notified when a response is received. callback is a function that accepts two optional arguments - error and message-specific result. - */ - post(method: string, params?: {}, callback?: (err: Error | null, params?: {}) => void): void; - post(method: string, callback?: (err: Error | null, params?: {}) => void): void; - - /** - * Returns supported domains. 
- */ - post(method: "Schema.getDomains", callback?: (err: Error | null, params: Schema.GetDomainsReturnType) => void): void; - /** - * Evaluates expression on global object. - */ - post(method: "Runtime.evaluate", params?: Runtime.EvaluateParameterType, callback?: (err: Error | null, params: Runtime.EvaluateReturnType) => void): void; - post(method: "Runtime.evaluate", callback?: (err: Error | null, params: Runtime.EvaluateReturnType) => void): void; - - /** - * Add handler to promise with given promise object id. - */ - post(method: "Runtime.awaitPromise", params?: Runtime.AwaitPromiseParameterType, callback?: (err: Error | null, params: Runtime.AwaitPromiseReturnType) => void): void; - post(method: "Runtime.awaitPromise", callback?: (err: Error | null, params: Runtime.AwaitPromiseReturnType) => void): void; - - /** - * Calls function with given declaration on the given object. Object group of the result is inherited from the target object. - */ - post(method: "Runtime.callFunctionOn", params?: Runtime.CallFunctionOnParameterType, callback?: (err: Error | null, params: Runtime.CallFunctionOnReturnType) => void): void; - post(method: "Runtime.callFunctionOn", callback?: (err: Error | null, params: Runtime.CallFunctionOnReturnType) => void): void; - - /** - * Returns properties of a given object. Object group of the result is inherited from the target object. - */ - post(method: "Runtime.getProperties", params?: Runtime.GetPropertiesParameterType, callback?: (err: Error | null, params: Runtime.GetPropertiesReturnType) => void): void; - post(method: "Runtime.getProperties", callback?: (err: Error | null, params: Runtime.GetPropertiesReturnType) => void): void; - - /** - * Releases remote object with given id. - */ - post(method: "Runtime.releaseObject", params?: Runtime.ReleaseObjectParameterType, callback?: (err: Error | null) => void): void; - post(method: "Runtime.releaseObject", callback?: (err: Error | null) => void): void; - - /** - * Releases all remote objects that belong to a given group. - */ - post(method: "Runtime.releaseObjectGroup", params?: Runtime.ReleaseObjectGroupParameterType, callback?: (err: Error | null) => void): void; - post(method: "Runtime.releaseObjectGroup", callback?: (err: Error | null) => void): void; - - /** - * Tells inspected instance to run if it was waiting for debugger to attach. - */ - post(method: "Runtime.runIfWaitingForDebugger", callback?: (err: Error | null) => void): void; - - /** - * Enables reporting of execution contexts creation by means of executionContextCreated event. When the reporting gets enabled the event will be sent immediately for each existing execution context. - */ - post(method: "Runtime.enable", callback?: (err: Error | null) => void): void; - - /** - * Disables reporting of execution contexts creation. - */ - post(method: "Runtime.disable", callback?: (err: Error | null) => void): void; - - /** - * Discards collected exceptions and console API calls. - */ - post(method: "Runtime.discardConsoleEntries", callback?: (err: Error | null) => void): void; - - /** - * @experimental - */ - post(method: "Runtime.setCustomObjectFormatterEnabled", params?: Runtime.SetCustomObjectFormatterEnabledParameterType, callback?: (err: Error | null) => void): void; - post(method: "Runtime.setCustomObjectFormatterEnabled", callback?: (err: Error | null) => void): void; - - /** - * Compiles expression. 
- */ - post(method: "Runtime.compileScript", params?: Runtime.CompileScriptParameterType, callback?: (err: Error | null, params: Runtime.CompileScriptReturnType) => void): void; - post(method: "Runtime.compileScript", callback?: (err: Error | null, params: Runtime.CompileScriptReturnType) => void): void; - - /** - * Runs script with given id in a given context. - */ - post(method: "Runtime.runScript", params?: Runtime.RunScriptParameterType, callback?: (err: Error | null, params: Runtime.RunScriptReturnType) => void): void; - post(method: "Runtime.runScript", callback?: (err: Error | null, params: Runtime.RunScriptReturnType) => void): void; - /** - * Enables debugger for the given page. Clients should not assume that the debugging has been enabled until the result for this command is received. - */ - post(method: "Debugger.enable", callback?: (err: Error | null) => void): void; - - /** - * Disables debugger for given page. - */ - post(method: "Debugger.disable", callback?: (err: Error | null) => void): void; - - /** - * Activates / deactivates all breakpoints on the page. - */ - post(method: "Debugger.setBreakpointsActive", params?: Debugger.SetBreakpointsActiveParameterType, callback?: (err: Error | null) => void): void; - post(method: "Debugger.setBreakpointsActive", callback?: (err: Error | null) => void): void; - - /** - * Makes page not interrupt on any pauses (breakpoint, exception, dom exception etc). - */ - post(method: "Debugger.setSkipAllPauses", params?: Debugger.SetSkipAllPausesParameterType, callback?: (err: Error | null) => void): void; - post(method: "Debugger.setSkipAllPauses", callback?: (err: Error | null) => void): void; - - /** - * Sets JavaScript breakpoint at given location specified either by URL or URL regex. Once this command is issued, all existing parsed scripts will have breakpoints resolved and returned in locations property. Further matching script parsing will result in subsequent breakpointResolved events issued. This logical breakpoint will survive page reloads. - */ - post(method: "Debugger.setBreakpointByUrl", params?: Debugger.SetBreakpointByUrlParameterType, callback?: (err: Error | null, params: Debugger.SetBreakpointByUrlReturnType) => void): void; - post(method: "Debugger.setBreakpointByUrl", callback?: (err: Error | null, params: Debugger.SetBreakpointByUrlReturnType) => void): void; - - /** - * Sets JavaScript breakpoint at a given location. - */ - post(method: "Debugger.setBreakpoint", params?: Debugger.SetBreakpointParameterType, callback?: (err: Error | null, params: Debugger.SetBreakpointReturnType) => void): void; - post(method: "Debugger.setBreakpoint", callback?: (err: Error | null, params: Debugger.SetBreakpointReturnType) => void): void; - - /** - * Removes JavaScript breakpoint. - */ - post(method: "Debugger.removeBreakpoint", params?: Debugger.RemoveBreakpointParameterType, callback?: (err: Error | null) => void): void; - post(method: "Debugger.removeBreakpoint", callback?: (err: Error | null) => void): void; - - /** - * Returns possible locations for breakpoint. scriptId in start and end range locations should be the same. 
- * @experimental - */ - post(method: "Debugger.getPossibleBreakpoints", params?: Debugger.GetPossibleBreakpointsParameterType, callback?: (err: Error | null, params: Debugger.GetPossibleBreakpointsReturnType) => void): void; - post(method: "Debugger.getPossibleBreakpoints", callback?: (err: Error | null, params: Debugger.GetPossibleBreakpointsReturnType) => void): void; - - /** - * Continues execution until specific location is reached. - */ - post(method: "Debugger.continueToLocation", params?: Debugger.ContinueToLocationParameterType, callback?: (err: Error | null) => void): void; - post(method: "Debugger.continueToLocation", callback?: (err: Error | null) => void): void; - - /** - * Steps over the statement. - */ - post(method: "Debugger.stepOver", callback?: (err: Error | null) => void): void; - - /** - * Steps into the function call. - */ - post(method: "Debugger.stepInto", callback?: (err: Error | null) => void): void; - - /** - * Steps out of the function call. - */ - post(method: "Debugger.stepOut", callback?: (err: Error | null) => void): void; - - /** - * Stops on the next JavaScript statement. - */ - post(method: "Debugger.pause", callback?: (err: Error | null) => void): void; - - /** - * Steps into next scheduled async task if any is scheduled before next pause. Returns success when async task is actually scheduled, returns error if no task were scheduled or another scheduleStepIntoAsync was called. - * @experimental - */ - post(method: "Debugger.scheduleStepIntoAsync", callback?: (err: Error | null) => void): void; - - /** - * Resumes JavaScript execution. - */ - post(method: "Debugger.resume", callback?: (err: Error | null) => void): void; - - /** - * Searches for given string in script content. - * @experimental - */ - post(method: "Debugger.searchInContent", params?: Debugger.SearchInContentParameterType, callback?: (err: Error | null, params: Debugger.SearchInContentReturnType) => void): void; - post(method: "Debugger.searchInContent", callback?: (err: Error | null, params: Debugger.SearchInContentReturnType) => void): void; - - /** - * Edits JavaScript source live. - */ - post(method: "Debugger.setScriptSource", params?: Debugger.SetScriptSourceParameterType, callback?: (err: Error | null, params: Debugger.SetScriptSourceReturnType) => void): void; - post(method: "Debugger.setScriptSource", callback?: (err: Error | null, params: Debugger.SetScriptSourceReturnType) => void): void; - - /** - * Restarts particular call frame from the beginning. - */ - post(method: "Debugger.restartFrame", params?: Debugger.RestartFrameParameterType, callback?: (err: Error | null, params: Debugger.RestartFrameReturnType) => void): void; - post(method: "Debugger.restartFrame", callback?: (err: Error | null, params: Debugger.RestartFrameReturnType) => void): void; - - /** - * Returns source for the script with given id. - */ - post(method: "Debugger.getScriptSource", params?: Debugger.GetScriptSourceParameterType, callback?: (err: Error | null, params: Debugger.GetScriptSourceReturnType) => void): void; - post(method: "Debugger.getScriptSource", callback?: (err: Error | null, params: Debugger.GetScriptSourceReturnType) => void): void; - - /** - * Defines pause on exceptions state. Can be set to stop on all exceptions, uncaught exceptions or no exceptions. Initial pause on exceptions state is none. 
- */ - post(method: "Debugger.setPauseOnExceptions", params?: Debugger.SetPauseOnExceptionsParameterType, callback?: (err: Error | null) => void): void; - post(method: "Debugger.setPauseOnExceptions", callback?: (err: Error | null) => void): void; - - /** - * Evaluates expression on a given call frame. - */ - post(method: "Debugger.evaluateOnCallFrame", params?: Debugger.EvaluateOnCallFrameParameterType, callback?: (err: Error | null, params: Debugger.EvaluateOnCallFrameReturnType) => void): void; - post(method: "Debugger.evaluateOnCallFrame", callback?: (err: Error | null, params: Debugger.EvaluateOnCallFrameReturnType) => void): void; - - /** - * Changes value of variable in a callframe. Object-based scopes are not supported and must be mutated manually. - */ - post(method: "Debugger.setVariableValue", params?: Debugger.SetVariableValueParameterType, callback?: (err: Error | null) => void): void; - post(method: "Debugger.setVariableValue", callback?: (err: Error | null) => void): void; - - /** - * Enables or disables async call stacks tracking. - */ - post(method: "Debugger.setAsyncCallStackDepth", params?: Debugger.SetAsyncCallStackDepthParameterType, callback?: (err: Error | null) => void): void; - post(method: "Debugger.setAsyncCallStackDepth", callback?: (err: Error | null) => void): void; - - /** - * Replace previous blackbox patterns with passed ones. Forces backend to skip stepping/pausing in scripts with url matching one of the patterns. VM will try to leave blackboxed script by performing 'step in' several times, finally resorting to 'step out' if unsuccessful. - * @experimental - */ - post(method: "Debugger.setBlackboxPatterns", params?: Debugger.SetBlackboxPatternsParameterType, callback?: (err: Error | null) => void): void; - post(method: "Debugger.setBlackboxPatterns", callback?: (err: Error | null) => void): void; - - /** - * Makes backend skip steps in the script in blackboxed ranges. VM will try leave blacklisted scripts by performing 'step in' several times, finally resorting to 'step out' if unsuccessful. Positions array contains positions where blackbox state is changed. First interval isn't blackboxed. Array should be sorted. - * @experimental - */ - post(method: "Debugger.setBlackboxedRanges", params?: Debugger.SetBlackboxedRangesParameterType, callback?: (err: Error | null) => void): void; - post(method: "Debugger.setBlackboxedRanges", callback?: (err: Error | null) => void): void; - /** - * Enables console domain, sends the messages collected so far to the client by means of the messageAdded notification. - */ - post(method: "Console.enable", callback?: (err: Error | null) => void): void; - - /** - * Disables console domain, prevents further console messages from being reported to the client. - */ - post(method: "Console.disable", callback?: (err: Error | null) => void): void; - - /** - * Does nothing. - */ - post(method: "Console.clearMessages", callback?: (err: Error | null) => void): void; - post(method: "Profiler.enable", callback?: (err: Error | null) => void): void; - - post(method: "Profiler.disable", callback?: (err: Error | null) => void): void; - - /** - * Changes CPU profiler sampling interval. Must be called before CPU profiles recording started. 
- */ - post(method: "Profiler.setSamplingInterval", params?: Profiler.SetSamplingIntervalParameterType, callback?: (err: Error | null) => void): void; - post(method: "Profiler.setSamplingInterval", callback?: (err: Error | null) => void): void; - - post(method: "Profiler.start", callback?: (err: Error | null) => void): void; - - post(method: "Profiler.stop", callback?: (err: Error | null, params: Profiler.StopReturnType) => void): void; - - /** - * Enable precise code coverage. Coverage data for JavaScript executed before enabling precise code coverage may be incomplete. Enabling prevents running optimized code and resets execution counters. - * @experimental - */ - post(method: "Profiler.startPreciseCoverage", params?: Profiler.StartPreciseCoverageParameterType, callback?: (err: Error | null) => void): void; - post(method: "Profiler.startPreciseCoverage", callback?: (err: Error | null) => void): void; - - /** - * Disable precise code coverage. Disabling releases unnecessary execution count records and allows executing optimized code. - * @experimental - */ - post(method: "Profiler.stopPreciseCoverage", callback?: (err: Error | null) => void): void; - - /** - * Collect coverage data for the current isolate, and resets execution counters. Precise code coverage needs to have started. - * @experimental - */ - post(method: "Profiler.takePreciseCoverage", callback?: (err: Error | null, params: Profiler.TakePreciseCoverageReturnType) => void): void; - - /** - * Collect coverage data for the current isolate. The coverage data may be incomplete due to garbage collection. - * @experimental - */ - post(method: "Profiler.getBestEffortCoverage", callback?: (err: Error | null, params: Profiler.GetBestEffortCoverageReturnType) => void): void; - post(method: "HeapProfiler.enable", callback?: (err: Error | null) => void): void; - - post(method: "HeapProfiler.disable", callback?: (err: Error | null) => void): void; - - post(method: "HeapProfiler.startTrackingHeapObjects", params?: HeapProfiler.StartTrackingHeapObjectsParameterType, callback?: (err: Error | null) => void): void; - post(method: "HeapProfiler.startTrackingHeapObjects", callback?: (err: Error | null) => void): void; - - post(method: "HeapProfiler.stopTrackingHeapObjects", params?: HeapProfiler.StopTrackingHeapObjectsParameterType, callback?: (err: Error | null) => void): void; - post(method: "HeapProfiler.stopTrackingHeapObjects", callback?: (err: Error | null) => void): void; - - post(method: "HeapProfiler.takeHeapSnapshot", params?: HeapProfiler.TakeHeapSnapshotParameterType, callback?: (err: Error | null) => void): void; - post(method: "HeapProfiler.takeHeapSnapshot", callback?: (err: Error | null) => void): void; - - post(method: "HeapProfiler.collectGarbage", callback?: (err: Error | null) => void): void; - - post(method: "HeapProfiler.getObjectByHeapObjectId", params?: HeapProfiler.GetObjectByHeapObjectIdParameterType, callback?: (err: Error | null, params: HeapProfiler.GetObjectByHeapObjectIdReturnType) => void): void; - post(method: "HeapProfiler.getObjectByHeapObjectId", callback?: (err: Error | null, params: HeapProfiler.GetObjectByHeapObjectIdReturnType) => void): void; - - /** - * Enables console to refer to the node with given id via $x (see Command Line API for more details $x functions). 
- */ - post(method: "HeapProfiler.addInspectedHeapObject", params?: HeapProfiler.AddInspectedHeapObjectParameterType, callback?: (err: Error | null) => void): void; - post(method: "HeapProfiler.addInspectedHeapObject", callback?: (err: Error | null) => void): void; - - post(method: "HeapProfiler.getHeapObjectId", params?: HeapProfiler.GetHeapObjectIdParameterType, callback?: (err: Error | null, params: HeapProfiler.GetHeapObjectIdReturnType) => void): void; - post(method: "HeapProfiler.getHeapObjectId", callback?: (err: Error | null, params: HeapProfiler.GetHeapObjectIdReturnType) => void): void; - - post(method: "HeapProfiler.startSampling", params?: HeapProfiler.StartSamplingParameterType, callback?: (err: Error | null) => void): void; - post(method: "HeapProfiler.startSampling", callback?: (err: Error | null) => void): void; - - post(method: "HeapProfiler.stopSampling", callback?: (err: Error | null, params: HeapProfiler.StopSamplingReturnType) => void): void; - - // Events - - addListener(event: string, listener: (...args: any[]) => void): this; - - /** - * Emitted when any notification from the V8 Inspector is received. - */ - addListener(event: "inspectorNotification", listener: (message: InspectorNotification<{}>) => void): this; - - /** - * Issued when new execution context is created. - */ - addListener(event: "Runtime.executionContextCreated", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when execution context is destroyed. - */ - addListener(event: "Runtime.executionContextDestroyed", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when all executionContexts were cleared in browser - */ - addListener(event: "Runtime.executionContextsCleared", listener: () => void): this; - - /** - * Issued when exception was thrown and unhandled. - */ - addListener(event: "Runtime.exceptionThrown", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when unhandled exception was revoked. - */ - addListener(event: "Runtime.exceptionRevoked", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when console API was called. - */ - addListener(event: "Runtime.consoleAPICalled", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when object should be inspected (for example, as a result of inspect() command line API call). - */ - addListener(event: "Runtime.inspectRequested", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. - */ - addListener(event: "Debugger.scriptParsed", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when virtual machine fails to parse the script. - */ - addListener(event: "Debugger.scriptFailedToParse", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when breakpoint is resolved to an actual script and location. - */ - addListener(event: "Debugger.breakpointResolved", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. - */ - addListener(event: "Debugger.paused", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when the virtual machine resumed execution. - */ - addListener(event: "Debugger.resumed", listener: () => void): this; - - /** - * Issued when new console message is added. 
- */ - addListener(event: "Console.messageAdded", listener: (message: InspectorNotification) => void): this; - - /** - * Sent when new profile recording is started using console.profile() call. - */ - addListener(event: "Profiler.consoleProfileStarted", listener: (message: InspectorNotification) => void): this; - - addListener(event: "Profiler.consoleProfileFinished", listener: (message: InspectorNotification) => void): this; - addListener(event: "HeapProfiler.addHeapSnapshotChunk", listener: (message: InspectorNotification) => void): this; - addListener(event: "HeapProfiler.resetProfiles", listener: () => void): this; - addListener(event: "HeapProfiler.reportHeapSnapshotProgress", listener: (message: InspectorNotification) => void): this; - - /** - * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. - */ - addListener(event: "HeapProfiler.lastSeenObjectId", listener: (message: InspectorNotification) => void): this; - - /** - * If heap objects tracking has been started then backend may send update for one or more fragments - */ - addListener(event: "HeapProfiler.heapStatsUpdate", listener: (message: InspectorNotification) => void): this; - - emit(event: string | symbol, ...args: any[]): boolean; - emit(event: "inspectorNotification", message: InspectorNotification<{}>): boolean; - emit(event: "Runtime.executionContextCreated", message: InspectorNotification): boolean; - emit(event: "Runtime.executionContextDestroyed", message: InspectorNotification): boolean; - emit(event: "Runtime.executionContextsCleared"): boolean; - emit(event: "Runtime.exceptionThrown", message: InspectorNotification): boolean; - emit(event: "Runtime.exceptionRevoked", message: InspectorNotification): boolean; - emit(event: "Runtime.consoleAPICalled", message: InspectorNotification): boolean; - emit(event: "Runtime.inspectRequested", message: InspectorNotification): boolean; - emit(event: "Debugger.scriptParsed", message: InspectorNotification): boolean; - emit(event: "Debugger.scriptFailedToParse", message: InspectorNotification): boolean; - emit(event: "Debugger.breakpointResolved", message: InspectorNotification): boolean; - emit(event: "Debugger.paused", message: InspectorNotification): boolean; - emit(event: "Debugger.resumed"): boolean; - emit(event: "Console.messageAdded", message: InspectorNotification): boolean; - emit(event: "Profiler.consoleProfileStarted", message: InspectorNotification): boolean; - emit(event: "Profiler.consoleProfileFinished", message: InspectorNotification): boolean; - emit(event: "HeapProfiler.addHeapSnapshotChunk", message: InspectorNotification): boolean; - emit(event: "HeapProfiler.resetProfiles"): boolean; - emit(event: "HeapProfiler.reportHeapSnapshotProgress", message: InspectorNotification): boolean; - emit(event: "HeapProfiler.lastSeenObjectId", message: InspectorNotification): boolean; - emit(event: "HeapProfiler.heapStatsUpdate", message: InspectorNotification): boolean; - - on(event: string, listener: (...args: any[]) => void): this; - - /** - * Emitted when any notification from the V8 Inspector is received. - */ - on(event: "inspectorNotification", listener: (message: InspectorNotification<{}>) => void): this; - - /** - * Issued when new execution context is created. 
- */ - on(event: "Runtime.executionContextCreated", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when execution context is destroyed. - */ - on(event: "Runtime.executionContextDestroyed", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when all executionContexts were cleared in browser - */ - on(event: "Runtime.executionContextsCleared", listener: () => void): this; - - /** - * Issued when exception was thrown and unhandled. - */ - on(event: "Runtime.exceptionThrown", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when unhandled exception was revoked. - */ - on(event: "Runtime.exceptionRevoked", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when console API was called. - */ - on(event: "Runtime.consoleAPICalled", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when object should be inspected (for example, as a result of inspect() command line API call). - */ - on(event: "Runtime.inspectRequested", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. - */ - on(event: "Debugger.scriptParsed", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when virtual machine fails to parse the script. - */ - on(event: "Debugger.scriptFailedToParse", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when breakpoint is resolved to an actual script and location. - */ - on(event: "Debugger.breakpointResolved", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. - */ - on(event: "Debugger.paused", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when the virtual machine resumed execution. - */ - on(event: "Debugger.resumed", listener: () => void): this; - - /** - * Issued when new console message is added. - */ - on(event: "Console.messageAdded", listener: (message: InspectorNotification) => void): this; - - /** - * Sent when new profile recording is started using console.profile() call. - */ - on(event: "Profiler.consoleProfileStarted", listener: (message: InspectorNotification) => void): this; - - on(event: "Profiler.consoleProfileFinished", listener: (message: InspectorNotification) => void): this; - on(event: "HeapProfiler.addHeapSnapshotChunk", listener: (message: InspectorNotification) => void): this; - on(event: "HeapProfiler.resetProfiles", listener: () => void): this; - on(event: "HeapProfiler.reportHeapSnapshotProgress", listener: (message: InspectorNotification) => void): this; - - /** - * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. 
- */ - on(event: "HeapProfiler.lastSeenObjectId", listener: (message: InspectorNotification) => void): this; - - /** - * If heap objects tracking has been started then backend may send update for one or more fragments - */ - on(event: "HeapProfiler.heapStatsUpdate", listener: (message: InspectorNotification) => void): this; - - once(event: string, listener: (...args: any[]) => void): this; - - /** - * Emitted when any notification from the V8 Inspector is received. - */ - once(event: "inspectorNotification", listener: (message: InspectorNotification<{}>) => void): this; - - /** - * Issued when new execution context is created. - */ - once(event: "Runtime.executionContextCreated", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when execution context is destroyed. - */ - once(event: "Runtime.executionContextDestroyed", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when all executionContexts were cleared in browser - */ - once(event: "Runtime.executionContextsCleared", listener: () => void): this; - - /** - * Issued when exception was thrown and unhandled. - */ - once(event: "Runtime.exceptionThrown", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when unhandled exception was revoked. - */ - once(event: "Runtime.exceptionRevoked", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when console API was called. - */ - once(event: "Runtime.consoleAPICalled", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when object should be inspected (for example, as a result of inspect() command line API call). - */ - once(event: "Runtime.inspectRequested", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. - */ - once(event: "Debugger.scriptParsed", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when virtual machine fails to parse the script. - */ - once(event: "Debugger.scriptFailedToParse", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when breakpoint is resolved to an actual script and location. - */ - once(event: "Debugger.breakpointResolved", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. - */ - once(event: "Debugger.paused", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when the virtual machine resumed execution. - */ - once(event: "Debugger.resumed", listener: () => void): this; - - /** - * Issued when new console message is added. - */ - once(event: "Console.messageAdded", listener: (message: InspectorNotification) => void): this; - - /** - * Sent when new profile recording is started using console.profile() call. 
- */ - once(event: "Profiler.consoleProfileStarted", listener: (message: InspectorNotification) => void): this; - - once(event: "Profiler.consoleProfileFinished", listener: (message: InspectorNotification) => void): this; - once(event: "HeapProfiler.addHeapSnapshotChunk", listener: (message: InspectorNotification) => void): this; - once(event: "HeapProfiler.resetProfiles", listener: () => void): this; - once(event: "HeapProfiler.reportHeapSnapshotProgress", listener: (message: InspectorNotification) => void): this; - - /** - * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. - */ - once(event: "HeapProfiler.lastSeenObjectId", listener: (message: InspectorNotification) => void): this; - - /** - * If heap objects tracking has been started then backend may send update for one or more fragments - */ - once(event: "HeapProfiler.heapStatsUpdate", listener: (message: InspectorNotification) => void): this; - - prependListener(event: string, listener: (...args: any[]) => void): this; - - /** - * Emitted when any notification from the V8 Inspector is received. - */ - prependListener(event: "inspectorNotification", listener: (message: InspectorNotification<{}>) => void): this; - - /** - * Issued when new execution context is created. - */ - prependListener(event: "Runtime.executionContextCreated", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when execution context is destroyed. - */ - prependListener(event: "Runtime.executionContextDestroyed", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when all executionContexts were cleared in browser - */ - prependListener(event: "Runtime.executionContextsCleared", listener: () => void): this; - - /** - * Issued when exception was thrown and unhandled. - */ - prependListener(event: "Runtime.exceptionThrown", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when unhandled exception was revoked. - */ - prependListener(event: "Runtime.exceptionRevoked", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when console API was called. - */ - prependListener(event: "Runtime.consoleAPICalled", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when object should be inspected (for example, as a result of inspect() command line API call). - */ - prependListener(event: "Runtime.inspectRequested", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. - */ - prependListener(event: "Debugger.scriptParsed", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when virtual machine fails to parse the script. - */ - prependListener(event: "Debugger.scriptFailedToParse", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when breakpoint is resolved to an actual script and location. - */ - prependListener(event: "Debugger.breakpointResolved", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. 
- */ - prependListener(event: "Debugger.paused", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when the virtual machine resumed execution. - */ - prependListener(event: "Debugger.resumed", listener: () => void): this; - - /** - * Issued when new console message is added. - */ - prependListener(event: "Console.messageAdded", listener: (message: InspectorNotification) => void): this; - - /** - * Sent when new profile recording is started using console.profile() call. - */ - prependListener(event: "Profiler.consoleProfileStarted", listener: (message: InspectorNotification) => void): this; - - prependListener(event: "Profiler.consoleProfileFinished", listener: (message: InspectorNotification) => void): this; - prependListener(event: "HeapProfiler.addHeapSnapshotChunk", listener: (message: InspectorNotification) => void): this; - prependListener(event: "HeapProfiler.resetProfiles", listener: () => void): this; - prependListener(event: "HeapProfiler.reportHeapSnapshotProgress", listener: (message: InspectorNotification) => void): this; - - /** - * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. - */ - prependListener(event: "HeapProfiler.lastSeenObjectId", listener: (message: InspectorNotification) => void): this; - - /** - * If heap objects tracking has been started then backend may send update for one or more fragments - */ - prependListener(event: "HeapProfiler.heapStatsUpdate", listener: (message: InspectorNotification) => void): this; - - prependOnceListener(event: string, listener: (...args: any[]) => void): this; - - /** - * Emitted when any notification from the V8 Inspector is received. - */ - prependOnceListener(event: "inspectorNotification", listener: (message: InspectorNotification<{}>) => void): this; - - /** - * Issued when new execution context is created. - */ - prependOnceListener(event: "Runtime.executionContextCreated", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when execution context is destroyed. - */ - prependOnceListener(event: "Runtime.executionContextDestroyed", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when all executionContexts were cleared in browser - */ - prependOnceListener(event: "Runtime.executionContextsCleared", listener: () => void): this; - - /** - * Issued when exception was thrown and unhandled. - */ - prependOnceListener(event: "Runtime.exceptionThrown", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when unhandled exception was revoked. - */ - prependOnceListener(event: "Runtime.exceptionRevoked", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when console API was called. - */ - prependOnceListener(event: "Runtime.consoleAPICalled", listener: (message: InspectorNotification) => void): this; - - /** - * Issued when object should be inspected (for example, as a result of inspect() command line API call). - */ - prependOnceListener(event: "Runtime.inspectRequested", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. 
- */ - prependOnceListener(event: "Debugger.scriptParsed", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when virtual machine fails to parse the script. - */ - prependOnceListener(event: "Debugger.scriptFailedToParse", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when breakpoint is resolved to an actual script and location. - */ - prependOnceListener(event: "Debugger.breakpointResolved", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. - */ - prependOnceListener(event: "Debugger.paused", listener: (message: InspectorNotification) => void): this; - - /** - * Fired when the virtual machine resumed execution. - */ - prependOnceListener(event: "Debugger.resumed", listener: () => void): this; - - /** - * Issued when new console message is added. - */ - prependOnceListener(event: "Console.messageAdded", listener: (message: InspectorNotification) => void): this; - - /** - * Sent when new profile recording is started using console.profile() call. - */ - prependOnceListener(event: "Profiler.consoleProfileStarted", listener: (message: InspectorNotification) => void): this; - - prependOnceListener(event: "Profiler.consoleProfileFinished", listener: (message: InspectorNotification) => void): this; - prependOnceListener(event: "HeapProfiler.addHeapSnapshotChunk", listener: (message: InspectorNotification) => void): this; - prependOnceListener(event: "HeapProfiler.resetProfiles", listener: () => void): this; - prependOnceListener(event: "HeapProfiler.reportHeapSnapshotProgress", listener: (message: InspectorNotification) => void): this; - - /** - * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. - */ - prependOnceListener(event: "HeapProfiler.lastSeenObjectId", listener: (message: InspectorNotification) => void): this; - - /** - * If heap objects tracking has been started then backend may send update for one or more fragments - */ - prependOnceListener(event: "HeapProfiler.heapStatsUpdate", listener: (message: InspectorNotification) => void): this; - } - - // Top Level API - - /** - * Activate inspector on host and port. Equivalent to node --inspect=[[host:]port], but can be done programatically after node has started. - * If wait is true, will block until a client has connected to the inspect port and flow control has been passed to the debugger client. - * @param port Port to listen on for inspector connections. Optional, defaults to what was specified on the CLI. - * @param host Host to listen on for inspector connections. Optional, defaults to what was specified on the CLI. - * @param wait Block until a client has connected. Optional, defaults to false. - */ - export function open(port?: number, host?: string, wait?: boolean): void; - - /** - * Deactivate the inspector. Blocks until there are no active connections. - */ - export function close(): void; - - /** - * Return the URL of the active inspector, or undefined if there is none. 
- */ - export function url(): string; -} diff --git a/node_modules/@types/node/package.json b/node_modules/@types/node/package.json deleted file mode 100644 index 4e98d7b411cff..0000000000000 --- a/node_modules/@types/node/package.json +++ /dev/null @@ -1,156 +0,0 @@ -{ - "_from": "@types/node@*", - "_id": "@types/node@10.7.0", - "_inBundle": false, - "_integrity": "sha512-dmYIvoQEZWnyQfgrwPCoxztv/93NYQGEiOoQhuI56rJahv9de6Q2apZl3bufV46YJ0OAXdaktIuw4RIRl4DTeA==", - "_location": "/@types/node", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "@types/node@*", - "name": "@types/node", - "escapedName": "@types%2fnode", - "scope": "@types", - "rawSpec": "*", - "saveSpec": null, - "fetchSpec": "*" - }, - "_requiredBy": [ - "/@types/form-data", - "/@types/request" - ], - "_resolved": "https://registry.npmjs.org/@types/node/-/node-10.7.0.tgz", - "_shasum": "d384b2c8625414ab2aa18fdf989c288d6a7a8202", - "_spec": "@types/node@*", - "_where": "/Users/zkat/Documents/code/work/npm/node_modules/@types/request", - "bugs": { - "url": "https://github.com/DefinitelyTyped/DefinitelyTyped/issues" - }, - "bundleDependencies": false, - "contributors": [ - { - "name": "Microsoft TypeScript", - "url": "http://typescriptlang.org" - }, - { - "name": "DefinitelyTyped", - "url": "https://github.com/DefinitelyTyped/DefinitelyTyped" - }, - { - "name": "Parambir Singh", - "url": "https://github.com/parambirs" - }, - { - "name": "Christian Vaagland Tellnes", - "url": "https://github.com/tellnes" - }, - { - "name": "Wilco Bakker", - "url": "https://github.com/WilcoBakker" - }, - { - "name": "Nicolas Voigt", - "url": "https://github.com/octo-sniffle" - }, - { - "name": "Chigozirim C.", - "url": "https://github.com/smac89" - }, - { - "name": "Flarna", - "url": "https://github.com/Flarna" - }, - { - "name": "Mariusz Wiktorczyk", - "url": "https://github.com/mwiktorczyk" - }, - { - "name": "wwwy3y3", - "url": "https://github.com/wwwy3y3" - }, - { - "name": "Deividas Bakanas", - "url": "https://github.com/DeividasBakanas" - }, - { - "name": "Kelvin Jin", - "url": "https://github.com/kjin" - }, - { - "name": "Alvis HT Tang", - "url": "https://github.com/alvis" - }, - { - "name": "Sebastian Silbermann", - "url": "https://github.com/eps1lon" - }, - { - "name": "Hannes Magnusson", - "url": "https://github.com/Hannes-Magnusson-CK" - }, - { - "name": "Alberto Schiabel", - "url": "https://github.com/jkomyno" - }, - { - "name": "Klaus Meinhardt", - "url": "https://github.com/ajafff" - }, - { - "name": "Huw", - "url": "https://github.com/hoo29" - }, - { - "name": "Nicolas Even", - "url": "https://github.com/n-e" - }, - { - "name": "Bruno Scheufler", - "url": "https://github.com/brunoscheufler" - }, - { - "name": "Mohsen Azimi", - "url": "https://github.com/mohsen1" - }, - { - "name": "Hoàng Văn Khải", - "url": "https://github.com/KSXGitHub" - }, - { - "name": "Alexander T.", - "url": "https://github.com/a-tarasyuk" - }, - { - "name": "Lishude", - "url": "https://github.com/islishude" - }, - { - "name": "Andrew Makarov", - "url": "https://github.com/r3nya" - }, - { - "name": "Zane Hannan AU", - "url": "https://github.com/ZaneHannanAU" - }, - { - "name": "Eugene Y. Q. 
Shen", - "url": "https://github.com/eyqs" - } - ], - "dependencies": {}, - "deprecated": false, - "description": "TypeScript definitions for Node.js", - "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped#readme", - "license": "MIT", - "main": "", - "name": "@types/node", - "repository": { - "type": "git", - "url": "git+https://github.com/DefinitelyTyped/DefinitelyTyped.git" - }, - "scripts": {}, - "typeScriptVersion": "2.0", - "typesPublisherContentHash": "2bbf8205320aa5fb12aaaa8dab46553da05dde879dcbf3276c12a0af32e104a2", - "version": "10.7.0" -} diff --git a/node_modules/@types/request/LICENSE b/node_modules/@types/request/LICENSE deleted file mode 100644 index 21071075c2459..0000000000000 --- a/node_modules/@types/request/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ - MIT License - - Copyright (c) Microsoft Corporation. All rights reserved. - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - SOFTWARE diff --git a/node_modules/@types/request/README.md b/node_modules/@types/request/README.md deleted file mode 100644 index bbf6b9da6fe86..0000000000000 --- a/node_modules/@types/request/README.md +++ /dev/null @@ -1,16 +0,0 @@ -# Installation -> `npm install --save @types/request` - -# Summary -This package contains type definitions for request (https://github.com/request/request). - -# Details -Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/request - -Additional Details - * Last updated: Fri, 15 Jun 2018 21:26:44 GMT - * Dependencies: caseless, stream, http, https, fs, form-data, net, tough-cookie, url, node - * Global values: none - -# Credits -These definitions were written by Carlos Ballesteros Velasco , bonnici , Bart van der Schoor , Joe Skeen , Christopher Currens , Jon Stevens , Matt R. Wilson , Jose Colella . diff --git a/node_modules/@types/request/index.d.ts b/node_modules/@types/request/index.d.ts deleted file mode 100644 index 60e24b7692a8c..0000000000000 --- a/node_modules/@types/request/index.d.ts +++ /dev/null @@ -1,395 +0,0 @@ -// Type definitions for request 2.47 -// Project: https://github.com/request/request -// Definitions by: Carlos Ballesteros Velasco , -// bonnici , -// Bart van der Schoor , -// Joe Skeen , -// Christopher Currens , -// Jon Stevens , -// Matt R. 
Wilson -// Jose Colella -// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped -// TypeScript Version: 2.3 - -// Imported from: https://github.com/soywiz/typescript-node-definitions/d.ts - -/// - -import caseless = require('caseless'); -import stream = require('stream'); -import http = require('http'); -import https = require('https'); -import fs = require('fs'); -import FormData = require('form-data'); -import net = require('net'); -import tough = require('tough-cookie'); -import { Url } from 'url'; - -declare namespace request { - interface RequestAPI { - defaults(options: TOptions): RequestAPI; - defaults(options: RequiredUriUrl & TOptions): DefaultUriUrlRequestApi; - - (uri: string, options?: TOptions, callback?: RequestCallback): TRequest; - (uri: string, callback?: RequestCallback): TRequest; - (options: TUriUrlOptions & TOptions, callback?: RequestCallback): TRequest; - - get(uri: string, options?: TOptions, callback?: RequestCallback): TRequest; - get(uri: string, callback?: RequestCallback): TRequest; - get(options: TUriUrlOptions & TOptions, callback?: RequestCallback): TRequest; - - post(uri: string, options?: TOptions, callback?: RequestCallback): TRequest; - post(uri: string, callback?: RequestCallback): TRequest; - post(options: TUriUrlOptions & TOptions, callback?: RequestCallback): TRequest; - - put(uri: string, options?: TOptions, callback?: RequestCallback): TRequest; - put(uri: string, callback?: RequestCallback): TRequest; - put(options: TUriUrlOptions & TOptions, callback?: RequestCallback): TRequest; - - head(uri: string, options?: TOptions, callback?: RequestCallback): TRequest; - head(uri: string, callback?: RequestCallback): TRequest; - head(options: TUriUrlOptions & TOptions, callback?: RequestCallback): TRequest; - - patch(uri: string, options?: TOptions, callback?: RequestCallback): TRequest; - patch(uri: string, callback?: RequestCallback): TRequest; - patch(options: TUriUrlOptions & TOptions, callback?: RequestCallback): TRequest; - - del(uri: string, options?: TOptions, callback?: RequestCallback): TRequest; - del(uri: string, callback?: RequestCallback): TRequest; - del(options: TUriUrlOptions & TOptions, callback?: RequestCallback): TRequest; - - delete(uri: string, options?: TOptions, callback?: RequestCallback): TRequest; - delete(uri: string, callback?: RequestCallback): TRequest; - delete(options: TUriUrlOptions & TOptions, callback?: RequestCallback): TRequest; - - initParams(uri: string, options?: TOptions, callback?: RequestCallback): RequiredUriUrl & TOptions; - initParams(uriOrOpts: string | RequiredUriUrl & TOptions, callback?: RequestCallback): RequiredUriUrl & TOptions; - - forever(agentOptions: any, optionsArg: any): TRequest; - jar(store?: any): CookieJar; - cookie(str: string): Cookie | undefined; - - debug: boolean; - } - - interface DefaultUriUrlRequestApi extends RequestAPI { - defaults(options: TOptions): DefaultUriUrlRequestApi; - (callback?: RequestCallback): TRequest; - - get(uri: string, options?: TOptions, callback?: RequestCallback): TRequest; - get(uri: string, callback?: RequestCallback): TRequest; - get(options: TUriUrlOptions & TOptions, callback?: RequestCallback): TRequest; - get(callback?: RequestCallback): TRequest; - - post(uri: string, options?: TOptions, callback?: RequestCallback): TRequest; - post(uri: string, callback?: RequestCallback): TRequest; - post(options: TUriUrlOptions & TOptions, callback?: RequestCallback): TRequest; - post(callback?: RequestCallback): TRequest; - - put(uri: string, options?: 
TOptions, callback?: RequestCallback): TRequest; - put(uri: string, callback?: RequestCallback): TRequest; - put(options: TUriUrlOptions & TOptions, callback?: RequestCallback): TRequest; - put(callback?: RequestCallback): TRequest; - - head(uri: string, options?: TOptions, callback?: RequestCallback): TRequest; - head(uri: string, callback?: RequestCallback): TRequest; - head(options: TUriUrlOptions & TOptions, callback?: RequestCallback): TRequest; - head(callback?: RequestCallback): TRequest; - - patch(uri: string, options?: TOptions, callback?: RequestCallback): TRequest; - patch(uri: string, callback?: RequestCallback): TRequest; - patch(options: TUriUrlOptions & TOptions, callback?: RequestCallback): TRequest; - patch(callback?: RequestCallback): TRequest; - - del(uri: string, options?: TOptions, callback?: RequestCallback): TRequest; - del(uri: string, callback?: RequestCallback): TRequest; - del(options: TUriUrlOptions & TOptions, callback?: RequestCallback): TRequest; - del(callback?: RequestCallback): TRequest; - - delete(uri: string, options?: TOptions, callback?: RequestCallback): TRequest; - delete(uri: string, callback?: RequestCallback): TRequest; - delete(options: TUriUrlOptions & TOptions, callback?: RequestCallback): TRequest; - delete(callback?: RequestCallback): TRequest; - } - - interface CoreOptions { - baseUrl?: string; - callback?: RequestCallback; - jar?: CookieJar | boolean; - formData?: { [key: string]: any }; - form?: { [key: string]: any } | string; - auth?: AuthOptions; - oauth?: OAuthOptions; - aws?: AWSOptions; - hawk?: HawkOptions; - qs?: any; - qsStringifyOptions?: any; - qsParseOptions?: any; - json?: any; - jsonReviver?: (key: string, value: any) => any; - jsonReplacer?: (key: string, value: any) => any; - multipart?: RequestPart[] | Multipart; - agent?: http.Agent | https.Agent; - agentOptions?: any; - agentClass?: any; - forever?: any; - host?: string; - port?: number; - method?: string; - headers?: Headers; - body?: any; - family?: 4 | 6; - followRedirect?: boolean | ((response: http.IncomingMessage) => boolean); - followAllRedirects?: boolean; - followOriginalHttpMethod?: boolean; - maxRedirects?: number; - removeRefererHeader?: boolean; - encoding?: string | null; - pool?: any; - timeout?: number; - localAddress?: string; - proxy?: any; - tunnel?: boolean; - strictSSL?: boolean; - rejectUnauthorized?: boolean; - time?: boolean; - gzip?: boolean; - preambleCRLF?: boolean; - postambleCRLF?: boolean; - withCredentials?: boolean; - key?: Buffer; - cert?: Buffer; - passphrase?: string; - ca?: string | Buffer | string[] | Buffer[]; - har?: HttpArchiveRequest; - useQuerystring?: boolean; - } - - interface UriOptions { - uri: string | Url; - } - interface UrlOptions { - url: string | Url; - } - type RequiredUriUrl = UriOptions | UrlOptions; - - type OptionalUriUrl = RequiredUriUrl | {}; - - type OptionsWithUri = UriOptions & CoreOptions; - type OptionsWithUrl = UrlOptions & CoreOptions; - type Options = OptionsWithUri | OptionsWithUrl; - - type RequestCallback = (error: any, response: Response, body: any) => void; - - interface HttpArchiveRequest { - url?: string; - method?: string; - headers?: NameValuePair[]; - postData?: { - mimeType?: string; - params?: NameValuePair[]; - }; - } - - interface NameValuePair { - name: string; - value: string; - } - - interface Multipart { - chunked?: boolean; - data?: Array<{ - 'content-type'?: string, - body: string - }>; - } - - interface RequestPart { - headers?: Headers; - body: any; - } - - interface Request extends 
caseless.Httpified, stream.Stream { - readable: boolean; - writable: boolean; - explicitMethod?: true; - - debug(...args: any[]): void; - pipeDest(dest: any): void; - qs(q: object, clobber?: boolean): Request; - form(): FormData; - form(form: any): Request; - multipart(multipart: RequestPart[]): Request; - json(val: any): Request; - aws(opts: AWSOptions, now?: boolean): Request; - hawk(opts: HawkOptions): void; - auth(username: string, password: string, sendImmediately?: boolean, bearer?: string): Request; - oauth(oauth: OAuthOptions): Request; - jar(jar: CookieJar): Request; - - on(event: string, listener: (...args: any[]) => void): this; - on(event: 'request', listener: (req: http.ClientRequest) => void): this; - on(event: 'response', listener: (resp: Response) => void): this; - on(event: 'data', listener: (data: Buffer | string) => void): this; - on(event: 'error', listener: (e: Error) => void): this; - on(event: 'complete', listener: (resp: Response, body?: string | Buffer) => void): this; - on(event: 'pipe', listener: (src: stream.Readable) => void): this; - on(event: 'socket', listener: (src: net.Socket) => void): this; - - write(buffer: Buffer | string, cb?: (err?: Error) => void): boolean; - write(str: string, encoding?: string, cb?: (err?: Error) => void): boolean; - end(cb?: () => void): void; - end(chunk: string | Buffer, cb?: () => void): void; - end(str: string, encoding?: string, cb?: () => void): void; - - pause(): void; - resume(): void; - abort(): void; - destroy(): void; - toJSON(): RequestAsJSON; - - // several of the CoreOptions are copied onto the request instance - host?: string; - port?: number; - followAllRedirects?: boolean; - followOriginalHttpMethod?: boolean; - maxRedirects?: number; - removeRefererHeader?: boolean; - encoding?: string | null; - timeout?: number; - localAddress?: string; - strictSSL?: boolean; - rejectUnauthorized?: boolean; - time?: boolean; - gzip?: boolean; - preambleCRLF?: boolean; - postambleCRLF?: boolean; - withCredentials?: boolean; - key?: Buffer; - cert?: Buffer; - passphrase?: string; - ca?: string | Buffer | string[] | Buffer[]; - har?: HttpArchiveRequest; - - // set in `Request.prototype.init` - headers: Headers; - method: string; - pool: false | { [key: string]: http.Agent | https.Agent }; - dests: stream.Readable[]; - callback?: RequestCallback; - uri: Url & { href: string, pathname: string }; - proxy: null | string | Url; - tunnel: boolean; - setHost: boolean; - path: string; - agent: false | http.Agent | https.Agent; - body: Buffer | Buffer[] | string | string[] | stream.Readable; - timing?: boolean; - src?: stream.Readable; - - // set in `Request.prototype.start` - href: string; - startTime?: number; - startTimeNow?: number; - timings?: { - socket: number; - lookup: number; - connect: number; - response: number; - end: number; - }; - - // set in `Request.prototype.onRequestResponse` - elapsedTime?: number; - response?: Response; - } - - interface Response extends http.IncomingMessage { - statusCode: number; - statusMessage: string; - request: Request; - body: any; // Buffer, string, stream.Readable, or a plain object if `json` was truthy - caseless: caseless.Caseless; // case-insensitive access to headers - toJSON(): ResponseAsJSON; - - timingStart?: number; - elapsedTime?: number; - timings?: { - socket: number; - lookup: number; - connect: number; - response: number; - end: number; - }; - timingPhases?: { - wait: number; - dns: number; - tcp: number; - firstByte: number; - download: number; - total: number; - }; - } - - // 
aliases for backwards compatibility - type ResponseRequest = Request; - type RequestResponse = Response; - - interface Headers { - [key: string]: any; - } - - interface AuthOptions { - user?: string; - username?: string; - pass?: string; - password?: string; - sendImmediately?: boolean; - bearer?: string | (() => string); - } - - interface OAuthOptions { - callback?: string; - consumer_key?: string; - consumer_secret?: string; - token?: string; - token_secret?: string; - transport_method?: 'body' | 'header' | 'query'; - verifier?: string; - body_hash?: true | string; - } - - interface HawkOptions { - credentials: any; - } - - interface AWSOptions { - secret: string; - bucket?: string; - } - - interface RequestAsJSON { - uri: Url; - method: string; - headers: Headers; - } - - interface ResponseAsJSON { - statusCode: number; - body: any; - headers: Headers; - request: RequestAsJSON; - } - - type Cookie = tough.Cookie; - - interface CookieJar { - setCookie(cookieOrStr: Cookie | string, uri: string | Url, options?: tough.CookieJar.SetCookieOptions): void; - getCookieString(uri: string | Url): string; - getCookies(uri: string | Url): Cookie[]; - } -} -declare var request: request.RequestAPI; -export = request; diff --git a/node_modules/@types/request/package.json b/node_modules/@types/request/package.json deleted file mode 100644 index f9dd90eb0f92d..0000000000000 --- a/node_modules/@types/request/package.json +++ /dev/null @@ -1,84 +0,0 @@ -{ - "_from": "@types/request@^2.47.1", - "_id": "@types/request@2.47.1", - "_inBundle": false, - "_integrity": "sha512-TV3XLvDjQbIeVxJ1Z3oCTDk/KuYwwcNKVwz2YaT0F5u86Prgc4syDAp6P96rkTQQ4bIdh+VswQIC9zS6NjY7/g==", - "_location": "/@types/request", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "@types/request@^2.47.1", - "name": "@types/request", - "escapedName": "@types%2frequest", - "scope": "@types", - "rawSpec": "^2.47.1", - "saveSpec": null, - "fetchSpec": "^2.47.1" - }, - "_requiredBy": [ - "/nano" - ], - "_resolved": "https://registry.npmjs.org/@types/request/-/request-2.47.1.tgz", - "_shasum": "25410d3afbdac04c91a94ad9efc9824100735824", - "_spec": "@types/request@^2.47.1", - "_where": "/Users/zkat/Documents/code/work/npm/node_modules/nano", - "bugs": { - "url": "https://github.com/DefinitelyTyped/DefinitelyTyped/issues" - }, - "bundleDependencies": false, - "contributors": [ - { - "name": "Carlos Ballesteros Velasco", - "url": "https://github.com/soywiz" - }, - { - "name": "bonnici", - "url": "https://github.com/bonnici" - }, - { - "name": "Bart van der Schoor", - "url": "https://github.com/Bartvds" - }, - { - "name": "Joe Skeen", - "url": "https://github.com/joeskeen" - }, - { - "name": "Christopher Currens", - "url": "https://github.com/ccurrens" - }, - { - "name": "Jon Stevens", - "url": "https://github.com/lookfirst" - }, - { - "name": "Matt R. 
Wilson", - "url": "https://github.com/mastermatt" - }, - { - "name": "Jose Colella", - "url": "https://github.com/josecolella" - } - ], - "dependencies": { - "@types/caseless": "*", - "@types/form-data": "*", - "@types/node": "*", - "@types/tough-cookie": "*" - }, - "deprecated": false, - "description": "TypeScript definitions for request", - "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped#readme", - "license": "MIT", - "main": "", - "name": "@types/request", - "repository": { - "type": "git", - "url": "git+https://github.com/DefinitelyTyped/DefinitelyTyped.git" - }, - "scripts": {}, - "typeScriptVersion": "2.3", - "typesPublisherContentHash": "5b1574ed6feec1a1a58d64c2fd2eddab01d7594c6209159ef0ac92ed906e67b9", - "version": "2.47.1" -} diff --git a/node_modules/@types/tough-cookie/LICENSE b/node_modules/@types/tough-cookie/LICENSE deleted file mode 100644 index 21071075c2459..0000000000000 --- a/node_modules/@types/tough-cookie/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ - MIT License - - Copyright (c) Microsoft Corporation. All rights reserved. - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - SOFTWARE diff --git a/node_modules/@types/tough-cookie/README.md b/node_modules/@types/tough-cookie/README.md deleted file mode 100644 index d01fca3928ff7..0000000000000 --- a/node_modules/@types/tough-cookie/README.md +++ /dev/null @@ -1,16 +0,0 @@ -# Installation -> `npm install --save @types/tough-cookie` - -# Summary -This package contains type definitions for tough-cookie (https://github.com/salesforce/tough-cookie). - -# Details -Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped.git/tree/master/types/tough-cookie - -Additional Details - * Last updated: Tue, 08 May 2018 22:09:43 GMT - * Dependencies: none - * Global values: none - -# Credits -These definitions were written by Leonard Thieu , LiJinyao , Michael Wei . diff --git a/node_modules/@types/tough-cookie/index.d.ts b/node_modules/@types/tough-cookie/index.d.ts deleted file mode 100644 index d4e9a585489ea..0000000000000 --- a/node_modules/@types/tough-cookie/index.d.ts +++ /dev/null @@ -1,244 +0,0 @@ -// Type definitions for tough-cookie 2.3 -// Project: https://github.com/salesforce/tough-cookie -// Definitions by: Leonard Thieu -// LiJinyao -// Michael Wei -// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped -// TypeScript Version: 2.2 - -/** - * Parse a cookie date string into a Date. - * Parses according to RFC6265 Section 5.1.1, not Date.parse(). 
- */ -export function parseDate(string: string): Date; - -/** - * Format a Date into a RFC1123 string (the RFC6265-recommended format). - */ -export function formatDate(date: Date): string; - -/** - * Transforms a domain-name into a canonical domain-name. - * The canonical domain-name is a trimmed, lowercased, stripped-of-leading-dot - * and optionally punycode-encoded domain-name (Section 5.1.2 of RFC6265). - * For the most part, this function is idempotent (can be run again on its output without ill effects). - */ -export function canonicalDomain(str: string): string; - -/** - * Answers "does this real domain match the domain in a cookie?". - * The str is the "current" domain-name and the domStr is the "cookie" domain-name. - * Matches according to RFC6265 Section 5.1.3, but it helps to think of it as a "suffix match". - * - * The canonicalize parameter will run the other two paramters through canonicalDomain or not. - */ -export function domainMatch(str: string, domStr: string, canonicalize?: boolean): boolean; - -/** - * Given a current request/response path, gives the Path apropriate for storing in a cookie. - * This is basically the "directory" of a "file" in the path, but is specified by Section 5.1.4 of the RFC. - * - * The path parameter MUST be only the pathname part of a URI (i.e. excludes the hostname, query, fragment, etc.). - * This is the .pathname property of node's uri.parse() output. - */ -export function defaultPath(path: string): string; - -/** - * Answers "does the request-path path-match a given cookie-path?" as per RFC6265 Section 5.1.4. - * Returns a boolean. - * - * This is essentially a prefix-match where cookiePath is a prefix of reqPath. - */ -export function pathMatch(reqPath: string, cookiePath: string): boolean; - -/** - * alias for Cookie.fromJSON(string) - */ -export function fromJSON(string: string): Cookie; - -export function getPublicSuffix(hostname: string): string | null; - -export function cookieCompare(a: Cookie, b: Cookie): number; - -export function permuteDomain(domain: string): string[]; - -export function permutePath(path: string): string[]; - -// region Cookie - -export class Cookie { - static parse(cookieString: string, options?: Cookie.ParseOptions): Cookie | undefined; - - static fromJSON(strOrObj: string | object): Cookie | null; - - constructor(properties?: Cookie.Properties); - - // TODO: Some of the following properties might actually be nullable. 
- - key: string; - value: string; - expires: Date; - maxAge: number | 'Infinity' | '-Infinity'; - domain: string; - path: string; - secure: boolean; - httpOnly: boolean; - extensions: string[]; - creation: Date; - creationIndex: number; - - hostOnly: boolean | null; - pathIsDefault: boolean | null; - lastAccessed: Date | null; - - toString(): string; - - cookieString(): string; - - setExpires(String: string): void; - - setMaxAge(number: number): void; - - expiryTime(now?: number): number | typeof Infinity; - - expiryDate(now?: number): Date; - - TTL(now?: Date): number | typeof Infinity; - - canonicalizedDomain(): string; - - cdomain(): string; - - toJSON(): { [key: string]: any; }; - - clone(): Cookie; - - validate(): boolean | string; -} - -export namespace Cookie { - interface ParseOptions { - loose?: boolean; - } - - interface Properties { - key?: string; - value?: string; - expires?: Date; - maxAge?: number | 'Infinity' | '-Infinity'; - domain?: string; - path?: string; - secure?: boolean; - httpOnly?: boolean; - extensions?: string[]; - creation?: Date; - creationIndex?: number; - - hostOnly?: boolean; - pathIsDefault?: boolean; - lastAccessed?: Date; - } - - interface Serialized { - [key: string]: any; - } -} - -// endregion - -// region CookieJar - -export class CookieJar { - static deserialize(serialized: CookieJar.Serialized | string, store: Store, cb: (err: Error | null, object: CookieJar) => void): void; - static deserialize(serialized: CookieJar.Serialized | string, cb: (err: Error | null, object: CookieJar) => void): void; - - static deserializeSync(serialized: CookieJar.Serialized | string, store?: Store): CookieJar; - - static fromJSON(string: string): CookieJar; - - constructor(store?: Store, options?: CookieJar.Options); - - setCookie(cookieOrString: Cookie | string, currentUrl: string, options: CookieJar.SetCookieOptions, cb: (err: Error | null, cookie: Cookie) => void): void; - setCookie(cookieOrString: Cookie | string, currentUrl: string, cb: (err: Error, cookie: Cookie) => void): void; - - setCookieSync(cookieOrString: Cookie | string, currentUrl: string, options?: CookieJar.SetCookieOptions): void; - - getCookies(currentUrl: string, options: CookieJar.GetCookiesOptions, cb: (err: Error | null, cookies: Cookie[]) => void): void; - getCookies(currentUrl: string, cb: (err: Error | null, cookies: Cookie[]) => void): void; - - getCookiesSync(currentUrl: string, options?: CookieJar.GetCookiesOptions): Cookie[]; - - getCookieString(currentUrl: string, options: CookieJar.GetCookiesOptions, cb: (err: Error | null, cookies: string) => void): void; - getCookieString(currentUrl: string, cb: (err: Error | null, cookies: string) => void): void; - - getCookieStringSync(currentUrl: string, options?: CookieJar.GetCookiesOptions): string; - - getSetCookieStrings(currentUrl: string, options: CookieJar.GetCookiesOptions, cb: (err: Error | null, cookies: string) => void): void; - getSetCookieStrings(currentUrl: string, cb: (err: Error | null, cookies: string) => void): void; - - getSetCookieStringsSync(currentUrl: string, options?: CookieJar.GetCookiesOptions): string; - - serialize(cb: (err: Error | null, serializedObject: CookieJar.Serialized) => void): void; - - serializeSync(): CookieJar.Serialized; - - toJSON(): CookieJar.Serialized; - - clone(store: Store, cb: (err: Error | null, newJar: CookieJar) => void): void; - clone(cb: (err: Error | null, newJar: CookieJar) => void): void; - - cloneSync(store: Store): CookieJar; -} - -export namespace CookieJar { - interface Options { - 
rejectPublicSuffixes?: boolean; - looseMode?: boolean; - } - - interface SetCookieOptions { - http?: boolean; - secure?: boolean; - now?: Date; - ignoreError?: boolean; - } - - interface GetCookiesOptions { - http?: boolean; - secure?: boolean; - date?: Date; - expire?: boolean; - allPoints?: boolean; - } - - interface Serialized { - version: string; - storeType: string; - rejectPublicSuffixes: boolean; - cookies: Cookie.Serialized[]; - } -} - -// endregion - -// region Store - -export abstract class Store { - findCookie(domain: string, path: string, key: string, cb: (err: Error | null, cookie: Cookie | null) => void): void; - - findCookies(domain: string, path: string, cb: (err: Error | null, cookie: Cookie[]) => void): void; - - putCookie(cookie: Cookie, cb: (err: Error | null) => void): void; - - updateCookie(oldCookie: Cookie, newCookie: Cookie, cb: (err: Error | null) => void): void; - - removeCookie(domain: string, path: string, key: string, cb: (err: Error | null) => void): void; - - removeCookies(domain: string, path: string, cb: (err: Error | null) => void): void; - - getAllCookies(cb: (err: Error | null, cookie: Cookie[]) => void): void; -} - -export class MemoryCookieStore extends Store { } - -// endregion diff --git a/node_modules/@types/tough-cookie/package.json b/node_modules/@types/tough-cookie/package.json deleted file mode 100644 index 659ea6cde7678..0000000000000 --- a/node_modules/@types/tough-cookie/package.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "_from": "@types/tough-cookie@*", - "_id": "@types/tough-cookie@2.3.3", - "_inBundle": false, - "_integrity": "sha512-MDQLxNFRLasqS4UlkWMSACMKeSm1x4Q3TxzUC7KQUsh6RK1ZrQ0VEyE3yzXcBu+K8ejVj4wuX32eUG02yNp+YQ==", - "_location": "/@types/tough-cookie", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "@types/tough-cookie@*", - "name": "@types/tough-cookie", - "escapedName": "@types%2ftough-cookie", - "scope": "@types", - "rawSpec": "*", - "saveSpec": null, - "fetchSpec": "*" - }, - "_requiredBy": [ - "/@types/request" - ], - "_resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.3.tgz", - "_shasum": "7f226d67d654ec9070e755f46daebf014628e9d9", - "_spec": "@types/tough-cookie@*", - "_where": "/Users/zkat/Documents/code/work/npm/node_modules/@types/request", - "bugs": { - "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git/issues" - }, - "bundleDependencies": false, - "contributors": [ - { - "name": "Leonard Thieu", - "url": "https://github.com/leonard-thieu" - }, - { - "name": "LiJinyao", - "url": "https://github.com/LiJinyao" - }, - { - "name": "Michael Wei", - "url": "https://github.com/no2chem" - } - ], - "dependencies": {}, - "deprecated": false, - "description": "TypeScript definitions for tough-cookie", - "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped.git#readme", - "license": "MIT", - "main": "", - "name": "@types/tough-cookie", - "repository": { - "type": "git", - "url": "git+https://github.com/DefinitelyTyped/DefinitelyTyped.git.git" - }, - "scripts": {}, - "typeScriptVersion": "2.2", - "typesPublisherContentHash": "9e1c460bd876543784250bfe8e617c1d0413eda93f509efcf07ff11d482ab6b1", - "version": "2.3.3" -} diff --git a/node_modules/JSONStream/index.js b/node_modules/JSONStream/index.js index c3bf14f968304..f4ed901f96560 100755 --- a/node_modules/JSONStream/index.js +++ b/node_modules/JSONStream/index.js @@ -3,6 +3,8 @@ var Parser = require('jsonparse') , through = require('through') +var bufferFrom = Buffer.from && Buffer.from !== 
Uint8Array.from + /* the value of this.stack that creationix's jsonparse has is weird. @@ -17,7 +19,7 @@ exports.parse = function (path, map) { var parser = new Parser() var stream = through(function (chunk) { if('string' === typeof chunk) - chunk = new Buffer(chunk) + chunk = bufferFrom ? Buffer.from(chunk) : new Buffer(chunk) parser.write(chunk) }, function (data) { diff --git a/node_modules/JSONStream/package.json b/node_modules/JSONStream/package.json index 23588d31b13a4..91783af0b0ab6 100644 --- a/node_modules/JSONStream/package.json +++ b/node_modules/JSONStream/package.json @@ -1,28 +1,29 @@ { - "_from": "JSONStream@1.3.4", - "_id": "JSONStream@1.3.4", + "_from": "JSONStream@1.3.5", + "_id": "JSONStream@1.3.5", "_inBundle": false, - "_integrity": "sha512-Y7vfi3I5oMOYIr+WxV8NZxDSwcbNgzdKYsTNInmycOq9bUYwGg9ryu57Wg5NLmCjqdFPNUmpMBo3kSJN9tCbXg==", + "_integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==", "_location": "/JSONStream", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "JSONStream@1.3.4", + "raw": "JSONStream@1.3.5", "name": "JSONStream", "escapedName": "JSONStream", - "rawSpec": "1.3.4", + "rawSpec": "1.3.5", "saveSpec": null, - "fetchSpec": "1.3.4" + "fetchSpec": "1.3.5" }, "_requiredBy": [ "#USER", - "/" + "/", + "/npm-registry-fetch" ], - "_resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.4.tgz", - "_shasum": "615bb2adb0cd34c8f4c447b5f6512fa1d8f16a2e", - "_spec": "JSONStream@1.3.4", - "_where": "/Users/zkat/Documents/code/work/npm", + "_resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz", + "_shasum": "3208c1f08d3a4d99261ab64f92302bc15e111ca0", + "_spec": "JSONStream@1.3.5", + "_where": "/Users/aeschright/code/cli", "author": { "name": "Dominic Tarr", "email": "dominic.tarr@gmail.com", @@ -69,7 +70,7 @@ "url": "git://github.com/dominictarr/JSONStream.git" }, "scripts": { - "test": "set -e; for t in test/*.js; do echo '***' $t '***'; node $t; done" + "test": "node test/run.js" }, - "version": "1.3.4" + "version": "1.3.5" } diff --git a/node_modules/JSONStream/test/parsejson.js b/node_modules/JSONStream/test/parsejson.js index a94333446df2a..e70dabc184624 100644 --- a/node_modules/JSONStream/test/parsejson.js +++ b/node_modules/JSONStream/test/parsejson.js @@ -9,6 +9,9 @@ var r = Math.random() , p = new Parser() , assert = require('assert') , times = 20 + , bufferFrom = Buffer.from && Buffer.from !== Uint8Array.from + , str + while (times --) { assert.equal(JSON.parse(JSON.stringify(r)), r, 'core JSON') @@ -18,7 +21,8 @@ while (times --) { assert.equal(v,r) } console.error('correct', r) - p.write (new Buffer(JSON.stringify([r]))) + str = JSON.stringify([r]) + p.write (bufferFrom ? 
Buffer.from(str) : new Buffer(str)) diff --git a/node_modules/JSONStream/test/run.js b/node_modules/JSONStream/test/run.js new file mode 100644 index 0000000000000..7d62e7385bd44 --- /dev/null +++ b/node_modules/JSONStream/test/run.js @@ -0,0 +1,13 @@ +var readdirSync = require('fs').readdirSync +var spawnSync = require('child_process').spawnSync +var extname = require('path').extname + +var files = readdirSync(__dirname) +files.forEach(function(file){ + if (extname(file) !== '.js' || file === 'run.js') + return + console.log(`*** ${file} ***`) + var result = spawnSync(process.argv0, [file], { stdio: 'inherit', cwd: __dirname} ) + if (result.status !== 0) + process.exit(result.status) +}) diff --git a/node_modules/agent-base/.travis.yml b/node_modules/agent-base/.travis.yml index 6ce862c6f63a7..76200951f0450 100644 --- a/node_modules/agent-base/.travis.yml +++ b/node_modules/agent-base/.travis.yml @@ -9,6 +9,7 @@ node_js: - "7" - "8" - "9" + - "10" install: - PATH="`npm bin`:`npm bin -g`:$PATH" diff --git a/node_modules/agent-base/index.d.ts b/node_modules/agent-base/index.d.ts new file mode 100644 index 0000000000000..ff6788bdc77c6 --- /dev/null +++ b/node_modules/agent-base/index.d.ts @@ -0,0 +1,43 @@ +// Type definitions for agent-base 4.2.1 +// Project: https://github.com/TooTallNate/node-agent-base +// Definitions by: Christopher Quadflieg + +/// +import { EventEmitter } from 'events'; + +declare namespace Agent { + export type AgentCallback = ( + req?: any, + opts?: { + secureEndpoint: boolean; + } + ) => void; + + export interface AgentOptions { + timeout?: number; + host?: string; + port?: number; + [key: string]: any; + } + + export interface Agent extends EventEmitter { + _promisifiedCallback: boolean; + timeout: number | null; + options?: AgentOptions; + callback: AgentCallback; + addRequest: (req?: any, opts?: any) => void; + freeSocket: (socket: any, opts: any) => void; + } +} + +/** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. 
+ */ +declare function Agent(opts?: Agent.AgentOptions): Agent.Agent; +declare function Agent( + callback: Agent.AgentCallback, + opts?: Agent.AgentOptions +): Agent.Agent; + +export = Agent; diff --git a/node_modules/agent-base/index.js b/node_modules/agent-base/index.js index b1f42e6317431..0ee6b29699a67 100644 --- a/node_modules/agent-base/index.js +++ b/node_modules/agent-base/index.js @@ -94,6 +94,7 @@ Agent.prototype.addRequest = function addRequest(req, _opts) { let timeout; let timedOut = false; const timeoutMs = this.timeout; + const freeSocket = this.freeSocket; function onerror(err) { if (req._hadError) return; @@ -133,10 +134,14 @@ Agent.prototype.addRequest = function addRequest(req, _opts) { // responsibility for this `req` to the Agent from here on socket.addRequest(req, opts); } else if (socket) { + function onfree() { + freeSocket(socket, opts); + } + socket.on('free', onfree); req.onSocket(socket); } else { const err = new Error( - `no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\`` + 'no Duplex stream was returned to agent-base for `' + req.method + ' ' + req.path + '`' ); onerror(err); } @@ -158,3 +163,8 @@ Agent.prototype.addRequest = function addRequest(req, _opts) { Promise.reject(err).catch(callbackError); } }; + +Agent.prototype.freeSocket = function freeSocket(socket, opts) { + // TODO reuse sockets + socket.destroy(); +}; diff --git a/node_modules/agent-base/package.json b/node_modules/agent-base/package.json index 59c5e0be25956..70da68723410f 100644 --- a/node_modules/agent-base/package.json +++ b/node_modules/agent-base/package.json @@ -1,8 +1,8 @@ { "_from": "agent-base@4", - "_id": "agent-base@4.2.0", + "_id": "agent-base@4.3.0", "_inBundle": false, - "_integrity": "sha512-c+R/U5X+2zz2+UCrCFv6odQzJdoqI+YecuhnAJLa1zYaMc13zPfwMwZrr91Pd1DYNo/yPRbiM4WVf9whgwFsIg==", + "_integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", "_location": "/agent-base", "_phantomChildren": {}, "_requested": { @@ -17,15 +17,12 @@ }, "_requiredBy": [ "/http-proxy-agent", - "/https-proxy-agent", - "/npm-profile/socks-proxy-agent", - "/npm-registry-fetch/socks-proxy-agent", - "/socks-proxy-agent" + "/https-proxy-agent" ], - "_resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.2.0.tgz", - "_shasum": "9838b5c3392b962bad031e6a4c5e1024abec45ce", + "_resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", + "_shasum": "8165f01c436009bccad0b1d122f05ed770efc6ee", "_spec": "agent-base@4", - "_where": "/Users/rebecca/code/npm/node_modules/http-proxy-agent", + "_where": "/Users/isaacs/dev/npm/cli/node_modules/http-proxy-agent", "author": { "name": "Nathan Rajlich", "email": "nathan@tootallnate.net", @@ -41,6 +38,8 @@ "deprecated": false, "description": "Turn a function into an `http.Agent` instance", "devDependencies": { + "@types/es6-promisify": "^5.0.0", + "@types/node": "^10.5.3", "mocha": "^3.4.2", "ws": "^3.0.0" }, @@ -65,5 +64,5 @@ "scripts": { "test": "mocha --reporter spec" }, - "version": "4.2.0" + "version": "4.3.0" } diff --git a/node_modules/agent-base/patch-core.js b/node_modules/agent-base/patch-core.js index 47d26a72b0a65..21cbbb6753bdf 100644 --- a/node_modules/agent-base/patch-core.js +++ b/node_modules/agent-base/patch-core.js @@ -8,21 +8,25 @@ const https = require('https'); * * There is currently no PR attempting to move this property upstream. 
*/ -https.request = (function(request) { - return function(_options, cb) { - let options; - if (typeof _options === 'string') { - options = url.parse(_options); - } else { - options = Object.assign({}, _options); - } - if (null == options.port) { - options.port = 443; - } - options.secureEndpoint = true; - return request.call(https, options, cb); - }; -})(https.request); +const patchMarker = "__agent_base_https_request_patched__"; +if (!https.request[patchMarker]) { + https.request = (function(request) { + return function(_options, cb) { + let options; + if (typeof _options === 'string') { + options = url.parse(_options); + } else { + options = Object.assign({}, _options); + } + if (null == options.port) { + options.port = 443; + } + options.secureEndpoint = true; + return request.call(https, options, cb); + }; + })(https.request); + https.request[patchMarker] = true; +} /** * This is needed for Node.js >= 9.0.0 to make sure `https.get()` uses the @@ -30,7 +34,17 @@ https.request = (function(request) { * * Ref: https://github.com/nodejs/node/commit/5118f31 */ -https.get = function(options, cb) { +https.get = function (_url, _options, cb) { + let options; + if (typeof _url === 'string' && _options && typeof _options !== 'function') { + options = Object.assign({}, url.parse(_url), _options); + } else if (!_options && !cb) { + options = _url; + } else if (!cb) { + options = _url; + cb = _options; + } + const req = https.request(options, cb); req.end(); return req; diff --git a/node_modules/agent-base/test/test.js b/node_modules/agent-base/test/test.js index da2e91983548b..0f372c0760631 100644 --- a/node_modules/agent-base/test/test.js +++ b/node_modules/agent-base/test/test.js @@ -189,9 +189,10 @@ describe('Agent', function() { }) }; var req = http.request(opts, function(res) { - assert.equal('0.9', res.httpVersion); - assert.equal(111, res.statusCode); + assert.equal('1.0', res.httpVersion); + assert.equal(200, res.statusCode); assert.equal('bar', res.headers.foo); + assert.deepEqual(['1', '2'], res.headers['set-cookie']); done(); }); @@ -199,8 +200,8 @@ describe('Agent', function() { // doesn't *actually* attach the listeners to the "stream" until // this happens req.once('socket', function() { - var buf = new Buffer( - 'HTTP/0.9 111\r\n' + + var buf = Buffer.from( + 'HTTP/1.0 200\r\n' + 'Foo: bar\r\n' + 'Set-Cookie: 1\r\n' + 'Set-Cookie: 2\r\n\r\n' @@ -398,6 +399,30 @@ describe('"http" module', function() { }); }); + it('should free sockets after use', function(done) { + var agent = new Agent(function(req, opts, fn) { + var socket = net.connect(opts); + fn(null, socket); + }); + + // add HTTP server "request" listener + var gotReq = false; + server.once('request', function(req, res) { + gotReq = true; + res.end(); + }); + + var info = url.parse('http://127.0.0.1:' + port + '/foo'); + info.agent = agent; + http.get(info, function(res) { + res.socket.emit('free'); + assert.equal(true, res.socket.destroyed); + assert(gotReq); + done(); + }); + }); + + describe('PassthroughAgent', function() { it('should pass through to `http.globalAgent`', function(done) { // add HTTP server "request" listener @@ -517,6 +542,25 @@ describe('"https" module', function() { }); }); + it('should support the 3-argument `https.get()`', function(done) { + var agent = new Agent(function(req, opts, fn) { + assert.equal('google.com', opts.host); + assert.equal('/q', opts.pathname || opts.path); + assert.equal('881', opts.port); + assert.equal('bar', opts.foo); + done(); + }); + + https.get( + 'https://google.com:881/q', 
+ { + host: 'google.com', + foo: 'bar', + agent: agent + } + ); + }); + it('should default to port 443', function(done) { var agent = new Agent(function(req, opts, fn) { assert.equal(true, opts.secureEndpoint); @@ -535,6 +579,17 @@ describe('"https" module', function() { }); }); + it('should not re-patch https.request', () => { + var patchModulePath = "../patch-core"; + var patchedRequest = https.request; + + delete require.cache[require.resolve(patchModulePath)]; + require(patchModulePath); + + assert.equal(patchedRequest, https.request); + assert.equal(true, https.request.__agent_base_https_request_patched__); + }); + describe('PassthroughAgent', function() { it('should pass through to `https.globalAgent`', function(done) { // add HTTP server "request" listener diff --git a/node_modules/agentkeepalive/History.md b/node_modules/agentkeepalive/History.md index cc32a475e7699..d5d14d8b4cb68 100644 --- a/node_modules/agentkeepalive/History.md +++ b/node_modules/agentkeepalive/History.md @@ -1,4 +1,26 @@ +3.5.2 / 2018-10-19 +================== + +**fixes** + * [[`5751fc1`](http://github.com/node-modules/agentkeepalive/commit/5751fc1180ed6544602c681ffbd08ca66a0cb12c)] - fix: sockLen being miscalculated when removing sockets (#60) (Ehden Sinai <>) + +3.5.1 / 2018-07-31 +================== + +**fixes** + * [[`495f1ab`](http://github.com/node-modules/agentkeepalive/commit/495f1ab625d43945d72f68096b97db723d4f0657)] - fix: add the lost npm files (#66) (Henry Zhuang <>) + +3.5.0 / 2018-07-31 +================== + +**features** + * [[`16f5aea`](http://github.com/node-modules/agentkeepalive/commit/16f5aeadfda57f1c602652f1472a63cc83cd05bf)] - feat: add typing define. (#65) (Henry Zhuang <>) + +**others** + * [[`28fa062`](http://github.com/node-modules/agentkeepalive/commit/28fa06246fb5103f88ebeeb8563757a9078b8157)] - docs: add "per host" to description of maxFreeSockets (tony-gutierrez <>) + * [[`7df2577`](http://github.com/node-modules/agentkeepalive/commit/7df25774f00a1031ca4daad2878a17e0539072a2)] - test: run test on node 10 (#63) (fengmk2 <>) + 3.4.1 / 2018-03-08 ================== diff --git a/node_modules/agentkeepalive/README.md b/node_modules/agentkeepalive/README.md index ce067f10c7fb7..823145821b72f 100644 --- a/node_modules/agentkeepalive/README.md +++ b/node_modules/agentkeepalive/README.md @@ -56,7 +56,7 @@ $ npm install agentkeepalive --save Default is `freeSocketKeepAliveTimeout * 2`. * `maxSockets` {Number} Maximum number of sockets to allow per host. Default = `Infinity`. - * `maxFreeSockets` {Number} Maximum number of sockets to leave open + * `maxFreeSockets` {Number} Maximum number of sockets (per host) to leave open in a free state. Only relevant if `keepAlive` is set to `true`. Default = `256`. * `socketActiveTTL` {Number} Sets the socket active time to live, even if it's in use. 
diff --git a/node_modules/agentkeepalive/index.d.ts b/node_modules/agentkeepalive/index.d.ts new file mode 100644 index 0000000000000..c11636f7ca116 --- /dev/null +++ b/node_modules/agentkeepalive/index.d.ts @@ -0,0 +1,43 @@ +declare module "agentkeepalive" { + import * as http from 'http'; + import * as https from 'https'; + + interface AgentStatus { + createSocketCount: number, + createSocketErrorCount: number, + closeSocketCount: number, + errorSocketCount: number, + timeoutSocketCount: number, + requestCount: number, + freeSockets: object, + sockets: object, + requests: object, + } + + interface HttpOptions extends http.AgentOptions { + freeSocketKeepAliveTimeout?: number; + timeout?: number; + socketActiveTTL?: number; + } + + interface HttpsOptions extends https.AgentOptions { + freeSocketKeepAliveTimeout?: number; + timeout?: number; + socketActiveTTL?: number; + } + + class internal extends http.Agent { + constructor(opts?: HttpOptions); + readonly statusChanged: boolean; + createSocket(req: http.IncomingMessage, options: http.RequestOptions, cb: Function): void; + getCurrentStatus(): AgentStatus; + } + + namespace internal { + export class HttpsAgent extends internal { + constructor(opts?: HttpsOptions); + } + } + + export = internal; +} diff --git a/node_modules/agentkeepalive/lib/_http_agent.js b/node_modules/agentkeepalive/lib/_http_agent.js index 83f1d115eac84..c324b7f875ec3 100644 --- a/node_modules/agentkeepalive/lib/_http_agent.js +++ b/node_modules/agentkeepalive/lib/_http_agent.js @@ -380,7 +380,7 @@ Agent.prototype.removeSocket = function removeSocket(s, options) { // [patch start] var freeLen = this.freeSockets[name] ? this.freeSockets[name].length : 0; - var sockLen = freeLen + this.sockets[name] ? this.sockets[name].length : 0; + var sockLen = freeLen + (this.sockets[name] ? 
this.sockets[name].length : 0); // [patch end] if (this.requests[name] && this.requests[name].length && sockLen < this.maxSockets) { diff --git a/node_modules/agentkeepalive/package.json b/node_modules/agentkeepalive/package.json index c0ce0576bc107..ba6470dba8a13 100644 --- a/node_modules/agentkeepalive/package.json +++ b/node_modules/agentkeepalive/package.json @@ -1,8 +1,8 @@ { "_from": "agentkeepalive@^3.4.1", - "_id": "agentkeepalive@3.4.1", + "_id": "agentkeepalive@3.5.2", "_inBundle": false, - "_integrity": "sha512-MPIwsZU9PP9kOrZpyu2042kYA8Fdt/AedQYkYXucHgF9QoD9dXVp0ypuGnHXSR0hTstBxdt85Xkh4JolYfK5wg==", + "_integrity": "sha512-e0L/HNe6qkQ7H19kTlRRqUibEAwDK5AFk6y3PtMsuut2VAH6+Q4xZml1tNDJD7kSAyqmbG/K08K5WEJYtUrSlQ==", "_location": "/agentkeepalive", "_phantomChildren": {}, "_requested": { @@ -16,14 +16,12 @@ "fetchSpec": "^3.4.1" }, "_requiredBy": [ - "/make-fetch-happen", - "/npm-profile/make-fetch-happen", - "/npm-registry-fetch/make-fetch-happen" + "/make-fetch-happen" ], - "_resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-3.4.1.tgz", - "_shasum": "aa95aebc3a749bca5ed53e3880a09f5235b48f0c", + "_resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-3.5.2.tgz", + "_shasum": "a113924dd3fa24a0bc3b78108c450c2abee00f67", "_spec": "agentkeepalive@^3.4.1", - "_where": "/Users/rebecca/code/npm/node_modules/make-fetch-happen", + "_where": "/Users/isaacs/dev/npm/cli/node_modules/make-fetch-happen", "author": { "name": "fengmk2", "email": "fengmk2@gmail.com", @@ -35,7 +33,7 @@ }, "bundleDependencies": false, "ci": { - "version": "4.3.2, 4, 6, 8, 9" + "version": "4, 6, 8, 10" }, "dependencies": { "humanize-ms": "^1.2.1" @@ -43,11 +41,11 @@ "deprecated": false, "description": "Missing keepalive http.Agent", "devDependencies": { - "autod": "^2.8.0", - "egg-bin": "^1.10.3", - "egg-ci": "^1.7.0", - "eslint": "^3.19.0", - "eslint-config-egg": "^4.2.0", + "autod": "^3.0.1", + "egg-bin": "^1.11.1", + "egg-ci": "^1.8.0", + "eslint": "^4.19.1", + "eslint-config-egg": "^6.0.0", "pedding": "^1.1.0" }, "engines": { @@ -55,6 +53,7 @@ }, "files": [ "index.js", + "index.d.ts", "browser.js", "lib" ], @@ -80,5 +79,5 @@ "lint": "eslint lib test index.js", "test": "egg-bin test" }, - "version": "3.4.1" + "version": "3.5.2" } diff --git a/node_modules/aproba/CHANGELOG.md b/node_modules/aproba/CHANGELOG.md new file mode 100644 index 0000000000000..bab30ecb7e625 --- /dev/null +++ b/node_modules/aproba/CHANGELOG.md @@ -0,0 +1,4 @@ +2.0.0 + * Drop support for 0.10 and 0.12. They haven't been in travis but still, + since we _know_ we'll break with them now it's only polite to do a + major bump. 
diff --git a/node_modules/aproba/index.js b/node_modules/aproba/index.js index 6f3f797c09a75..fd947481ba557 100644 --- a/node_modules/aproba/index.js +++ b/node_modules/aproba/index.js @@ -1,38 +1,39 @@ 'use strict' +module.exports = validate function isArguments (thingy) { return thingy != null && typeof thingy === 'object' && thingy.hasOwnProperty('callee') } -var types = { - '*': {label: 'any', check: function () { return true }}, - A: {label: 'array', check: function (thingy) { return Array.isArray(thingy) || isArguments(thingy) }}, - S: {label: 'string', check: function (thingy) { return typeof thingy === 'string' }}, - N: {label: 'number', check: function (thingy) { return typeof thingy === 'number' }}, - F: {label: 'function', check: function (thingy) { return typeof thingy === 'function' }}, - O: {label: 'object', check: function (thingy) { return typeof thingy === 'object' && thingy != null && !types.A.check(thingy) && !types.E.check(thingy) }}, - B: {label: 'boolean', check: function (thingy) { return typeof thingy === 'boolean' }}, - E: {label: 'error', check: function (thingy) { return thingy instanceof Error }}, - Z: {label: 'null', check: function (thingy) { return thingy == null }} +const types = { + '*': {label: 'any', check: () => true}, + A: {label: 'array', check: _ => Array.isArray(_) || isArguments(_)}, + S: {label: 'string', check: _ => typeof _ === 'string'}, + N: {label: 'number', check: _ => typeof _ === 'number'}, + F: {label: 'function', check: _ => typeof _ === 'function'}, + O: {label: 'object', check: _ => typeof _ === 'object' && _ != null && !types.A.check(_) && !types.E.check(_)}, + B: {label: 'boolean', check: _ => typeof _ === 'boolean'}, + E: {label: 'error', check: _ => _ instanceof Error}, + Z: {label: 'null', check: _ => _ == null} } function addSchema (schema, arity) { - var group = arity[schema.length] = arity[schema.length] || [] + const group = arity[schema.length] = arity[schema.length] || [] if (group.indexOf(schema) === -1) group.push(schema) } -var validate = module.exports = function (rawSchemas, args) { +function validate (rawSchemas, args) { if (arguments.length !== 2) throw wrongNumberOfArgs(['SA'], arguments.length) if (!rawSchemas) throw missingRequiredArg(0, 'rawSchemas') if (!args) throw missingRequiredArg(1, 'args') if (!types.S.check(rawSchemas)) throw invalidType(0, ['string'], rawSchemas) if (!types.A.check(args)) throw invalidType(1, ['array'], args) - var schemas = rawSchemas.split('|') - var arity = {} + const schemas = rawSchemas.split('|') + const arity = {} - schemas.forEach(function (schema) { - for (var ii = 0; ii < schema.length; ++ii) { - var type = schema[ii] + schemas.forEach(schema => { + for (let ii = 0; ii < schema.length; ++ii) { + const type = schema[ii] if (!types[type]) throw unknownType(ii, type) } if (/E.*E/.test(schema)) throw moreThanOneError(schema) @@ -43,20 +44,18 @@ var validate = module.exports = function (rawSchemas, args) { if (schema.length === 1) addSchema('', arity) } }) - var matching = arity[args.length] + let matching = arity[args.length] if (!matching) { throw wrongNumberOfArgs(Object.keys(arity), args.length) } - for (var ii = 0; ii < args.length; ++ii) { - var newMatching = matching.filter(function (schema) { - var type = schema[ii] - var typeCheck = types[type].check + for (let ii = 0; ii < args.length; ++ii) { + let newMatching = matching.filter(schema => { + const type = schema[ii] + const typeCheck = types[type].check return typeCheck(args[ii]) }) if (!newMatching.length) { - var labels = 
matching.map(function (schema) { - return types[schema[ii]].label - }).filter(function (schema) { return schema != null }) + const labels = matching.map(_ => types[_[ii]].label).filter(_ => _ != null) throw invalidType(ii, labels, args[ii]) } matching = newMatching @@ -72,8 +71,8 @@ function unknownType (num, type) { } function invalidType (num, expectedTypes, value) { - var valueType - Object.keys(types).forEach(function (typeCode) { + let valueType + Object.keys(types).forEach(typeCode => { if (types[typeCode].check(value)) valueType = types[typeCode].label }) return newException('EINVALIDTYPE', 'Argument #' + (num + 1) + ': Expected ' + @@ -85,8 +84,8 @@ function englishList (list) { } function wrongNumberOfArgs (expected, got) { - var english = englishList(expected) - var args = expected.every(function (ex) { return ex.length === 1 }) + const english = englishList(expected) + const args = expected.every(ex => ex.length === 1) ? 'argument' : 'arguments' return newException('EWRONGARGCOUNT', 'Expected ' + english + ' ' + args + ' but got ' + got) @@ -98,8 +97,9 @@ function moreThanOneError (schema) { } function newException (code, msg) { - var e = new Error(msg) - e.code = code - if (Error.captureStackTrace) Error.captureStackTrace(e, validate) - return e + const err = new Error(msg) + err.code = code + /* istanbul ignore else */ + if (Error.captureStackTrace) Error.captureStackTrace(err, validate) + return err } diff --git a/node_modules/aproba/package.json b/node_modules/aproba/package.json index 534c6beb57e4e..42a7798b0e782 100644 --- a/node_modules/aproba/package.json +++ b/node_modules/aproba/package.json @@ -1,38 +1,29 @@ { - "_args": [ - [ - "aproba@1.2.0", - "/Users/rebecca/code/npm" - ] - ], - "_from": "aproba@1.2.0", - "_id": "aproba@1.2.0", + "_from": "aproba@2.0.0", + "_id": "aproba@2.0.0", "_inBundle": false, - "_integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==", + "_integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==", "_location": "/aproba", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "aproba@1.2.0", + "raw": "aproba@2.0.0", "name": "aproba", "escapedName": "aproba", - "rawSpec": "1.2.0", + "rawSpec": "2.0.0", "saveSpec": null, - "fetchSpec": "1.2.0" + "fetchSpec": "2.0.0" }, "_requiredBy": [ + "#USER", "/", - "/copy-concurrently", - "/gauge", - "/gentle-fs", - "/move-concurrently", - "/npm-profile", - "/run-queue" + "/npm-profile" ], - "_resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", - "_spec": "1.2.0", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", + "_shasum": "52520b8ae5b569215b354efc0caa3fe1e45a8adc", + "_spec": "aproba@2.0.0", + "_where": "/Users/aeschright/code/cli", "author": { "name": "Rebecca Turner", "email": "me@re-becca.org" @@ -40,11 +31,13 @@ "bugs": { "url": "https://github.com/iarna/aproba/issues" }, + "bundleDependencies": false, "dependencies": {}, + "deprecated": false, "description": "A ridiculously light-weight argument validator (now browser friendly)", "devDependencies": { - "standard": "^10.0.3", - "tap": "^10.0.2" + "standard": "^11.0.1", + "tap": "^12.0.1" }, "directories": { "test": "test" @@ -65,7 +58,8 @@ "url": "git+https://github.com/iarna/aproba.git" }, "scripts": { - "test": "standard && tap -j3 test/*.js" + "pretest": "standard", + "test": "tap --100 -J test/*.js" }, - "version": "1.2.0" + "version": 
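"1.2.0" + "version":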
"2.0.0" } diff --git a/node_modules/readable-stream/.travis.yml b/node_modules/are-we-there-yet/node_modules/readable-stream/.travis.yml similarity index 100% rename from node_modules/readable-stream/.travis.yml rename to node_modules/are-we-there-yet/node_modules/readable-stream/.travis.yml diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/are-we-there-yet/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 0000000000000..f478d58dca85b --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/GOVERNANCE.md b/node_modules/are-we-there-yet/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 0000000000000..16ffb93f24bec --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. 
+ +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. 
WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/LICENSE b/node_modules/are-we-there-yet/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000000000..2873b3b2e5950 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/README.md b/node_modules/are-we-there-yet/node_modules/readable-stream/README.md new file mode 100644 index 0000000000000..23fe3f3e3009a --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/README.md @@ -0,0 +1,58 @@ +# readable-stream + +***Node-core v8.11.1 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) + +```bash +npm install --save readable-stream +``` + +***Node-core streams for userland*** + +This package is a mirror of the Streams2 and Streams3 implementations in +Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.11.1/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. 
+ + +## Team Members + +* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> + - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> + - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D +* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> diff --git a/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/are-we-there-yet/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md similarity index 100% rename from node_modules/readable-stream/doc/wg-meetings/2015-01-30.md rename to node_modules/are-we-there-yet/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md diff --git a/node_modules/readable-stream/duplex-browser.js b/node_modules/are-we-there-yet/node_modules/readable-stream/duplex-browser.js similarity index 100% rename from node_modules/readable-stream/duplex-browser.js rename to node_modules/are-we-there-yet/node_modules/readable-stream/duplex-browser.js diff --git a/node_modules/readable-stream/duplex.js b/node_modules/are-we-there-yet/node_modules/readable-stream/duplex.js similarity index 100% rename from node_modules/readable-stream/duplex.js rename to node_modules/are-we-there-yet/node_modules/readable-stream/duplex.js diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000000000..a1ca813e5acbd --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,131 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. 
+// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); + + pna.nextTick(cb, err); +}; \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000000000..a9c835884828d --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,47 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000000000..bf34ac65e1108 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1019 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Readable; + +/**/ +var isArray = require('isarray'); +/**/ + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var debugUtil = require('util'); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = require('./internal/streams/BufferList'); +var destroyImpl = require('./internal/streams/destroy'); +var StringDecoder; + +util.inherits(Readable, Stream); + +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. 
+ this.highWaterMark = Math.floor(this.highWaterMark); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); + +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } + } + + return needMoreData(state); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. 
+ var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. 
+// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. 
+ function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, unpipeInfo); + }return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this, unpipeInfo); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
+function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000000000..5d1f8b876d98c --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,214 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. 
For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); + } + + ts.writechunk = null; + ts.writecb = null; + + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + + cb(er); + + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. 
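As an illustrative aside (not part of the patch or of the vendored file), the contract described in the comment above can be exercised with the published `readable-stream` package, using the `options.transform` shortcut supported by the constructor shown earlier:

```js
// Sketch only: a user-level Transform that uppercases text.
// Contract: push() any output, then call cb() exactly once per chunk;
// never calling cb() stalls the stream, as the comment above warns.
var Transform = require('readable-stream').Transform;

var upper = new Transform({
  transform: function (chunk, encoding, cb) {
    this.push(chunk.toString('utf8').toUpperCase());
    cb();
  }
});

process.stdin.pipe(upper).pipe(process.stdout);
```

The `cb` handed to the user's transform is the bound `afterTransform` defined above, which resets `writecb`/`writechunk` and may kick off the next `_read`.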
+Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; + + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); + + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000000000..b3f4e85a2f6e3 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,687 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
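Purely as an illustration of the contract summarized in the comment above (a sketch, not part of the vendored file), a minimal writable built on the published `readable-stream` package could look like this; `options.write` is the shortcut form accepted by the constructor further below:

```js
// Sketch only: supply _write via the options object; the base class
// handles buffering, back-pressure and 'drain' emission.
var Writable = require('readable-stream').Writable;

var sink = new Writable({
  write: function (chunk, encoding, cb) {
    console.log('handled %d bytes', chunk.length);
    cb(); // signal that this chunk has been fully consumed
  }
});

sink.on('finish', function () { console.log('all writes flushed'); });
sink.write('hello ');
sink.end('world');
```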
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +/**/ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +var destroyImpl = require('./internal/streams/destroy'); + +util.inherits(Writable, Stream); + +function nop() {} + +function WritableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. 
+ if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + 
asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = corkReq; + } else { + state.corkedRequestsFree = corkReq; + } +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); + +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/BufferList.js similarity index 100% rename from node_modules/readable-stream/lib/internal/streams/BufferList.js rename to node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/BufferList.js diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 0000000000000..5a0a0d88cec6f --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,74 @@ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err && (!this._writableState || !this._writableState.errorEmitted)) { + pna.nextTick(emitErrorNT, this, err); + } + return 
this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + pna.nextTick(emitErrorNT, _this, err); + if (_this._writableState) { + _this._writableState.errorEmitted = true; + } + } else if (cb) { + cb(err); + } + }); + + return this; +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy +}; \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 0000000000000..9332a3fdae706 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 0000000000000..ce2ad5b6ee57f --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/package.json b/node_modules/are-we-there-yet/node_modules/readable-stream/package.json new file mode 100644 index 0000000000000..387f98ab910c0 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/package.json @@ -0,0 +1,81 @@ +{ + "_from": "readable-stream@^2.0.6", + "_id": "readable-stream@2.3.6", + "_inBundle": false, + "_integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "_location": "/are-we-there-yet/readable-stream", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "readable-stream@^2.0.6", + "name": "readable-stream", + "escapedName": "readable-stream", + "rawSpec": "^2.0.6", + "saveSpec": null, + "fetchSpec": "^2.0.6" + }, + "_requiredBy": [ + "/are-we-there-yet" + ], + "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "_shasum": "b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf", + "_spec": "readable-stream@^2.0.6", + "_where": "/Users/aeschright/code/cli/node_modules/are-we-there-yet", + "browser": { + "util": false, + "./readable.js": "./readable-browser.js", + "./writable.js": "./writable-browser.js", + "./duplex.js": "./duplex-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "bugs": { + "url": 
"https://github.com/nodejs/readable-stream/issues" + }, + "bundleDependencies": false, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "deprecated": false, + "description": "Streams3, a user-land copy of the stream library from Node.js", + "devDependencies": { + "assert": "^1.4.0", + "babel-polyfill": "^6.9.1", + "buffer": "^4.9.0", + "lolex": "^2.3.2", + "nyc": "^6.4.0", + "tap": "^0.7.0", + "tape": "^4.8.0" + }, + "homepage": "https://github.com/nodejs/readable-stream#readme", + "keywords": [ + "readable", + "stream", + "pipe" + ], + "license": "MIT", + "main": "readable.js", + "name": "readable-stream", + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream.git" + }, + "scripts": { + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js" + }, + "version": "2.3.6" +} diff --git a/node_modules/readable-stream/passthrough.js b/node_modules/are-we-there-yet/node_modules/readable-stream/passthrough.js similarity index 100% rename from node_modules/readable-stream/passthrough.js rename to node_modules/are-we-there-yet/node_modules/readable-stream/passthrough.js diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/readable-browser.js b/node_modules/are-we-there-yet/node_modules/readable-stream/readable-browser.js new file mode 100644 index 0000000000000..e50372592ee6c --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,7 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/readable.js b/node_modules/are-we-there-yet/node_modules/readable-stream/readable.js new file mode 100644 index 0000000000000..ec89ec5330649 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/readable.js @@ -0,0 +1,19 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; + exports = module.exports = Stream.Readable; + exports.Readable = Stream.Readable; + exports.Writable = Stream.Writable; + exports.Duplex = Stream.Duplex; + exports.Transform = Stream.Transform; + exports.PassThrough = Stream.PassThrough; + exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); +} diff --git a/node_modules/readable-stream/transform.js b/node_modules/are-we-there-yet/node_modules/readable-stream/transform.js similarity index 100% rename from node_modules/readable-stream/transform.js rename to 
node_modules/are-we-there-yet/node_modules/readable-stream/transform.js diff --git a/node_modules/readable-stream/writable-browser.js b/node_modules/are-we-there-yet/node_modules/readable-stream/writable-browser.js similarity index 100% rename from node_modules/readable-stream/writable-browser.js rename to node_modules/are-we-there-yet/node_modules/readable-stream/writable-browser.js diff --git a/node_modules/readable-stream/writable.js b/node_modules/are-we-there-yet/node_modules/readable-stream/writable.js similarity index 100% rename from node_modules/readable-stream/writable.js rename to node_modules/are-we-there-yet/node_modules/readable-stream/writable.js diff --git a/node_modules/string_decoder/.travis.yml b/node_modules/are-we-there-yet/node_modules/string_decoder/.travis.yml similarity index 100% rename from node_modules/string_decoder/.travis.yml rename to node_modules/are-we-there-yet/node_modules/string_decoder/.travis.yml diff --git a/node_modules/are-we-there-yet/node_modules/string_decoder/LICENSE b/node_modules/are-we-there-yet/node_modules/string_decoder/LICENSE new file mode 100644 index 0000000000000..778edb20730ef --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + diff --git a/node_modules/are-we-there-yet/node_modules/string_decoder/README.md b/node_modules/are-we-there-yet/node_modules/string_decoder/README.md new file mode 100644 index 0000000000000..5fd58315ed588 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. + +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/node_modules/are-we-there-yet/node_modules/string_decoder/lib/string_decoder.js b/node_modules/are-we-there-yet/node_modules/string_decoder/lib/string_decoder.js new file mode 100644 index 0000000000000..2e89e63f7933e --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/string_decoder/lib/string_decoder.js @@ -0,0 +1,296 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +/**/ + +var Buffer = require('safe-buffer').Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. 
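To make the buffering behaviour described above concrete, here is a small sketch (not part of the vendored file), assuming the userland `string_decoder` package that mirrors this core module:

```js
// Sketch only: StringDecoder holds back the bytes of a partially received
// multi-byte character until the rest arrives.
var StringDecoder = require('string_decoder').StringDecoder;
var decoder = new StringDecoder('utf8');

// '€' is the three UTF-8 bytes 0xE2 0x82 0xAC; split it across two writes.
console.log(JSON.stringify(decoder.write(Buffer.from([0xe2, 0x82])))); // ""
console.log(JSON.stringify(decoder.write(Buffer.from([0xac]))));       // "€"
```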
+function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. 
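Again as a sketch only (not part of the vendored file), the surrogate case described in the comment above looks like this in practice:

```js
// Sketch only: a UTF-16LE decoder holds back a lead (high) surrogate
// until its trailing half arrives in a later write.
var StringDecoder = require('string_decoder').StringDecoder;
var d = new StringDecoder('utf16le');

// U+1F600 is the surrogate pair 0xD83D 0xDE00, i.e. the LE bytes 3D D8 00 DE.
console.log(d.write(Buffer.from([0x3d, 0xd8])) === ''); // true: lead surrogate buffered
console.log(d.write(Buffer.from([0x00, 0xde])));        // '😀'
```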
+function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/string_decoder/package.json b/node_modules/are-we-there-yet/node_modules/string_decoder/package.json new file mode 100644 index 0000000000000..1e76b8bffe408 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/string_decoder/package.json @@ -0,0 +1,59 @@ +{ + "_from": "string_decoder@~1.1.1", + "_id": "string_decoder@1.1.1", + "_inBundle": false, + "_integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "_location": "/are-we-there-yet/string_decoder", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "string_decoder@~1.1.1", + "name": "string_decoder", + "escapedName": "string_decoder", + "rawSpec": "~1.1.1", + "saveSpec": null, + "fetchSpec": "~1.1.1" + }, + "_requiredBy": [ + "/are-we-there-yet/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "_shasum": "9cf1611ba62685d7030ae9e4ba34149c3af03fc8", + "_spec": "string_decoder@~1.1.1", + "_where": "/Users/aeschright/code/cli/node_modules/are-we-there-yet/node_modules/readable-stream", + "bugs": { + "url": "https://github.com/nodejs/string_decoder/issues" + }, + "bundleDependencies": false, + "dependencies": { + "safe-buffer": "~5.1.0" + }, + "deprecated": false, + "description": "The string_decoder module from Node core", + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT", + "main": "lib/string_decoder.js", + "name": "string_decoder", + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, + 
"scripts": { + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "test": "tap test/parallel/*.js && node test/verify-dependencies" + }, + "version": "1.1.1" +} diff --git a/node_modules/bin-links/CHANGELOG.md b/node_modules/bin-links/CHANGELOG.md index fb6bd0bd1e7b0..e7b640c08d13f 100644 --- a/node_modules/bin-links/CHANGELOG.md +++ b/node_modules/bin-links/CHANGELOG.md @@ -2,6 +2,51 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [1.1.7](https://github.com/npm/bin-links/compare/v1.1.6...v1.1.7) (2019-12-26) + + +### Bug Fixes + +* resolve folder that is passed in ([0bbd303](https://github.com/npm/bin-links/commit/0bbd303)) + + + + +## [1.1.6](https://github.com/npm/bin-links/compare/v1.1.5...v1.1.6) (2019-12-11) + + +### Bug Fixes + +* prevent improper clobbering of man/bin links ([642cd18](https://github.com/npm/bin-links/commit/642cd18)), closes [#11](https://github.com/npm/bin-links/issues/11) [#12](https://github.com/npm/bin-links/issues/12) + + + + +## [1.1.5](https://github.com/npm/bin-links/compare/v1.1.4...v1.1.5) (2019-12-10) + + +### Bug Fixes + +* don't filter out ./ man references ([b3cfd2e](https://github.com/npm/bin-links/commit/b3cfd2e)) + + + + +## [1.1.4](https://github.com/npm/bin-links/compare/v1.1.3...v1.1.4) (2019-12-09) + + +### Bug Fixes + +* sanitize and validate bin and man link targets ([25a34f9](https://github.com/npm/bin-links/commit/25a34f9)) + + + + +## [1.1.3](https://github.com/npm/bin-links/compare/v1.1.2...v1.1.3) (2019-08-14) + + + ## [1.1.2](https://github.com/npm/bin-links/compare/v1.1.1...v1.1.2) (2018-03-22) diff --git a/node_modules/bin-links/index.js b/node_modules/bin-links/index.js index 5f86755475223..79c2cb585bd73 100644 --- a/node_modules/bin-links/index.js +++ b/node_modules/bin-links/index.js @@ -3,18 +3,23 @@ const path = require('path') const fs = require('graceful-fs') const BB = require('bluebird') -const linkIfExists = BB.promisify(require('gentle-fs').linkIfExists) -const cmdShimIfExists = BB.promisify(require('cmd-shim').ifExists) +const gentleFs = require('gentle-fs') +const linkIfExists = BB.promisify(gentleFs.linkIfExists) +const gentleFsBinLink = BB.promisify(gentleFs.binLink) const open = BB.promisify(fs.open) const close = BB.promisify(fs.close) const read = BB.promisify(fs.read, {multiArgs: true}) const chmod = BB.promisify(fs.chmod) const readFile = BB.promisify(fs.readFile) const writeFileAtomic = BB.promisify(require('write-file-atomic')) +const normalize = require('npm-normalize-package-bin') module.exports = BB.promisify(binLinks) function binLinks (pkg, folder, global, opts, cb) { + pkg = normalize(pkg) + folder = path.resolve(folder) + // if it's global, and folder is in {prefix}/node_modules, // then bins are in {prefix}/bin // otherwise, then bins are in folder/../.bin @@ -39,9 +44,9 @@ function isHashbangFile (file) { return read(fileHandle, Buffer.alloc(2), 0, 2, 0).spread((_, buf) => { if (!hasHashbang(buf)) return [] return read(fileHandle, Buffer.alloc(2048), 0, 2048, 0) - }).spread((_, buf) => buf && hasCR(buf), () => false) + }).spread((_, buf) => buf && hasCR(buf), /* istanbul ignore next */ () => false) .finally(() => close(fileHandle)) - }).catch(() => false) + }).catch(/* istanbul ignore next */ () => false) } function hasHashbang (buf) { @@ -77,6 +82,12 @@ function linkBins (pkg, folder, parent, gtop, opts) { var dest 
= path.resolve(binRoot, bin) var src = path.resolve(folder, pkg.bin[bin]) + /* istanbul ignore if - that unpossible */ + if (src.indexOf(folder) !== 0) { + throw new Error('invalid bin entry for package ' + + pkg._id + '. key=' + bin + ', value=' + pkg.bin[bin]) + } + return linkBin(src, dest, linkOpts).then(() => { // bins should always be executable. // XXX skip chmod on windows? @@ -100,6 +111,7 @@ function linkBins (pkg, folder, parent, gtop, opts) { opts.log.showProgress() } }).catch(err => { + /* istanbul ignore next */ if (err.code === 'ENOENT' && opts.ignoreScripts) return throw err }) @@ -107,11 +119,11 @@ function linkBins (pkg, folder, parent, gtop, opts) { } function linkBin (from, to, opts) { - if (process.platform !== 'win32') { - return linkIfExists(from, to, opts) - } else { - return cmdShimIfExists(from, to) + // do not clobber global bins + if (opts.globalBin && to.indexOf(opts.globalBin) === 0) { + opts.clobberLinkGently = true } + return gentleFsBinLink(from, to, opts) } function linkMans (pkg, folder, parent, gtop, opts) { @@ -123,15 +135,22 @@ function linkMans (pkg, folder, parent, gtop, opts) { // make sure that the mans are unique. // otherwise, if there are dupes, it'll fail with EEXIST var set = pkg.man.reduce(function (acc, man) { - acc[path.basename(man)] = man + if (typeof man !== 'string') { + return acc + } + const cleanMan = path.join('/', man).replace(/\\|:/g, '/').substr(1) + acc[path.basename(man)] = cleanMan return acc }, {}) var manpages = pkg.man.filter(function (man) { - return set[path.basename(man)] === man + if (typeof man !== 'string') { + return false + } + const cleanMan = path.join('/', man).replace(/\\|:/g, '/').substr(1) + return set[path.basename(man)] === cleanMan }) return BB.map(manpages, man => { - if (typeof man !== 'string') return opts.log.silly('linkMans', 'preparing to link', man) var parseMan = man.match(/(.*\.([0-9]+)(\.gz)?)$/) if (!parseMan) { @@ -146,8 +165,19 @@ function linkMans (pkg, folder, parent, gtop, opts) { var sxn = parseMan[2] var bn = path.basename(stem) var manSrc = path.resolve(folder, man) + /* istanbul ignore if - that unpossible */ + if (manSrc.indexOf(folder) !== 0) { + throw new Error('invalid man entry for package ' + + pkg._id + '. 
man=' + manSrc) + } + var manDest = path.join(manRoot, 'man' + sxn, bn) + // man pages should always be clobbering gently, because they are + // only installed for top-level global packages, so never destroy + // a link if it doesn't point into the folder we're linking + opts.clobberLinkGently = true + return linkIfExists(manSrc, manDest, getLinkOpts(opts, gtop && folder)) }) } diff --git a/node_modules/bin-links/package.json b/node_modules/bin-links/package.json index 0a1af20fdaec4..45393d11c5fc8 100644 --- a/node_modules/bin-links/package.json +++ b/node_modules/bin-links/package.json @@ -1,54 +1,54 @@ { - "_args": [ - [ - "bin-links@1.1.2", - "/Users/rebecca/code/npm" - ] - ], - "_from": "bin-links@1.1.2", - "_id": "bin-links@1.1.2", + "_from": "bin-links@1.1.7", + "_id": "bin-links@1.1.7", "_inBundle": false, - "_integrity": "sha512-8eEHVgYP03nILphilltWjeIjMbKyJo3wvp9K816pHbhP301ismzw15mxAAEVQ/USUwcP++1uNrbERbp8lOA6Fg==", + "_integrity": "sha512-/eaLaTu7G7/o7PV04QPy1HRT65zf+1tFkPGv0sPTV0tRwufooYBQO3zrcyGgm+ja+ZtBf2GEuKjDRJ2pPG+yqA==", "_location": "/bin-links", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "bin-links@1.1.2", + "raw": "bin-links@1.1.7", "name": "bin-links", "escapedName": "bin-links", - "rawSpec": "1.1.2", + "rawSpec": "1.1.7", "saveSpec": null, - "fetchSpec": "1.1.2" + "fetchSpec": "1.1.7" }, "_requiredBy": [ + "#USER", "/", - "/libcipm" + "/libcipm", + "/libnpm" ], - "_resolved": "https://registry.npmjs.org/bin-links/-/bin-links-1.1.2.tgz", - "_spec": "1.1.2", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/bin-links/-/bin-links-1.1.7.tgz", + "_shasum": "34b79ea9d0e575d7308afeff0c6b2fc24c793359", + "_spec": "bin-links@1.1.7", + "_where": "/Users/mperrotte/npminc/cli", "author": { "name": "Mike Sherov" }, "bugs": { "url": "https://github.com/npm/bin-links/issues" }, + "bundleDependencies": false, "dependencies": { - "bluebird": "^3.5.0", - "cmd-shim": "^2.0.2", - "gentle-fs": "^2.0.0", - "graceful-fs": "^4.1.11", + "bluebird": "^3.5.3", + "cmd-shim": "^3.0.0", + "gentle-fs": "^2.3.0", + "graceful-fs": "^4.1.15", + "npm-normalize-package-bin": "^1.0.0", "write-file-atomic": "^2.3.0" }, + "deprecated": false, "description": "JavaScript package binary linker", "devDependencies": { "mkdirp": "^0.5.1", - "nyc": "^11.1.0", - "rimraf": "^2.6.2", + "nyc": "^13.1.0", + "rimraf": "^2.6.3", "standard": "^10.0.3", - "standard-version": "^4.2.0", - "tap": "^10.7.2", + "standard-version": "^4.4.0", + "tap": "^12.1.3", "weallbehave": "^1.2.0", "weallcontribute": "^1.0.8" }, @@ -70,12 +70,12 @@ }, "scripts": { "postrelease": "npm publish && git push --follow-tags", + "posttest": "standard", "prerelease": "npm t", - "pretest": "standard", "release": "standard-version -s", - "test": "tap -J --nyc-arg=--all --coverage test/*.js", + "test": "tap -J --nyc-arg=--all --coverage test/*.js --100", "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", "update-contrib": "weallcontribute -o . 
&& git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" }, - "version": "1.1.2" + "version": "1.1.7" } diff --git a/node_modules/bl/.travis.yml b/node_modules/bl/.travis.yml new file mode 100644 index 0000000000000..1e66ab257f7d3 --- /dev/null +++ b/node_modules/bl/.travis.yml @@ -0,0 +1,13 @@ +sudo: false +language: node_js +node_js: + - '6' + - '8' + - '10' +branches: + only: + - master +notifications: + email: + - rod@vagg.org + - matteo.collina@gmail.com diff --git a/node_modules/bl/LICENSE.md b/node_modules/bl/LICENSE.md new file mode 100644 index 0000000000000..dea757d454785 --- /dev/null +++ b/node_modules/bl/LICENSE.md @@ -0,0 +1,13 @@ +The MIT License (MIT) +===================== + +Copyright (c) 2013-2018 bl contributors +---------------------------------- + +*bl contributors listed at * + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/bl/README.md b/node_modules/bl/README.md new file mode 100644 index 0000000000000..79dca35373d77 --- /dev/null +++ b/node_modules/bl/README.md @@ -0,0 +1,218 @@ +# bl *(BufferList)* + +[![Build Status](https://travis-ci.org/rvagg/bl.svg?branch=master)](https://travis-ci.org/rvagg/bl) + +**A Node.js Buffer list collector, reader and streamer thingy.** + +[![NPM](https://nodei.co/npm/bl.png?downloads=true&downloadRank=true)](https://nodei.co/npm/bl/) +[![NPM](https://nodei.co/npm-dl/bl.png?months=6&height=3)](https://nodei.co/npm/bl/) + +**bl** is a storage object for collections of Node Buffers, exposing them with the main Buffer readable API. Also works as a duplex stream so you can collect buffers from a stream that emits them and emit buffers to a stream that consumes them! + +The original buffers are kept intact and copies are only done as necessary. Any reads that require the use of a single original buffer will return a slice of that buffer only (which references the same memory as the original buffer). Reads that span buffers perform concatenation as required and return the results transparently. 
+ +```js +const BufferList = require('bl') + +var bl = new BufferList() +bl.append(Buffer.from('abcd')) +bl.append(Buffer.from('efg')) +bl.append('hi') // bl will also accept & convert Strings +bl.append(Buffer.from('j')) +bl.append(Buffer.from([ 0x3, 0x4 ])) + +console.log(bl.length) // 12 + +console.log(bl.slice(0, 10).toString('ascii')) // 'abcdefghij' +console.log(bl.slice(3, 10).toString('ascii')) // 'defghij' +console.log(bl.slice(3, 6).toString('ascii')) // 'def' +console.log(bl.slice(3, 8).toString('ascii')) // 'defgh' +console.log(bl.slice(5, 10).toString('ascii')) // 'fghij' + +console.log(bl.indexOf('def')) // 3 +console.log(bl.indexOf('asdf')) // -1 + +// or just use toString! +console.log(bl.toString()) // 'abcdefghij\u0003\u0004' +console.log(bl.toString('ascii', 3, 8)) // 'defgh' +console.log(bl.toString('ascii', 5, 10)) // 'fghij' + +// other standard Buffer readables +console.log(bl.readUInt16BE(10)) // 0x0304 +console.log(bl.readUInt16LE(10)) // 0x0403 +``` + +Give it a callback in the constructor and use it just like **[concat-stream](https://github.com/maxogden/node-concat-stream)**: + +```js +const bl = require('bl') + , fs = require('fs') + +fs.createReadStream('README.md') + .pipe(bl(function (err, data) { // note 'new' isn't strictly required + // `data` is a complete Buffer object containing the full data + console.log(data.toString()) + })) +``` + +Note that when you use the *callback* method like this, the resulting `data` parameter is a concatenation of all `Buffer` objects in the list. If you want to avoid the overhead of this concatenation (in cases of extreme performance consciousness), then avoid the *callback* method and just listen to `'end'` instead, like a standard Stream. + +Or to fetch a URL using [hyperquest](https://github.com/substack/hyperquest) (should work with [request](http://github.com/mikeal/request) and even plain Node http too!): +```js +const hyperquest = require('hyperquest') + , bl = require('bl') + , url = 'https://raw.github.com/rvagg/bl/master/README.md' + +hyperquest(url).pipe(bl(function (err, data) { + console.log(data.toString()) +})) +``` + +Or, use it as a readable stream to recompose a list of Buffers to an output source: + +```js +const BufferList = require('bl') + , fs = require('fs') + +var bl = new BufferList() +bl.append(Buffer.from('abcd')) +bl.append(Buffer.from('efg')) +bl.append(Buffer.from('hi')) +bl.append(Buffer.from('j')) + +bl.pipe(fs.createWriteStream('gibberish.txt')) +``` + +## API + + * new BufferList([ callback ]) + * bl.length + * bl.append(buffer) + * bl.get(index) + * bl.indexOf(value[, byteOffset][, encoding]) + * bl.slice([ start[, end ] ]) + * bl.shallowSlice([ start[, end ] ]) + * bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ]) + * bl.duplicate() + * bl.consume(bytes) + * bl.toString([encoding, [ start, [ end ]]]) + * bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8() + * Streams + +-------------------------------------------------------- + +### new BufferList([ callback | Buffer | Buffer array | BufferList | BufferList array | String ]) +The constructor takes an optional callback, if supplied, the callback will be called with an error argument followed by a reference to the **bl** instance, when `bl.end()` is called (i.e. from a piped stream). 
This is a convenient method of collecting the entire contents of a stream, particularly when the stream is *chunky*, such as a network stream. + +Normally, no arguments are required for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` objects. + +`new` is not strictly required; if you don't instantiate a new object, it will be done automatically for you, so you can create a new instance simply with: + +```js +var bl = require('bl') +var myinstance = bl() + +// equivalent to: + +var BufferList = require('bl') +var myinstance = new BufferList() +``` + +-------------------------------------------------------- + +### bl.length +Get the length of the list in bytes. This is the sum of the lengths of all of the buffers contained in the list, minus any initial offset for a semi-consumed buffer at the beginning. Should accurately represent the total number of bytes that can be read from the list. + +-------------------------------------------------------- + +### bl.append(Buffer | Buffer array | BufferList | BufferList array | String) +`append(buffer)` adds an additional buffer or BufferList to the internal list. `this` is returned so it can be chained. + +-------------------------------------------------------- + +### bl.get(index) +`get()` will return the byte at the specified index. + +-------------------------------------------------------- + +### bl.indexOf(value[, byteOffset][, encoding]) +The `indexOf()` method returns the first index at which a given element can be found in the BufferList, or -1 if it is not present. + +-------------------------------------------------------- + +### bl.slice([ start, [ end ] ]) +`slice()` returns a new `Buffer` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively. + +If the requested range spans a single internal buffer then a slice of that buffer will be returned which shares the original memory range of that Buffer. If the range spans multiple buffers then copy operations will likely occur to give you a uniform Buffer. + +-------------------------------------------------------- + +### bl.shallowSlice([ start, [ end ] ]) +`shallowSlice()` returns a new `BufferList` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively. + +No copies will be performed. All buffers in the result share memory with the original list. + +-------------------------------------------------------- + +### bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ]) +`copy()` copies the content of the list into the `dest` buffer, starting from `destStart` and containing the bytes within the range specified with `srcStart` to `srcEnd`. `destStart`, `srcStart` and `srcEnd` are optional and will default to the beginning of the `dest` buffer, and the beginning and end of the list respectively. + +-------------------------------------------------------- + +### bl.duplicate() +`duplicate()` performs a **shallow-copy** of the list. The internal Buffers remain the same, so if you change the underlying Buffers, the change will be reflected in both the original and the duplicate.
This method is needed if you want to call `consume()` or `pipe()` and still keep the original list. Example: + +```js +var bl = new BufferList() + +bl.append('hello') +bl.append(' world') +bl.append('\n') + +bl.duplicate().pipe(process.stdout, { end: false }) + +console.log(bl.toString()) +``` + +-------------------------------------------------------- + +### bl.consume(bytes) +`consume()` will shift bytes *off the start of the list*. The number of bytes consumed doesn't need to line up with the sizes of the internal Buffers; initial offsets will be calculated accordingly in order to give you a consistent view of the data. + +-------------------------------------------------------- + +### bl.toString([encoding, [ start, [ end ]]]) +`toString()` will return a string representation of the buffer. The optional `start` and `end` arguments are passed on to `slice()`, while the `encoding` is passed on to `toString()` of the resulting Buffer. See the [Buffer#toString()](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end) documentation for more information. + +-------------------------------------------------------- + +### bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8() + +All of the standard byte-reading methods of the `Buffer` interface are implemented and will operate across internal Buffer boundaries transparently. + +See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work. + +-------------------------------------------------------- + +### Streams +**bl** is a Node **[Duplex Stream](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_duplex)**, so it can be read from and written to like a standard Node stream. You can also `pipe()` to and from a **bl** instance. + +-------------------------------------------------------- + +## Contributors + +**bl** is brought to you by the following hackers: + + * [Rod Vagg](https://github.com/rvagg) + * [Matteo Collina](https://github.com/mcollina) + * [Jarett Cruger](https://github.com/jcrugzz) + +## License & copyright + +Copyright (c) 2013-2018 bl contributors (listed above). + +bl is licensed under the MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details.
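As a quick recap of the README reference above, here is a small usage sketch (added here for illustration, not part of the vendored bl README) showing how the documented constructor, `indexOf()`, `shallowSlice()` and `consume()` calls fit together:

```js
const BufferList = require('bl')

// Build a list from two Buffers; the originals are kept intact, no copying on append.
const bl = new BufferList([ Buffer.from('hello '), Buffer.from('world') ])

console.log(bl.length)                      // 11 bytes across both internal Buffers
console.log(bl.indexOf('lo w'))             // 3 - the match spans the internal buffer boundary
console.log(bl.shallowSlice(6).toString())  // 'world' - shares memory with the list, no copy

bl.consume(6)                               // shift 'hello ' off the start of the list
console.log(bl.toString())                  // 'world'
```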
diff --git a/node_modules/bl/bl.js b/node_modules/bl/bl.js new file mode 100644 index 0000000000000..e0eef85a3b67c --- /dev/null +++ b/node_modules/bl/bl.js @@ -0,0 +1,382 @@ +'use strict' +var DuplexStream = require('readable-stream').Duplex + , util = require('util') + +function BufferList (callback) { + if (!(this instanceof BufferList)) + return new BufferList(callback) + + this._bufs = [] + this.length = 0 + + if (typeof callback == 'function') { + this._callback = callback + + var piper = function piper (err) { + if (this._callback) { + this._callback(err) + this._callback = null + } + }.bind(this) + + this.on('pipe', function onPipe (src) { + src.on('error', piper) + }) + this.on('unpipe', function onUnpipe (src) { + src.removeListener('error', piper) + }) + } else { + this.append(callback) + } + + DuplexStream.call(this) +} + + +util.inherits(BufferList, DuplexStream) + + +BufferList.prototype._offset = function _offset (offset) { + var tot = 0, i = 0, _t + if (offset === 0) return [ 0, 0 ] + for (; i < this._bufs.length; i++) { + _t = tot + this._bufs[i].length + if (offset < _t || i == this._bufs.length - 1) { + return [ i, offset - tot ] + } + tot = _t + } +} + +BufferList.prototype._reverseOffset = function (blOffset) { + var bufferId = blOffset[0] + var offset = blOffset[1] + for (var i = 0; i < bufferId; i++) { + offset += this._bufs[i].length + } + return offset +} + +BufferList.prototype.append = function append (buf) { + var i = 0 + + if (Buffer.isBuffer(buf)) { + this._appendBuffer(buf) + } else if (Array.isArray(buf)) { + for (; i < buf.length; i++) + this.append(buf[i]) + } else if (buf instanceof BufferList) { + // unwrap argument into individual BufferLists + for (; i < buf._bufs.length; i++) + this.append(buf._bufs[i]) + } else if (buf != null) { + // coerce number arguments to strings, since Buffer(number) does + // uninitialized memory allocation + if (typeof buf == 'number') + buf = buf.toString() + + this._appendBuffer(Buffer.from(buf)) + } + + return this +} + + +BufferList.prototype._appendBuffer = function appendBuffer (buf) { + this._bufs.push(buf) + this.length += buf.length +} + + +BufferList.prototype._write = function _write (buf, encoding, callback) { + this._appendBuffer(buf) + + if (typeof callback == 'function') + callback() +} + + +BufferList.prototype._read = function _read (size) { + if (!this.length) + return this.push(null) + + size = Math.min(size, this.length) + this.push(this.slice(0, size)) + this.consume(size) +} + + +BufferList.prototype.end = function end (chunk) { + DuplexStream.prototype.end.call(this, chunk) + + if (this._callback) { + this._callback(null, this.slice()) + this._callback = null + } +} + + +BufferList.prototype.get = function get (index) { + if (index > this.length || index < 0) { + return undefined + } + var offset = this._offset(index) + return this._bufs[offset[0]][offset[1]] +} + + +BufferList.prototype.slice = function slice (start, end) { + if (typeof start == 'number' && start < 0) + start += this.length + if (typeof end == 'number' && end < 0) + end += this.length + return this.copy(null, 0, start, end) +} + + +BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) { + if (typeof srcStart != 'number' || srcStart < 0) + srcStart = 0 + if (typeof srcEnd != 'number' || srcEnd > this.length) + srcEnd = this.length + if (srcStart >= this.length) + return dst || Buffer.alloc(0) + if (srcEnd <= 0) + return dst || Buffer.alloc(0) + + var copy = !!dst + , off = this._offset(srcStart) + , len = srcEnd - 
srcStart + , bytes = len + , bufoff = (copy && dstStart) || 0 + , start = off[1] + , l + , i + + // copy/slice everything + if (srcStart === 0 && srcEnd == this.length) { + if (!copy) { // slice, but full concat if multiple buffers + return this._bufs.length === 1 + ? this._bufs[0] + : Buffer.concat(this._bufs, this.length) + } + + // copy, need to copy individual buffers + for (i = 0; i < this._bufs.length; i++) { + this._bufs[i].copy(dst, bufoff) + bufoff += this._bufs[i].length + } + + return dst + } + + // easy, cheap case where it's a subset of one of the buffers + if (bytes <= this._bufs[off[0]].length - start) { + return copy + ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes) + : this._bufs[off[0]].slice(start, start + bytes) + } + + if (!copy) // a slice, we need something to copy in to + dst = Buffer.allocUnsafe(len) + + for (i = off[0]; i < this._bufs.length; i++) { + l = this._bufs[i].length - start + + if (bytes > l) { + this._bufs[i].copy(dst, bufoff, start) + } else { + this._bufs[i].copy(dst, bufoff, start, start + bytes) + break + } + + bufoff += l + bytes -= l + + if (start) + start = 0 + } + + return dst +} + +BufferList.prototype.shallowSlice = function shallowSlice (start, end) { + start = start || 0 + end = typeof end !== 'number' ? this.length : end + + if (start < 0) + start += this.length + if (end < 0) + end += this.length + + if (start === end) { + return new BufferList() + } + var startOffset = this._offset(start) + , endOffset = this._offset(end) + , buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1) + + if (endOffset[1] == 0) + buffers.pop() + else + buffers[buffers.length-1] = buffers[buffers.length-1].slice(0, endOffset[1]) + + if (startOffset[1] != 0) + buffers[0] = buffers[0].slice(startOffset[1]) + + return new BufferList(buffers) +} + +BufferList.prototype.toString = function toString (encoding, start, end) { + return this.slice(start, end).toString(encoding) +} + +BufferList.prototype.consume = function consume (bytes) { + while (this._bufs.length) { + if (bytes >= this._bufs[0].length) { + bytes -= this._bufs[0].length + this.length -= this._bufs[0].length + this._bufs.shift() + } else { + this._bufs[0] = this._bufs[0].slice(bytes) + this.length -= bytes + break + } + } + return this +} + + +BufferList.prototype.duplicate = function duplicate () { + var i = 0 + , copy = new BufferList() + + for (; i < this._bufs.length; i++) + copy.append(this._bufs[i]) + + return copy +} + + +BufferList.prototype._destroy = function _destroy (err, cb) { + this._bufs.length = 0 + this.length = 0 + cb(err) +} + + +BufferList.prototype.indexOf = function (search, offset, encoding) { + if (encoding === undefined && typeof offset === 'string') { + encoding = offset + offset = undefined + } + if (typeof search === 'function' || Array.isArray(search)) { + throw new TypeError('The "value" argument must be one of type string, Buffer, BufferList, or Uint8Array.') + } else if (typeof search === 'number') { + search = Buffer.from([search]) + } else if (typeof search === 'string') { + search = Buffer.from(search, encoding) + } else if (search instanceof BufferList) { + search = search.slice() + } else if (!Buffer.isBuffer(search)) { + search = Buffer.from(search) + } + + offset = Number(offset || 0) + if (isNaN(offset)) { + offset = 0 + } + + if (offset < 0) { + offset = this.length + offset + } + + if (offset < 0) { + offset = 0 + } + + if (search.length === 0) { + return offset > this.length ? 
this.length : offset + } + + var blOffset = this._offset(offset) + var blIndex = blOffset[0] // index of which internal buffer we're working on + var buffOffset = blOffset[1] // offset of the internal buffer we're working on + + // scan over each buffer + for (blIndex; blIndex < this._bufs.length; blIndex++) { + var buff = this._bufs[blIndex] + while(buffOffset < buff.length) { + var availableWindow = buff.length - buffOffset + if (availableWindow >= search.length) { + var nativeSearchResult = buff.indexOf(search, buffOffset) + if (nativeSearchResult !== -1) { + return this._reverseOffset([blIndex, nativeSearchResult]) + } + buffOffset = buff.length - search.length + 1 // end of native search window + } else { + var revOffset = this._reverseOffset([blIndex, buffOffset]) + if (this._match(revOffset, search)) { + return revOffset + } + buffOffset++ + } + } + buffOffset = 0 + } + return -1 +} + +BufferList.prototype._match = function(offset, search) { + if (this.length - offset < search.length) { + return false + } + for (var searchOffset = 0; searchOffset < search.length ; searchOffset++) { + if(this.get(offset + searchOffset) !== search[searchOffset]){ + return false + } + } + return true +} + + +;(function () { + var methods = { + 'readDoubleBE' : 8 + , 'readDoubleLE' : 8 + , 'readFloatBE' : 4 + , 'readFloatLE' : 4 + , 'readInt32BE' : 4 + , 'readInt32LE' : 4 + , 'readUInt32BE' : 4 + , 'readUInt32LE' : 4 + , 'readInt16BE' : 2 + , 'readInt16LE' : 2 + , 'readUInt16BE' : 2 + , 'readUInt16LE' : 2 + , 'readInt8' : 1 + , 'readUInt8' : 1 + , 'readIntBE' : null + , 'readIntLE' : null + , 'readUIntBE' : null + , 'readUIntLE' : null + } + + for (var m in methods) { + (function (m) { + if (methods[m] === null) { + BufferList.prototype[m] = function (offset, byteLength) { + return this.slice(offset, offset + byteLength)[m](0, byteLength) + } + } + else { + BufferList.prototype[m] = function (offset) { + return this.slice(offset, offset + methods[m])[m](0) + } + } + }(m)) + } +}()) + + +module.exports = BufferList diff --git a/node_modules/bl/package.json b/node_modules/bl/package.json new file mode 100644 index 0000000000000..85611a6cb5c47 --- /dev/null +++ b/node_modules/bl/package.json @@ -0,0 +1,62 @@ +{ + "_from": "bl@^3.0.0", + "_id": "bl@3.0.0", + "_inBundle": false, + "_integrity": "sha512-EUAyP5UHU5hxF8BPT0LKW8gjYLhq1DQIcneOX/pL/m2Alo+OYDQAJlHq+yseMP50Os2nHXOSic6Ss3vSQeyf4A==", + "_location": "/bl", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "bl@^3.0.0", + "name": "bl", + "escapedName": "bl", + "rawSpec": "^3.0.0", + "saveSpec": null, + "fetchSpec": "^3.0.0" + }, + "_requiredBy": [ + "/tar-stream" + ], + "_resolved": "https://registry.npmjs.org/bl/-/bl-3.0.0.tgz", + "_shasum": "3611ec00579fd18561754360b21e9f784500ff88", + "_spec": "bl@^3.0.0", + "_where": "/Users/aeschright/code/cli/node_modules/tar-stream", + "authors": [ + "Rod Vagg (https://github.com/rvagg)", + "Matteo Collina (https://github.com/mcollina)", + "Jarett Cruger (https://github.com/jcrugzz)" + ], + "bugs": { + "url": "https://github.com/rvagg/bl/issues" + }, + "bundleDependencies": false, + "dependencies": { + "readable-stream": "^3.0.1" + }, + "deprecated": false, + "description": "Buffer List: collect buffers and access with a standard readable Buffer interface, streamable too!", + "devDependencies": { + "faucet": "0.0.1", + "hash_file": "~0.1.1", + "tape": "~4.9.1" + }, + "homepage": "https://github.com/rvagg/bl", + "keywords": [ + "buffer", + "buffers", + "stream", + 
"awesomesauce" + ], + "license": "MIT", + "main": "bl.js", + "name": "bl", + "repository": { + "type": "git", + "url": "git+https://github.com/rvagg/bl.git" + }, + "scripts": { + "test": "node test/test.js | faucet" + }, + "version": "3.0.0" +} diff --git a/node_modules/bl/test/indexOf.js b/node_modules/bl/test/indexOf.js new file mode 100644 index 0000000000000..68435e1ae415b --- /dev/null +++ b/node_modules/bl/test/indexOf.js @@ -0,0 +1,463 @@ +'use strict' + +var tape = require('tape') + , BufferList = require('../') + , Buffer = require('safe-buffer').Buffer + +tape('indexOf single byte needle', t => { + const bl = new BufferList(['abcdefg', 'abcdefg', '12345']) + t.equal(bl.indexOf('e'), 4) + t.equal(bl.indexOf('e', 5), 11) + t.equal(bl.indexOf('e', 12), -1) + t.equal(bl.indexOf('5'), 18) + t.end() +}) + +tape('indexOf multiple byte needle', t => { + const bl = new BufferList(['abcdefg', 'abcdefg']) + t.equal(bl.indexOf('ef'), 4) + t.equal(bl.indexOf('ef', 5), 11) + t.end() +}) + +tape('indexOf multiple byte needles across buffer boundaries', t => { + const bl = new BufferList(['abcdefg', 'abcdefg']) + t.equal(bl.indexOf('fgabc'), 5) + t.end() +}) + +tape('indexOf takes a buffer list search', t => { + const bl = new BufferList(['abcdefg', 'abcdefg']) + const search = new BufferList('fgabc') + t.equal(bl.indexOf(search), 5) + t.end() +}) + +tape('indexOf a zero byte needle', t => { + const b = new BufferList('abcdef') + const buf_empty = Buffer.from('') + t.equal(b.indexOf(''), 0) + t.equal(b.indexOf('', 1), 1) + t.equal(b.indexOf('', b.length + 1), b.length) + t.equal(b.indexOf('', Infinity), b.length) + t.equal(b.indexOf(buf_empty), 0) + t.equal(b.indexOf(buf_empty, 1), 1) + t.equal(b.indexOf(buf_empty, b.length + 1), b.length) + t.equal(b.indexOf(buf_empty, Infinity), b.length) + t.end() +}) + +tape('indexOf buffers smaller and larger than the needle', t => { + const bl = new BufferList(['abcdefg', 'a', 'bcdefg', 'a', 'bcfgab']) + t.equal(bl.indexOf('fgabc'), 5) + t.equal(bl.indexOf('fgabc', 6), 12) + t.equal(bl.indexOf('fgabc', 13), -1) + t.end() +}) + +// only present in node 6+ +;(process.version.substr(1).split('.')[0] >= 6) && tape('indexOf latin1 and binary encoding', t => { + const b = new BufferList('abcdef') + + // test latin1 encoding + t.equal( + new BufferList(Buffer.from(b.toString('latin1'), 'latin1')) + .indexOf('d', 0, 'latin1'), + 3 + ) + t.equal( + new BufferList(Buffer.from(b.toString('latin1'), 'latin1')) + .indexOf(Buffer.from('d', 'latin1'), 0, 'latin1'), + 3 + ) + t.equal( + new BufferList(Buffer.from('aa\u00e8aa', 'latin1')) + .indexOf('\u00e8', 'latin1'), + 2 + ) + t.equal( + new BufferList(Buffer.from('\u00e8', 'latin1')) + .indexOf('\u00e8', 'latin1'), + 0 + ) + t.equal( + new BufferList(Buffer.from('\u00e8', 'latin1')) + .indexOf(Buffer.from('\u00e8', 'latin1'), 'latin1'), + 0 + ) + + // test binary encoding + t.equal( + new BufferList(Buffer.from(b.toString('binary'), 'binary')) + .indexOf('d', 0, 'binary'), + 3 + ) + t.equal( + new BufferList(Buffer.from(b.toString('binary'), 'binary')) + .indexOf(Buffer.from('d', 'binary'), 0, 'binary'), + 3 + ) + t.equal( + new BufferList(Buffer.from('aa\u00e8aa', 'binary')) + .indexOf('\u00e8', 'binary'), + 2 + ) + t.equal( + new BufferList(Buffer.from('\u00e8', 'binary')) + .indexOf('\u00e8', 'binary'), + 0 + ) + t.equal( + new BufferList(Buffer.from('\u00e8', 'binary')) + .indexOf(Buffer.from('\u00e8', 'binary'), 'binary'), + 0 + ) + t.end() +}) + +tape('indexOf the entire nodejs10 buffer test suite', t => { + 
const b = new BufferList('abcdef') + const buf_a = Buffer.from('a') + const buf_bc = Buffer.from('bc') + const buf_f = Buffer.from('f') + const buf_z = Buffer.from('z') + + const stringComparison = 'abcdef' + + t.equal(b.indexOf('a'), 0) + t.equal(b.indexOf('a', 1), -1) + t.equal(b.indexOf('a', -1), -1) + t.equal(b.indexOf('a', -4), -1) + t.equal(b.indexOf('a', -b.length), 0) + t.equal(b.indexOf('a', NaN), 0) + t.equal(b.indexOf('a', -Infinity), 0) + t.equal(b.indexOf('a', Infinity), -1) + t.equal(b.indexOf('bc'), 1) + t.equal(b.indexOf('bc', 2), -1) + t.equal(b.indexOf('bc', -1), -1) + t.equal(b.indexOf('bc', -3), -1) + t.equal(b.indexOf('bc', -5), 1) + t.equal(b.indexOf('bc', NaN), 1) + t.equal(b.indexOf('bc', -Infinity), 1) + t.equal(b.indexOf('bc', Infinity), -1) + t.equal(b.indexOf('f'), b.length - 1) + t.equal(b.indexOf('z'), -1) + // empty search tests + t.equal(b.indexOf(buf_a), 0) + t.equal(b.indexOf(buf_a, 1), -1) + t.equal(b.indexOf(buf_a, -1), -1) + t.equal(b.indexOf(buf_a, -4), -1) + t.equal(b.indexOf(buf_a, -b.length), 0) + t.equal(b.indexOf(buf_a, NaN), 0) + t.equal(b.indexOf(buf_a, -Infinity), 0) + t.equal(b.indexOf(buf_a, Infinity), -1) + t.equal(b.indexOf(buf_bc), 1) + t.equal(b.indexOf(buf_bc, 2), -1) + t.equal(b.indexOf(buf_bc, -1), -1) + t.equal(b.indexOf(buf_bc, -3), -1) + t.equal(b.indexOf(buf_bc, -5), 1) + t.equal(b.indexOf(buf_bc, NaN), 1) + t.equal(b.indexOf(buf_bc, -Infinity), 1) + t.equal(b.indexOf(buf_bc, Infinity), -1) + t.equal(b.indexOf(buf_f), b.length - 1) + t.equal(b.indexOf(buf_z), -1) + t.equal(b.indexOf(0x61), 0) + t.equal(b.indexOf(0x61, 1), -1) + t.equal(b.indexOf(0x61, -1), -1) + t.equal(b.indexOf(0x61, -4), -1) + t.equal(b.indexOf(0x61, -b.length), 0) + t.equal(b.indexOf(0x61, NaN), 0) + t.equal(b.indexOf(0x61, -Infinity), 0) + t.equal(b.indexOf(0x61, Infinity), -1) + t.equal(b.indexOf(0x0), -1) + + // test offsets + t.equal(b.indexOf('d', 2), 3) + t.equal(b.indexOf('f', 5), 5) + t.equal(b.indexOf('f', -1), 5) + t.equal(b.indexOf('f', 6), -1) + + t.equal(b.indexOf(Buffer.from('d'), 2), 3) + t.equal(b.indexOf(Buffer.from('f'), 5), 5) + t.equal(b.indexOf(Buffer.from('f'), -1), 5) + t.equal(b.indexOf(Buffer.from('f'), 6), -1) + + t.equal(Buffer.from('ff').indexOf(Buffer.from('f'), 1, 'ucs2'), -1) + + // test invalid and uppercase encoding + t.equal(b.indexOf('b', 'utf8'), 1) + t.equal(b.indexOf('b', 'UTF8'), 1) + t.equal(b.indexOf('62', 'HEX'), 1) + t.throws(() => b.indexOf('bad', 'enc'), TypeError) + + // test hex encoding + t.equal( + Buffer.from(b.toString('hex'), 'hex') + .indexOf('64', 0, 'hex'), + 3 + ) + t.equal( + Buffer.from(b.toString('hex'), 'hex') + .indexOf(Buffer.from('64', 'hex'), 0, 'hex'), + 3 + ) + + // test base64 encoding + t.equal( + Buffer.from(b.toString('base64'), 'base64') + .indexOf('ZA==', 0, 'base64'), + 3 + ) + t.equal( + Buffer.from(b.toString('base64'), 'base64') + .indexOf(Buffer.from('ZA==', 'base64'), 0, 'base64'), + 3 + ) + + // test ascii encoding + t.equal( + Buffer.from(b.toString('ascii'), 'ascii') + .indexOf('d', 0, 'ascii'), + 3 + ) + t.equal( + Buffer.from(b.toString('ascii'), 'ascii') + .indexOf(Buffer.from('d', 'ascii'), 0, 'ascii'), + 3 + ) + + // test optional offset with passed encoding + t.equal(Buffer.from('aaaa0').indexOf('30', 'hex'), 4) + t.equal(Buffer.from('aaaa00a').indexOf('3030', 'hex'), 4) + + { + // test usc2 encoding + const twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2') + + t.equal(8, twoByteString.indexOf('\u0395', 4, 'ucs2')) + t.equal(6, 
twoByteString.indexOf('\u03a3', -4, 'ucs2')) + t.equal(4, twoByteString.indexOf('\u03a3', -6, 'ucs2')) + t.equal(4, twoByteString.indexOf( + Buffer.from('\u03a3', 'ucs2'), -6, 'ucs2')) + t.equal(-1, twoByteString.indexOf('\u03a3', -2, 'ucs2')) + } + + const mixedByteStringUcs2 = + Buffer.from('\u039a\u0391abc\u03a3\u03a3\u0395', 'ucs2') + t.equal(6, mixedByteStringUcs2.indexOf('bc', 0, 'ucs2')) + t.equal(10, mixedByteStringUcs2.indexOf('\u03a3', 0, 'ucs2')) + t.equal(-1, mixedByteStringUcs2.indexOf('\u0396', 0, 'ucs2')) + + t.equal( + 6, mixedByteStringUcs2.indexOf(Buffer.from('bc', 'ucs2'), 0, 'ucs2')) + t.equal( + 10, mixedByteStringUcs2.indexOf(Buffer.from('\u03a3', 'ucs2'), 0, 'ucs2')) + t.equal( + -1, mixedByteStringUcs2.indexOf(Buffer.from('\u0396', 'ucs2'), 0, 'ucs2')) + + { + const twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2') + + // Test single char pattern + t.equal(0, twoByteString.indexOf('\u039a', 0, 'ucs2')) + let index = twoByteString.indexOf('\u0391', 0, 'ucs2') + t.equal(2, index, `Alpha - at index ${index}`) + index = twoByteString.indexOf('\u03a3', 0, 'ucs2') + t.equal(4, index, `First Sigma - at index ${index}`) + index = twoByteString.indexOf('\u03a3', 6, 'ucs2') + t.equal(6, index, `Second Sigma - at index ${index}`) + index = twoByteString.indexOf('\u0395', 0, 'ucs2') + t.equal(8, index, `Epsilon - at index ${index}`) + index = twoByteString.indexOf('\u0392', 0, 'ucs2') + t.equal(-1, index, `Not beta - at index ${index}`) + + // Test multi-char pattern + index = twoByteString.indexOf('\u039a\u0391', 0, 'ucs2') + t.equal(0, index, `Lambda Alpha - at index ${index}`) + index = twoByteString.indexOf('\u0391\u03a3', 0, 'ucs2') + t.equal(2, index, `Alpha Sigma - at index ${index}`) + index = twoByteString.indexOf('\u03a3\u03a3', 0, 'ucs2') + t.equal(4, index, `Sigma Sigma - at index ${index}`) + index = twoByteString.indexOf('\u03a3\u0395', 0, 'ucs2') + t.equal(6, index, `Sigma Epsilon - at index ${index}`) + } + + const mixedByteStringUtf8 = Buffer.from('\u039a\u0391abc\u03a3\u03a3\u0395') + t.equal(5, mixedByteStringUtf8.indexOf('bc')) + t.equal(5, mixedByteStringUtf8.indexOf('bc', 5)) + t.equal(5, mixedByteStringUtf8.indexOf('bc', -8)) + t.equal(7, mixedByteStringUtf8.indexOf('\u03a3')) + t.equal(-1, mixedByteStringUtf8.indexOf('\u0396')) + + + // Test complex string indexOf algorithms. Only trigger for long strings. + // Long string that isn't a simple repeat of a shorter string. + let longString = 'A' + for (let i = 66; i < 76; i++) { // from 'B' to 'K' + longString = longString + String.fromCharCode(i) + longString + } + + const longBufferString = Buffer.from(longString) + + // pattern of 15 chars, repeated every 16 chars in long + let pattern = 'ABACABADABACABA' + for (let i = 0; i < longBufferString.length - pattern.length; i += 7) { + const index = longBufferString.indexOf(pattern, i) + t.equal((i + 15) & ~0xf, index, + `Long ABACABA...-string at index ${i}`) + } + + let index = longBufferString.indexOf('AJABACA') + t.equal(510, index, `Long AJABACA, First J - at index ${index}`) + index = longBufferString.indexOf('AJABACA', 511) + t.equal(1534, index, `Long AJABACA, Second J - at index ${index}`) + + pattern = 'JABACABADABACABA' + index = longBufferString.indexOf(pattern) + t.equal(511, index, `Long JABACABA..., First J - at index ${index}`) + index = longBufferString.indexOf(pattern, 512) + t.equal( + 1535, index, `Long JABACABA..., Second J - at index ${index}`) + + // Search for a non-ASCII string in a pure ASCII string. 
+ const asciiString = Buffer.from( + 'arglebargleglopglyfarglebargleglopglyfarglebargleglopglyf') + t.equal(-1, asciiString.indexOf('\x2061')) + t.equal(3, asciiString.indexOf('leb', 0)) + + // Search in string containing many non-ASCII chars. + const allCodePoints = [] + for (let i = 0; i < 65536; i++) allCodePoints[i] = i + const allCharsString = String.fromCharCode.apply(String, allCodePoints) + const allCharsBufferUtf8 = Buffer.from(allCharsString) + const allCharsBufferUcs2 = Buffer.from(allCharsString, 'ucs2') + + // Search for string long enough to trigger complex search with ASCII pattern + // and UC16 subject. + t.equal(-1, allCharsBufferUtf8.indexOf('notfound')) + t.equal(-1, allCharsBufferUcs2.indexOf('notfound')) + + // Needle is longer than haystack, but only because it's encoded as UTF-16 + t.equal(Buffer.from('aaaa').indexOf('a'.repeat(4), 'ucs2'), -1) + + t.equal(Buffer.from('aaaa').indexOf('a'.repeat(4), 'utf8'), 0) + t.equal(Buffer.from('aaaa').indexOf('你好', 'ucs2'), -1) + + // Haystack has odd length, but the needle is UCS2. + t.equal(Buffer.from('aaaaa').indexOf('b', 'ucs2'), -1) + + { + // Find substrings in Utf8. + const lengths = [1, 3, 15]; // Single char, simple and complex. + const indices = [0x5, 0x60, 0x400, 0x680, 0x7ee, 0xFF02, 0x16610, 0x2f77b] + for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) { + for (let i = 0; i < indices.length; i++) { + const index = indices[i] + let length = lengths[lengthIndex] + + if (index + length > 0x7F) { + length = 2 * length + } + + if (index + length > 0x7FF) { + length = 3 * length + } + + if (index + length > 0xFFFF) { + length = 4 * length + } + + const patternBufferUtf8 = allCharsBufferUtf8.slice(index, index + length) + t.equal(index, allCharsBufferUtf8.indexOf(patternBufferUtf8)) + + const patternStringUtf8 = patternBufferUtf8.toString() + t.equal(index, allCharsBufferUtf8.indexOf(patternStringUtf8)) + } + } + } + + { + // Find substrings in Usc2. + const lengths = [2, 4, 16]; // Single char, simple and complex. + const indices = [0x5, 0x65, 0x105, 0x205, 0x285, 0x2005, 0x2085, 0xfff0] + for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) { + for (let i = 0; i < indices.length; i++) { + const index = indices[i] * 2 + const length = lengths[lengthIndex] + + const patternBufferUcs2 = + allCharsBufferUcs2.slice(index, index + length) + t.equal( + index, allCharsBufferUcs2.indexOf(patternBufferUcs2, 0, 'ucs2')) + + const patternStringUcs2 = patternBufferUcs2.toString('ucs2') + t.equal( + index, allCharsBufferUcs2.indexOf(patternStringUcs2, 0, 'ucs2')) + } + } + } + + [ + () => {}, + {}, + [] + ].forEach(val => { + debugger + t.throws(() => b.indexOf(val), TypeError, `"${JSON.stringify(val)}" should throw`) + }) + + // Test weird offset arguments. 
+ // The following offsets coerce to NaN or 0, searching the whole Buffer + t.equal(b.indexOf('b', undefined), 1) + t.equal(b.indexOf('b', {}), 1) + t.equal(b.indexOf('b', 0), 1) + t.equal(b.indexOf('b', null), 1) + t.equal(b.indexOf('b', []), 1) + + // The following offset coerces to 2, in other words +[2] === 2 + t.equal(b.indexOf('b', [2]), -1) + + // Behavior should match String.indexOf() + t.equal( + b.indexOf('b', undefined), + stringComparison.indexOf('b', undefined)) + t.equal( + b.indexOf('b', {}), + stringComparison.indexOf('b', {})) + t.equal( + b.indexOf('b', 0), + stringComparison.indexOf('b', 0)) + t.equal( + b.indexOf('b', null), + stringComparison.indexOf('b', null)) + t.equal( + b.indexOf('b', []), + stringComparison.indexOf('b', [])) + t.equal( + b.indexOf('b', [2]), + stringComparison.indexOf('b', [2])) + + // test truncation of Number arguments to uint8 + { + const buf = Buffer.from('this is a test') + t.equal(buf.indexOf(0x6973), 3) + t.equal(buf.indexOf(0x697320), 4) + t.equal(buf.indexOf(0x69732069), 2) + t.equal(buf.indexOf(0x697374657374), 0) + t.equal(buf.indexOf(0x69737374), 0) + t.equal(buf.indexOf(0x69737465), 11) + t.equal(buf.indexOf(0x69737465), 11) + t.equal(buf.indexOf(-140), 0) + t.equal(buf.indexOf(-152), 1) + t.equal(buf.indexOf(0xff), -1) + t.equal(buf.indexOf(0xffff), -1) + } + + // Test that Uint8Array arguments are okay. + { + const needle = new Uint8Array([ 0x66, 0x6f, 0x6f ]) + const haystack = new BufferList(Buffer.from('a foo b foo')) + t.equal(haystack.indexOf(needle), 2) + } + t.end() +}) diff --git a/node_modules/bl/test/test.js b/node_modules/bl/test/test.js new file mode 100644 index 0000000000000..1da0293b6d146 --- /dev/null +++ b/node_modules/bl/test/test.js @@ -0,0 +1,780 @@ +'use strict' + +var tape = require('tape') + , crypto = require('crypto') + , fs = require('fs') + , hash = require('hash_file') + , BufferList = require('../') + , Buffer = require('safe-buffer').Buffer + + , encodings = + ('hex utf8 utf-8 ascii binary base64' + + (process.browser ? 
'' : ' ucs2 ucs-2 utf16le utf-16le')).split(' ') + +// run the indexOf tests +require('./indexOf') + +tape('single bytes from single buffer', function (t) { + var bl = new BufferList() + bl.append(Buffer.from('abcd')) + + t.equal(bl.length, 4) + t.equal(bl.get(-1), undefined) + t.equal(bl.get(0), 97) + t.equal(bl.get(1), 98) + t.equal(bl.get(2), 99) + t.equal(bl.get(3), 100) + t.equal(bl.get(4), undefined) + + t.end() +}) + +tape('single bytes from multiple buffers', function (t) { + var bl = new BufferList() + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.length, 10) + + t.equal(bl.get(0), 97) + t.equal(bl.get(1), 98) + t.equal(bl.get(2), 99) + t.equal(bl.get(3), 100) + t.equal(bl.get(4), 101) + t.equal(bl.get(5), 102) + t.equal(bl.get(6), 103) + t.equal(bl.get(7), 104) + t.equal(bl.get(8), 105) + t.equal(bl.get(9), 106) + t.end() +}) + +tape('multi bytes from single buffer', function (t) { + var bl = new BufferList() + bl.append(Buffer.from('abcd')) + + t.equal(bl.length, 4) + + t.equal(bl.slice(0, 4).toString('ascii'), 'abcd') + t.equal(bl.slice(0, 3).toString('ascii'), 'abc') + t.equal(bl.slice(1, 4).toString('ascii'), 'bcd') + t.equal(bl.slice(-4, -1).toString('ascii'), 'abc') + + t.end() +}) + +tape('multi bytes from single buffer (negative indexes)', function (t) { + var bl = new BufferList() + bl.append(Buffer.from('buffer')) + + t.equal(bl.length, 6) + + t.equal(bl.slice(-6, -1).toString('ascii'), 'buffe') + t.equal(bl.slice(-6, -2).toString('ascii'), 'buff') + t.equal(bl.slice(-5, -2).toString('ascii'), 'uff') + + t.end() +}) + +tape('multiple bytes from multiple buffers', function (t) { + var bl = new BufferList() + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') + t.equal(bl.slice(3, 6).toString('ascii'), 'def') + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') + t.equal(bl.slice(-7, -4).toString('ascii'), 'def') + + t.end() +}) + +tape('multiple bytes from multiple buffer lists', function (t) { + var bl = new BufferList() + + bl.append(new BufferList([ Buffer.from('abcd'), Buffer.from('efg') ])) + bl.append(new BufferList([ Buffer.from('hi'), Buffer.from('j') ])) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') + t.equal(bl.slice(3, 6).toString('ascii'), 'def') + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') + + t.end() +}) + +// same data as previous test, just using nested constructors +tape('multiple bytes from crazy nested buffer lists', function (t) { + var bl = new BufferList() + + bl.append(new BufferList([ + new BufferList([ + new BufferList(Buffer.from('abc')) + , Buffer.from('d') + , new BufferList(Buffer.from('efg')) + ]) + , new BufferList([ Buffer.from('hi') ]) + , new BufferList(Buffer.from('j')) + ])) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') + t.equal(bl.slice(3, 6).toString('ascii'), 'def') + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') + + t.end() +}) + +tape('append accepts 
arrays of Buffers', function (t) { + var bl = new BufferList() + bl.append(Buffer.from('abc')) + bl.append([ Buffer.from('def') ]) + bl.append([ Buffer.from('ghi'), Buffer.from('jkl') ]) + bl.append([ Buffer.from('mnop'), Buffer.from('qrstu'), Buffer.from('vwxyz') ]) + t.equal(bl.length, 26) + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') + t.end() +}) + +tape('append accepts arrays of BufferLists', function (t) { + var bl = new BufferList() + bl.append(Buffer.from('abc')) + bl.append([ new BufferList('def') ]) + bl.append(new BufferList([ Buffer.from('ghi'), new BufferList('jkl') ])) + bl.append([ Buffer.from('mnop'), new BufferList([ Buffer.from('qrstu'), Buffer.from('vwxyz') ]) ]) + t.equal(bl.length, 26) + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') + t.end() +}) + +tape('append chainable', function (t) { + var bl = new BufferList() + t.ok(bl.append(Buffer.from('abcd')) === bl) + t.ok(bl.append([ Buffer.from('abcd') ]) === bl) + t.ok(bl.append(new BufferList(Buffer.from('abcd'))) === bl) + t.ok(bl.append([ new BufferList(Buffer.from('abcd')) ]) === bl) + t.end() +}) + +tape('append chainable (test results)', function (t) { + var bl = new BufferList('abc') + .append([ new BufferList('def') ]) + .append(new BufferList([ Buffer.from('ghi'), new BufferList('jkl') ])) + .append([ Buffer.from('mnop'), new BufferList([ Buffer.from('qrstu'), Buffer.from('vwxyz') ]) ]) + + t.equal(bl.length, 26) + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') + t.end() +}) + +tape('consuming from multiple buffers', function (t) { + var bl = new BufferList() + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + + bl.consume(3) + t.equal(bl.length, 7) + t.equal(bl.slice(0, 7).toString('ascii'), 'defghij') + + bl.consume(2) + t.equal(bl.length, 5) + t.equal(bl.slice(0, 5).toString('ascii'), 'fghij') + + bl.consume(1) + t.equal(bl.length, 4) + t.equal(bl.slice(0, 4).toString('ascii'), 'ghij') + + bl.consume(1) + t.equal(bl.length, 3) + t.equal(bl.slice(0, 3).toString('ascii'), 'hij') + + bl.consume(2) + t.equal(bl.length, 1) + t.equal(bl.slice(0, 1).toString('ascii'), 'j') + + t.end() +}) + +tape('complete consumption', function (t) { + var bl = new BufferList() + + bl.append(Buffer.from('a')) + bl.append(Buffer.from('b')) + + bl.consume(2) + + t.equal(bl.length, 0) + t.equal(bl._bufs.length, 0) + + t.end() +}) + +tape('test readUInt8 / readInt8', function (t) { + var buf1 = Buffer.alloc(1) + , buf2 = Buffer.alloc(3) + , buf3 = Buffer.alloc(3) + , bl = new BufferList() + + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 + buf3[1] = 0x42 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readUInt8(2), 0x3) + t.equal(bl.readInt8(2), 0x3) + t.equal(bl.readUInt8(3), 0x4) + t.equal(bl.readInt8(3), 0x4) + t.equal(bl.readUInt8(4), 0x23) + t.equal(bl.readInt8(4), 0x23) + t.equal(bl.readUInt8(5), 0x42) + t.equal(bl.readInt8(5), 0x42) + t.end() +}) + +tape('test readUInt16LE / readUInt16BE / readInt16LE / readInt16BE', function (t) { + var buf1 = Buffer.alloc(1) + , buf2 = Buffer.alloc(3) + , buf3 = Buffer.alloc(3) + , bl = new BufferList() + + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 + buf3[1] = 0x42 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readUInt16BE(2), 0x0304) + t.equal(bl.readUInt16LE(2), 0x0403) + t.equal(bl.readInt16BE(2), 0x0304) + 
t.equal(bl.readInt16LE(2), 0x0403) + t.equal(bl.readUInt16BE(3), 0x0423) + t.equal(bl.readUInt16LE(3), 0x2304) + t.equal(bl.readInt16BE(3), 0x0423) + t.equal(bl.readInt16LE(3), 0x2304) + t.equal(bl.readUInt16BE(4), 0x2342) + t.equal(bl.readUInt16LE(4), 0x4223) + t.equal(bl.readInt16BE(4), 0x2342) + t.equal(bl.readInt16LE(4), 0x4223) + t.end() +}) + +tape('test readUInt32LE / readUInt32BE / readInt32LE / readInt32BE', function (t) { + var buf1 = Buffer.alloc(1) + , buf2 = Buffer.alloc(3) + , buf3 = Buffer.alloc(3) + , bl = new BufferList() + + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 + buf3[1] = 0x42 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readUInt32BE(2), 0x03042342) + t.equal(bl.readUInt32LE(2), 0x42230403) + t.equal(bl.readInt32BE(2), 0x03042342) + t.equal(bl.readInt32LE(2), 0x42230403) + t.end() +}) + +tape('test readUIntLE / readUIntBE / readIntLE / readIntBE', function (t) { + var buf1 = Buffer.alloc(1) + , buf2 = Buffer.alloc(3) + , buf3 = Buffer.alloc(3) + , bl = new BufferList() + + buf2[0] = 0x2 + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 + buf3[1] = 0x42 + buf3[2] = 0x61 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readUIntBE(1, 1), 0x02) + t.equal(bl.readUIntBE(1, 2), 0x0203) + t.equal(bl.readUIntBE(1, 3), 0x020304) + t.equal(bl.readUIntBE(1, 4), 0x02030423) + t.equal(bl.readUIntBE(1, 5), 0x0203042342) + t.equal(bl.readUIntBE(1, 6), 0x020304234261) + t.equal(bl.readUIntLE(1, 1), 0x02) + t.equal(bl.readUIntLE(1, 2), 0x0302) + t.equal(bl.readUIntLE(1, 3), 0x040302) + t.equal(bl.readUIntLE(1, 4), 0x23040302) + t.equal(bl.readUIntLE(1, 5), 0x4223040302) + t.equal(bl.readUIntLE(1, 6), 0x614223040302) + t.equal(bl.readIntBE(1, 1), 0x02) + t.equal(bl.readIntBE(1, 2), 0x0203) + t.equal(bl.readIntBE(1, 3), 0x020304) + t.equal(bl.readIntBE(1, 4), 0x02030423) + t.equal(bl.readIntBE(1, 5), 0x0203042342) + t.equal(bl.readIntBE(1, 6), 0x020304234261) + t.equal(bl.readIntLE(1, 1), 0x02) + t.equal(bl.readIntLE(1, 2), 0x0302) + t.equal(bl.readIntLE(1, 3), 0x040302) + t.equal(bl.readIntLE(1, 4), 0x23040302) + t.equal(bl.readIntLE(1, 5), 0x4223040302) + t.equal(bl.readIntLE(1, 6), 0x614223040302) + t.end() +}) + +tape('test readFloatLE / readFloatBE', function (t) { + var buf1 = Buffer.alloc(1) + , buf2 = Buffer.alloc(3) + , buf3 = Buffer.alloc(3) + , bl = new BufferList() + + buf2[1] = 0x00 + buf2[2] = 0x00 + buf3[0] = 0x80 + buf3[1] = 0x3f + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readFloatLE(2), 0x01) + t.end() +}) + +tape('test readDoubleLE / readDoubleBE', function (t) { + var buf1 = Buffer.alloc(1) + , buf2 = Buffer.alloc(3) + , buf3 = Buffer.alloc(10) + , bl = new BufferList() + + buf2[1] = 0x55 + buf2[2] = 0x55 + buf3[0] = 0x55 + buf3[1] = 0x55 + buf3[2] = 0x55 + buf3[3] = 0x55 + buf3[4] = 0xd5 + buf3[5] = 0x3f + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readDoubleLE(2), 0.3333333333333333) + t.end() +}) + +tape('test toString', function (t) { + var bl = new BufferList() + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.toString('ascii', 0, 10), 'abcdefghij') + t.equal(bl.toString('ascii', 3, 10), 'defghij') + t.equal(bl.toString('ascii', 3, 6), 'def') + t.equal(bl.toString('ascii', 3, 8), 'defgh') + t.equal(bl.toString('ascii', 5, 10), 'fghij') + + t.end() +}) + +tape('test toString encoding', function (t) { + var bl = new BufferList() + , b = 
Buffer.from('abcdefghij\xff\x00') + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + bl.append(Buffer.from('\xff\x00')) + + encodings.forEach(function (enc) { + t.equal(bl.toString(enc), b.toString(enc), enc) + }) + + t.end() +}) + +!process.browser && tape('test stream', function (t) { + var random = crypto.randomBytes(65534) + , rndhash = hash(random, 'md5') + , md5sum = crypto.createHash('md5') + , bl = new BufferList(function (err, buf) { + t.ok(Buffer.isBuffer(buf)) + t.ok(err === null) + t.equal(rndhash, hash(bl.slice(), 'md5')) + t.equal(rndhash, hash(buf, 'md5')) + + bl.pipe(fs.createWriteStream('/tmp/bl_test_rnd_out.dat')) + .on('close', function () { + var s = fs.createReadStream('/tmp/bl_test_rnd_out.dat') + s.on('data', md5sum.update.bind(md5sum)) + s.on('end', function() { + t.equal(rndhash, md5sum.digest('hex'), 'woohoo! correct hash!') + t.end() + }) + }) + + }) + + fs.writeFileSync('/tmp/bl_test_rnd.dat', random) + fs.createReadStream('/tmp/bl_test_rnd.dat').pipe(bl) +}) + +tape('instantiation with Buffer', function (t) { + var buf = crypto.randomBytes(1024) + , buf2 = crypto.randomBytes(1024) + , b = BufferList(buf) + + t.equal(buf.toString('hex'), b.slice().toString('hex'), 'same buffer') + b = BufferList([ buf, buf2 ]) + t.equal(b.slice().toString('hex'), Buffer.concat([ buf, buf2 ]).toString('hex'), 'same buffer') + t.end() +}) + +tape('test String appendage', function (t) { + var bl = new BufferList() + , b = Buffer.from('abcdefghij\xff\x00') + + bl.append('abcd') + bl.append('efg') + bl.append('hi') + bl.append('j') + bl.append('\xff\x00') + + encodings.forEach(function (enc) { + t.equal(bl.toString(enc), b.toString(enc)) + }) + + t.end() +}) + +tape('test Number appendage', function (t) { + var bl = new BufferList() + , b = Buffer.from('1234567890') + + bl.append(1234) + bl.append(567) + bl.append(89) + bl.append(0) + + encodings.forEach(function (enc) { + t.equal(bl.toString(enc), b.toString(enc)) + }) + + t.end() +}) + +tape('write nothing, should get empty buffer', function (t) { + t.plan(3) + BufferList(function (err, data) { + t.notOk(err, 'no error') + t.ok(Buffer.isBuffer(data), 'got a buffer') + t.equal(0, data.length, 'got a zero-length buffer') + t.end() + }).end() +}) + +tape('unicode string', function (t) { + t.plan(2) + var inp1 = '\u2600' + , inp2 = '\u2603' + , exp = inp1 + ' and ' + inp2 + , bl = BufferList() + bl.write(inp1) + bl.write(' and ') + bl.write(inp2) + t.equal(exp, bl.toString()) + t.equal(Buffer.from(exp).toString('hex'), bl.toString('hex')) +}) + +tape('should emit finish', function (t) { + var source = BufferList() + , dest = BufferList() + + source.write('hello') + source.pipe(dest) + + dest.on('finish', function () { + t.equal(dest.toString('utf8'), 'hello') + t.end() + }) +}) + +tape('basic copy', function (t) { + var buf = crypto.randomBytes(1024) + , buf2 = Buffer.alloc(1024) + , b = BufferList(buf) + + b.copy(buf2) + t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer') + t.end() +}) + +tape('copy after many appends', function (t) { + var buf = crypto.randomBytes(512) + , buf2 = Buffer.alloc(1024) + , b = BufferList(buf) + + b.append(buf) + b.copy(buf2) + t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer') + t.end() +}) + +tape('copy at a precise position', function (t) { + var buf = crypto.randomBytes(1004) + , buf2 = Buffer.alloc(1024) + , b = BufferList(buf) + + b.copy(buf2, 20) + 
t.equal(b.slice().toString('hex'), buf2.slice(20).toString('hex'), 'same buffer') + t.end() +}) + +tape('copy starting from a precise location', function (t) { + var buf = crypto.randomBytes(10) + , buf2 = Buffer.alloc(5) + , b = BufferList(buf) + + b.copy(buf2, 0, 5) + t.equal(b.slice(5).toString('hex'), buf2.toString('hex'), 'same buffer') + t.end() +}) + +tape('copy in an interval', function (t) { + var rnd = crypto.randomBytes(10) + , b = BufferList(rnd) // put the random bytes there + , actual = Buffer.alloc(3) + , expected = Buffer.alloc(3) + + rnd.copy(expected, 0, 5, 8) + b.copy(actual, 0, 5, 8) + + t.equal(actual.toString('hex'), expected.toString('hex'), 'same buffer') + t.end() +}) + +tape('copy an interval between two buffers', function (t) { + var buf = crypto.randomBytes(10) + , buf2 = Buffer.alloc(10) + , b = BufferList(buf) + + b.append(buf) + b.copy(buf2, 0, 5, 15) + + t.equal(b.slice(5, 15).toString('hex'), buf2.toString('hex'), 'same buffer') + t.end() +}) + +tape('shallow slice across buffer boundaries', function (t) { + var bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice(3, 13).toString(), 'stSecondTh') + t.end() +}) + +tape('shallow slice within single buffer', function (t) { + t.plan(2) + var bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice(5, 10).toString(), 'Secon') + t.equal(bl.shallowSlice(7, 10).toString(), 'con') + t.end() +}) + +tape('shallow slice single buffer', function (t) { + t.plan(3) + var bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice(0, 5).toString(), 'First') + t.equal(bl.shallowSlice(5, 11).toString(), 'Second') + t.equal(bl.shallowSlice(11, 16).toString(), 'Third') +}) + +tape('shallow slice with negative or omitted indices', function (t) { + t.plan(4) + var bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice().toString(), 'FirstSecondThird') + t.equal(bl.shallowSlice(5).toString(), 'SecondThird') + t.equal(bl.shallowSlice(5, -3).toString(), 'SecondTh') + t.equal(bl.shallowSlice(-8).toString(), 'ondThird') +}) + +tape('shallow slice does not make a copy', function (t) { + t.plan(1) + var buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')] + var bl = (new BufferList(buffers)).shallowSlice(5, -3) + + buffers[1].fill('h') + buffers[2].fill('h') + + t.equal(bl.toString(), 'hhhhhhhh') +}) + +tape('shallow slice with 0 length', function (t) { + t.plan(1) + var buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')] + var bl = (new BufferList(buffers)).shallowSlice(0, 0) + t.equal(bl.length, 0) +}) + +tape('shallow slice with 0 length from middle', function (t) { + t.plan(1) + var buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')] + var bl = (new BufferList(buffers)).shallowSlice(10, 10) + t.equal(bl.length, 0) +}) + +tape('duplicate', function (t) { + t.plan(2) + + var bl = new BufferList('abcdefghij\xff\x00') + , dup = bl.duplicate() + + t.equal(bl.prototype, dup.prototype) + t.equal(bl.toString('hex'), dup.toString('hex')) +}) + +tape('destroy no pipe', function (t) { + t.plan(2) + + var bl = new BufferList('alsdkfja;lsdkfja;lsdk') + bl.destroy() + + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) +}) + +tape('destroy with error', function (t) { + t.plan(3) + + var bl = new BufferList('alsdkfja;lsdkfja;lsdk') + var err = new Error('kaboom') + bl.destroy(err) + bl.on('error', function (_err) { + t.equal(_err, err) + }) + + t.equal(bl._bufs.length, 0) + 
t.equal(bl.length, 0) +}) + +!process.browser && tape('destroy with pipe before read end', function (t) { + t.plan(2) + + var bl = new BufferList() + fs.createReadStream(__dirname + '/test.js') + .pipe(bl) + + bl.destroy() + + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) + +}) + +!process.browser && tape('destroy with pipe before read end with race', function (t) { + t.plan(2) + + var bl = new BufferList() + fs.createReadStream(__dirname + '/test.js') + .pipe(bl) + + setTimeout(function () { + bl.destroy() + setTimeout(function () { + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) + }, 500) + }, 500) +}) + +!process.browser && tape('destroy with pipe after read end', function (t) { + t.plan(2) + + var bl = new BufferList() + fs.createReadStream(__dirname + '/test.js') + .on('end', onEnd) + .pipe(bl) + + function onEnd () { + bl.destroy() + + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) + } +}) + +!process.browser && tape('destroy with pipe while writing to a destination', function (t) { + t.plan(4) + + var bl = new BufferList() + , ds = new BufferList() + + fs.createReadStream(__dirname + '/test.js') + .on('end', onEnd) + .pipe(bl) + + function onEnd () { + bl.pipe(ds) + + setTimeout(function () { + bl.destroy() + + t.equals(bl._bufs.length, 0) + t.equals(bl.length, 0) + + ds.destroy() + + t.equals(bl._bufs.length, 0) + t.equals(bl.length, 0) + + }, 100) + } +}) + +!process.browser && tape('handle error', function (t) { + t.plan(2) + fs.createReadStream('/does/not/exist').pipe(BufferList(function (err, data) { + t.ok(err instanceof Error, 'has error') + t.notOk(data, 'no data') + })) +}) diff --git a/node_modules/block-stream/LICENCE b/node_modules/block-stream/LICENCE deleted file mode 100644 index 74489e2e2658e..0000000000000 --- a/node_modules/block-stream/LICENCE +++ /dev/null @@ -1,25 +0,0 @@ -Copyright (c) Isaac Z. Schlueter -All rights reserved. - -The BSD License - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS -``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED -TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS -BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/block-stream/README.md b/node_modules/block-stream/README.md deleted file mode 100644 index c16e9c468891d..0000000000000 --- a/node_modules/block-stream/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# block-stream - -A stream of blocks. 
- -Write data into it, and it'll output data in buffer blocks the size you -specify, padding with zeroes if necessary. - -```javascript -var block = new BlockStream(512) -fs.createReadStream("some-file").pipe(block) -block.pipe(fs.createWriteStream("block-file")) -``` - -When `.end()` or `.flush()` is called, it'll pad the block with zeroes. diff --git a/node_modules/block-stream/block-stream.js b/node_modules/block-stream/block-stream.js deleted file mode 100644 index 008de035c2edc..0000000000000 --- a/node_modules/block-stream/block-stream.js +++ /dev/null @@ -1,209 +0,0 @@ -// write data to it, and it'll emit data in 512 byte blocks. -// if you .end() or .flush(), it'll emit whatever it's got, -// padded with nulls to 512 bytes. - -module.exports = BlockStream - -var Stream = require("stream").Stream - , inherits = require("inherits") - , assert = require("assert").ok - , debug = process.env.DEBUG ? console.error : function () {} - -function BlockStream (size, opt) { - this.writable = this.readable = true - this._opt = opt || {} - this._chunkSize = size || 512 - this._offset = 0 - this._buffer = [] - this._bufferLength = 0 - if (this._opt.nopad) this._zeroes = false - else { - this._zeroes = new Buffer(this._chunkSize) - for (var i = 0; i < this._chunkSize; i ++) { - this._zeroes[i] = 0 - } - } -} - -inherits(BlockStream, Stream) - -BlockStream.prototype.write = function (c) { - // debug(" BS write", c) - if (this._ended) throw new Error("BlockStream: write after end") - if (c && !Buffer.isBuffer(c)) c = new Buffer(c + "") - if (c.length) { - this._buffer.push(c) - this._bufferLength += c.length - } - // debug("pushed onto buffer", this._bufferLength) - if (this._bufferLength >= this._chunkSize) { - if (this._paused) { - // debug(" BS paused, return false, need drain") - this._needDrain = true - return false - } - this._emitChunk() - } - return true -} - -BlockStream.prototype.pause = function () { - // debug(" BS pausing") - this._paused = true -} - -BlockStream.prototype.resume = function () { - // debug(" BS resume") - this._paused = false - return this._emitChunk() -} - -BlockStream.prototype.end = function (chunk) { - // debug("end", chunk) - if (typeof chunk === "function") cb = chunk, chunk = null - if (chunk) this.write(chunk) - this._ended = true - this.flush() -} - -BlockStream.prototype.flush = function () { - this._emitChunk(true) -} - -BlockStream.prototype._emitChunk = function (flush) { - // debug("emitChunk flush=%j emitting=%j paused=%j", flush, this._emitting, this._paused) - - // emit a chunk - if (flush && this._zeroes) { - // debug(" BS push zeroes", this._bufferLength) - // push a chunk of zeroes - var padBytes = (this._bufferLength % this._chunkSize) - if (padBytes !== 0) padBytes = this._chunkSize - padBytes - if (padBytes > 0) { - // debug("padBytes", padBytes, this._zeroes.slice(0, padBytes)) - this._buffer.push(this._zeroes.slice(0, padBytes)) - this._bufferLength += padBytes - // debug(this._buffer[this._buffer.length - 1].length, this._bufferLength) - } - } - - if (this._emitting || this._paused) return - this._emitting = true - - // debug(" BS entering loops") - var bufferIndex = 0 - while (this._bufferLength >= this._chunkSize && - (flush || !this._paused)) { - // debug(" BS data emission loop", this._bufferLength) - - var out - , outOffset = 0 - , outHas = this._chunkSize - - while (outHas > 0 && (flush || !this._paused) ) { - // debug(" BS data inner emit loop", this._bufferLength) - var cur = this._buffer[bufferIndex] - , curHas = cur.length - 
this._offset - // debug("cur=", cur) - // debug("curHas=%j", curHas) - // If it's not big enough to fill the whole thing, then we'll need - // to copy multiple buffers into one. However, if it is big enough, - // then just slice out the part we want, to save unnecessary copying. - // Also, need to copy if we've already done some copying, since buffers - // can't be joined like cons strings. - if (out || curHas < outHas) { - out = out || new Buffer(this._chunkSize) - cur.copy(out, outOffset, - this._offset, this._offset + Math.min(curHas, outHas)) - } else if (cur.length === outHas && this._offset === 0) { - // shortcut -- cur is exactly long enough, and no offset. - out = cur - } else { - // slice out the piece of cur that we need. - out = cur.slice(this._offset, this._offset + outHas) - } - - if (curHas > outHas) { - // means that the current buffer couldn't be completely output - // update this._offset to reflect how much WAS written - this._offset += outHas - outHas = 0 - } else { - // output the entire current chunk. - // toss it away - outHas -= curHas - outOffset += curHas - bufferIndex ++ - this._offset = 0 - } - } - - this._bufferLength -= this._chunkSize - assert(out.length === this._chunkSize) - // debug("emitting data", out) - // debug(" BS emitting, paused=%j", this._paused, this._bufferLength) - this.emit("data", out) - out = null - } - // debug(" BS out of loops", this._bufferLength) - - // whatever is left, it's not enough to fill up a block, or we're paused - this._buffer = this._buffer.slice(bufferIndex) - if (this._paused) { - // debug(" BS paused, leaving", this._bufferLength) - this._needsDrain = true - this._emitting = false - return - } - - // if flushing, and not using null-padding, then need to emit the last - // chunk(s) sitting in the queue. We know that it's not enough to - // fill up a whole block, because otherwise it would have been emitted - // above, but there may be some offset. - var l = this._buffer.length - if (flush && !this._zeroes && l) { - if (l === 1) { - if (this._offset) { - this.emit("data", this._buffer[0].slice(this._offset)) - } else { - this.emit("data", this._buffer[0]) - } - } else { - var outHas = this._bufferLength - , out = new Buffer(outHas) - , outOffset = 0 - for (var i = 0; i < l; i ++) { - var cur = this._buffer[i] - , curHas = cur.length - this._offset - cur.copy(out, outOffset, this._offset) - this._offset = 0 - outOffset += curHas - this._bufferLength -= curHas - } - this.emit("data", out) - } - // truncate - this._buffer.length = 0 - this._bufferLength = 0 - this._offset = 0 - } - - // now either drained or ended - // debug("either draining, or ended", this._bufferLength, this._ended) - // means that we've flushed out all that we can so far. 
- if (this._needDrain) { - // debug("emitting drain", this._bufferLength) - this._needDrain = false - this.emit("drain") - } - - if ((this._bufferLength === 0) && this._ended && !this._endEmitted) { - // debug("emitting end", this._bufferLength) - this._endEmitted = true - this.emit("end") - } - - this._emitting = false - - // debug(" BS no longer emitting", flush, this._paused, this._emitting, this._bufferLength, this._chunkSize) -} diff --git a/node_modules/block-stream/package.json b/node_modules/block-stream/package.json deleted file mode 100644 index d7d591cf904b0..0000000000000 --- a/node_modules/block-stream/package.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "_from": "block-stream@*", - "_id": "block-stream@0.0.9", - "_inBundle": false, - "_integrity": "sha1-E+v+d4oDIFz+A3UUgeu0szAMEmo=", - "_location": "/block-stream", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "block-stream@*", - "name": "block-stream", - "escapedName": "block-stream", - "rawSpec": "*", - "saveSpec": null, - "fetchSpec": "*" - }, - "_requiredBy": [ - "/node-gyp/tar" - ], - "_resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.9.tgz", - "_shasum": "13ebfe778a03205cfe03751481ebb4b3300c126a", - "_spec": "block-stream@*", - "_where": "/Users/rebecca/code/npm/node_modules/node-gyp/node_modules/tar", - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "bugs": { - "url": "https://github.com/isaacs/block-stream/issues" - }, - "bundleDependencies": false, - "dependencies": { - "inherits": "~2.0.0" - }, - "deprecated": false, - "description": "a stream of blocks", - "devDependencies": { - "tap": "^5.7.1" - }, - "engines": { - "node": "0.4 || >=0.5.8" - }, - "files": [ - "block-stream.js" - ], - "homepage": "https://github.com/isaacs/block-stream#readme", - "license": "ISC", - "main": "block-stream.js", - "name": "block-stream", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/block-stream.git" - }, - "scripts": { - "test": "tap test/*.js --cov" - }, - "version": "0.0.9" -} diff --git a/node_modules/bluebird/js/browser/bluebird.core.js b/node_modules/bluebird/js/browser/bluebird.core.js index 626406f5d5b25..bae7583d6fe07 100644 --- a/node_modules/bluebird/js/browser/bluebird.core.js +++ b/node_modules/bluebird/js/browser/bluebird.core.js @@ -23,7 +23,7 @@ * */ /** - * bluebird build version 3.5.3 + * bluebird build version 3.5.5 * Features enabled: core * Features disabled: race, call_get, generators, map, nodeify, promisify, props, reduce, settle, some, using, timers, filter, any, each */ @@ -1214,8 +1214,8 @@ function parseLineInfo(line) { function setBounds(firstLineError, lastLineError) { if (!longStackTracesIsSupported()) return; - var firstStackLines = firstLineError.stack.split("\n"); - var lastStackLines = lastLineError.stack.split("\n"); + var firstStackLines = (firstLineError.stack || "").split("\n"); + var lastStackLines = (lastLineError.stack || "").split("\n"); var firstIndex = -1; var lastIndex = -1; var firstFileName; @@ -2238,6 +2238,11 @@ Promise.prototype.caught = Promise.prototype["catch"] = function (fn) { } catchInstances.length = j; fn = arguments[i]; + + if (typeof fn !== "function") { + throw new TypeError("The last argument to .catch() " + + "must be a function, got " + util.toString(fn)); + } return this.then(undefined, catchFilter(catchInstances, fn, this)); } return this.then(undefined, fn); @@ -2845,6 +2850,14 @@ Promise.prototype._settledValue = function() 
{ } }; +if (typeof Symbol !== "undefined" && Symbol.toStringTag) { + es5.defineProperty(Promise.prototype, Symbol.toStringTag, { + get: function () { + return "Object"; + } + }); +} + function deferResolve(v) {this.promise._resolveCallback(v);} function deferReject(v) {this.promise._rejectCallback(v, false);} @@ -2871,7 +2884,7 @@ _dereq_("./synchronous_inspection")(Promise); _dereq_("./join")( Promise, PromiseArray, tryConvertToPromise, INTERNAL, async, getDomain); Promise.Promise = Promise; -Promise.version = "3.5.3"; +Promise.version = "3.5.5"; util.toFastProperties(Promise); util.toFastProperties(Promise.prototype); @@ -3182,7 +3195,8 @@ if (util.isNode && typeof MutationObserver === "undefined") { } else if ((typeof MutationObserver !== "undefined") && !(typeof window !== "undefined" && window.navigator && - (window.navigator.standalone || window.cordova))) { + (window.navigator.standalone || window.cordova)) && + ("classList" in document.documentElement)) { schedule = (function() { var div = document.createElement("div"); var opts = {attributes: true}; @@ -3792,7 +3806,12 @@ var ret = { domainBind: domainBind }; ret.isRecentNode = ret.isNode && (function() { - var version = process.versions.node.split(".").map(Number); + var version; + if (process.versions && process.versions.node) { + version = process.versions.node.split(".").map(Number); + } else if (process.version) { + version = process.version.split(".").map(Number); + } return (version[0] === 0 && version[1] > 10) || (version[0] > 0); })(); diff --git a/node_modules/bluebird/js/browser/bluebird.core.min.js b/node_modules/bluebird/js/browser/bluebird.core.min.js index 25fe9175713c0..7091d9fac0ad2 100644 --- a/node_modules/bluebird/js/browser/bluebird.core.min.js +++ b/node_modules/bluebird/js/browser/bluebird.core.min.js @@ -23,9 +23,9 @@ * */ /** - * bluebird build version 3.5.3 + * bluebird build version 3.5.5 * Features enabled: core * Features disabled: race, call_get, generators, map, nodeify, promisify, props, reduce, settle, some, using, timers, filter, any, each */ -!function(t){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{var e;"undefined"!=typeof window?e=window:"undefined"!=typeof global?e=global:"undefined"!=typeof self&&(e=self),e.Promise=t()}}(function(){var t,e,n;return function r(t,e,n){function i(a,s){if(!e[a]){if(!t[a]){var c="function"==typeof _dereq_&&_dereq_;if(!s&&c)return c(a,!0);if(o)return o(a,!0);var l=new Error("Cannot find module '"+a+"'");throw l.code="MODULE_NOT_FOUND",l}var u=e[a]={exports:{}};t[a][0].call(u.exports,function(e){var n=t[a][1][e];return i(n?n:e)},u,u.exports,r,t,e,n)}return e[a].exports}for(var o="function"==typeof _dereq_&&_dereq_,a=0;a0;)c(t)}function c(t){var e=t.shift();if("function"!=typeof e)e._settlePromises();else{var n=t.shift(),r=t.shift();e.call(n,r)}}var l;try{throw new Error}catch(u){l=u}var p=t("./schedule"),f=t("./queue"),h=t("./util");r.prototype.setScheduler=function(t){var e=this._schedule;return this._schedule=t,this._customScheduler=!0,e},r.prototype.hasCustomScheduler=function(){return this._customScheduler},r.prototype.enableTrampoline=function(){this._trampolineEnabled=!0},r.prototype.disableTrampolineIfNecessary=function(){h.hasDevTools&&(this._trampolineEnabled=!1)},r.prototype.haveItemsQueued=function(){return this._isTickUsed||this._haveDrainedQueues},r.prototype.fatalError=function(t,e){e?(process.stderr.write("Fatal "+(t instanceof 
Error?t.stack:t)+"\n"),process.exit(2)):this.throwLater(t)},r.prototype.throwLater=function(t,e){if(1===arguments.length&&(e=t,t=function(){throw e}),"undefined"!=typeof setTimeout)setTimeout(function(){t(e)},0);else try{this._schedule(function(){t(e)})}catch(n){throw new Error("No async scheduler available\n\n See http://goo.gl/MqrFmX\n")}},h.hasDevTools?(r.prototype.invokeLater=function(t,e,n){this._trampolineEnabled?i.call(this,t,e,n):this._schedule(function(){setTimeout(function(){t.call(e,n)},100)})},r.prototype.invoke=function(t,e,n){this._trampolineEnabled?o.call(this,t,e,n):this._schedule(function(){t.call(e,n)})},r.prototype.settlePromises=function(t){this._trampolineEnabled?a.call(this,t):this._schedule(function(){t._settlePromises()})}):(r.prototype.invokeLater=i,r.prototype.invoke=o,r.prototype.settlePromises=a),r.prototype._drainQueues=function(){s(this._normalQueue),this._reset(),this._haveDrainedQueues=!0,s(this._lateQueue)},r.prototype._queueTick=function(){this._isTickUsed||(this._isTickUsed=!0,this._schedule(this.drainQueues))},r.prototype._reset=function(){this._isTickUsed=!1},e.exports=r,e.exports.firstLineError=l},{"./queue":17,"./schedule":18,"./util":21}],2:[function(t,e,n){"use strict";e.exports=function(t,e,n,r){var i=!1,o=function(t,e){this._reject(e)},a=function(t,e){e.promiseRejectionQueued=!0,e.bindingPromise._then(o,o,null,this,t)},s=function(t,e){0===(50397184&this._bitField)&&this._resolveCallback(e.target)},c=function(t,e){e.promiseRejectionQueued||this._reject(t)};t.prototype.bind=function(o){i||(i=!0,t.prototype._propagateFrom=r.propagateFromFunction(),t.prototype._boundValue=r.boundValueFunction());var l=n(o),u=new t(e);u._propagateFrom(this,1);var p=this._target();if(u._setBoundTo(l),l instanceof t){var f={promiseRejectionQueued:!1,promise:u,target:p,bindingPromise:l};p._then(e,a,void 0,u,f),l._then(s,c,void 0,u,f),u._setOnCancel(l)}else u._resolveCallback(p);return u},t.prototype._setBoundTo=function(t){void 0!==t?(this._bitField=2097152|this._bitField,this._boundTo=t):this._bitField=-2097153&this._bitField},t.prototype._isBound=function(){return 2097152===(2097152&this._bitField)},t.bind=function(e,n){return t.resolve(n).bind(e)}}},{}],3:[function(t,e,n){"use strict";function r(){try{Promise===o&&(Promise=i)}catch(t){}return o}var i;"undefined"!=typeof Promise&&(i=Promise);var o=t("./promise")();o.noConflict=r,e.exports=o},{"./promise":15}],4:[function(t,e,n){"use strict";e.exports=function(e,n,r,i){var o=t("./util"),a=o.tryCatch,s=o.errorObj,c=e._async;e.prototype["break"]=e.prototype.cancel=function(){if(!i.cancellation())return this._warn("cancellation is disabled");for(var t=this,e=t;t._isCancellable();){if(!t._cancelBy(e)){e._isFollowing()?e._followee().cancel():e._cancelBranched();break}var n=t._cancellationParent;if(null==n||!n._isCancellable()){t._isFollowing()?t._followee().cancel():t._cancelBranched();break}t._isFollowing()&&t._followee().cancel(),t._setWillBeCancelled(),e=t,t=n}},e.prototype._branchHasCancelled=function(){this._branchesRemainingToCancel--},e.prototype._enoughBranchesHaveCancelled=function(){return void 0===this._branchesRemainingToCancel||this._branchesRemainingToCancel<=0},e.prototype._cancelBy=function(t){return 
t===this?(this._branchesRemainingToCancel=0,this._invokeOnCancel(),!0):(this._branchHasCancelled(),this._enoughBranchesHaveCancelled()?(this._invokeOnCancel(),!0):!1)},e.prototype._cancelBranched=function(){this._enoughBranchesHaveCancelled()&&this._cancel()},e.prototype._cancel=function(){this._isCancellable()&&(this._setCancelled(),c.invoke(this._cancelPromises,this,void 0))},e.prototype._cancelPromises=function(){this._length()>0&&this._settlePromises()},e.prototype._unsetOnCancel=function(){this._onCancelField=void 0},e.prototype._isCancellable=function(){return this.isPending()&&!this._isCancelled()},e.prototype.isCancellable=function(){return this.isPending()&&!this.isCancelled()},e.prototype._doInvokeOnCancel=function(t,e){if(o.isArray(t))for(var n=0;n=0?o[t]:void 0}var i=!1,o=[];return t.prototype._promiseCreated=function(){},t.prototype._pushContext=function(){},t.prototype._popContext=function(){return null},t._peekContext=t.prototype._peekContext=function(){},e.prototype._pushContext=function(){void 0!==this._trace&&(this._trace._promiseCreated=null,o.push(this._trace))},e.prototype._popContext=function(){if(void 0!==this._trace){var t=o.pop(),e=t._promiseCreated;return t._promiseCreated=null,e}return null},e.CapturedTrace=null,e.create=n,e.deactivateLongStackTraces=function(){},e.activateLongStackTraces=function(){var n=t.prototype._pushContext,o=t.prototype._popContext,a=t._peekContext,s=t.prototype._peekContext,c=t.prototype._promiseCreated;e.deactivateLongStackTraces=function(){t.prototype._pushContext=n,t.prototype._popContext=o,t._peekContext=a,t.prototype._peekContext=s,t.prototype._promiseCreated=c,i=!1},i=!0,t.prototype._pushContext=e.prototype._pushContext,t.prototype._popContext=e.prototype._popContext,t._peekContext=t.prototype._peekContext=r,t.prototype._promiseCreated=function(){var t=this._peekContext();t&&null==t._promiseCreated&&(t._promiseCreated=this)}},e}},{}],7:[function(t,e,n){"use strict";e.exports=function(e,n){function r(t,e){return{promise:e}}function i(){return!1}function o(t,e,n){var r=this;try{t(e,n,function(t){if("function"!=typeof t)throw new TypeError("onCancel must be a function, got: "+H.toString(t));r._attachCancellationCallback(t)})}catch(i){return i}}function a(t){if(!this._isCancellable())return this;var e=this._onCancel();void 0!==e?H.isArray(e)?e.push(t):this._setOnCancel([e,t]):this._setOnCancel(t)}function s(){return this._onCancelField}function c(t){this._onCancelField=t}function l(){this._cancellationParent=void 0,this._onCancelField=void 0}function u(t,e){if(0!==(1&e)){this._cancellationParent=t;var n=t._branchesRemainingToCancel;void 0===n&&(n=0),t._branchesRemainingToCancel=n+1}0!==(2&e)&&t._isBound()&&this._setBoundTo(t._boundTo)}function p(t,e){0!==(2&e)&&t._isBound()&&this._setBoundTo(t._boundTo)}function f(){var t=this._boundTo;return void 0!==t&&t instanceof e?t.isFulfilled()?t.value():void 0:t}function h(){this._trace=new O(this._peekContext())}function _(t,e){if(V(t)){var n=this._trace;if(void 0!==n&&e&&(n=n._parent),void 0!==n)n.attachExtraTrace(t);else if(!t.__stackCleaned__){var r=k(t);H.notEnumerableProp(t,"stack",r.message+"\n"+r.stack.join("\n")),H.notEnumerableProp(t,"__stackCleaned__",!0)}}}function d(){this._trace=void 0}function v(t,e,n,r,i){if(void 0===t&&null!==e&&J){if(void 0!==i&&i._returnedNonUndefined())return;if(0===(65535&r._bitField))return;n&&(n+=" ");var o="",a="";if(e._trace){for(var s=e._trace.stack.split("\n"),c=w(s),l=c.length-1;l>=0;--l){var u=c[l];if(!q.test(u)){var p=u.match(G);p&&(o="at 
"+p[1]+":"+p[2]+":"+p[3]+" ");break}}if(c.length>0)for(var f=c[0],l=0;l0&&(a="\n"+s[l-1]);break}}var h="a promise was created in a "+n+"handler "+o+"but was not returned from it, see http://goo.gl/rRqMUw"+a;r._warn(h,!0,e)}}function y(t,e){var n=t+" is deprecated and will be removed in a future version.";return e&&(n+=" Use "+e+" instead."),g(n)}function g(t,n,r){if(st.warnings){var i,o=new I(t);if(n)r._attachExtraTrace(o);else if(st.longStackTraces&&(i=e._peekContext()))i.attachExtraTrace(o);else{var a=k(o);o.stack=a.message+"\n"+a.stack.join("\n")}nt("warning",o)||j(o,"",!0)}}function m(t,e){for(var n=0;n=0;--s)if(r[s]===o){a=s;break}for(var s=a;s>=0;--s){var c=r[s];if(e[i]!==c)break;e.pop(),i--}e=r}}function w(t){for(var e=[],n=0;n0&&"SyntaxError"!=t.name&&(e=e.slice(n)),e}function k(t){var e=t.stack,n=t.toString();return e="string"==typeof e&&e.length>0?E(t):[" (No stack trace)"],{message:n,stack:"SyntaxError"==t.name?e:w(e)}}function j(t,e,n){if("undefined"!=typeof console){var r;if(H.isObject(t)){var i=t.stack;r=e+W(i,t)}else r=e+String(t);"function"==typeof L?L(r,n):("function"==typeof console.log||"object"==typeof console.log)&&console.log(r)}}function F(t,e,n,r){var i=!1;try{"function"==typeof e&&(i=!0,"rejectionHandled"===t?e(r):e(n,r))}catch(o){U.throwLater(o)}"unhandledRejection"===t?nt(t,n,r)||i||j(n,"Unhandled rejection "):nt(t,r)}function T(t){var e;if("function"==typeof t)e="[function "+(t.name||"anonymous")+"]";else{e=t&&"function"==typeof t.toString?t.toString():H.toString(t);var n=/\[object [a-zA-Z0-9$_]+\]/;if(n.test(e))try{var r=JSON.stringify(t);e=r}catch(i){}0===e.length&&(e="(empty array)")}return"(<"+P(e)+">, no stack trace)"}function P(t){var e=41;return t.lengtha||0>s||!n||!r||n!==r||a>=s||(it=function(t){if(Q.test(t))return!0;var e=S(t);return e&&e.fileName===n&&a<=e.line&&e.line<=s?!0:!1})}}function O(t){this._parent=t,this._promisesCreated=0;var e=this._length=1+(void 0===t?0:t._length);at(this,O),e>32&&this.uncycle()}var A,N,L,B=e._getDomain,U=e._async,I=t("./errors").Warning,H=t("./util"),D=t("./es5"),V=H.canAttachTrace,Q=/[\\\/]bluebird[\\\/]js[\\\/](release|debug|instrumented)/,q=/\((?:timers\.js):\d+:\d+\)/,G=/[\/<\(](.+?):(\d+):(\d+)\)?\s*$/,M=null,W=null,$=!1,z=!(0==H.env("BLUEBIRD_DEBUG")||!H.env("BLUEBIRD_DEBUG")&&"development"!==H.env("NODE_ENV")),X=!(0==H.env("BLUEBIRD_WARNINGS")||!z&&!H.env("BLUEBIRD_WARNINGS")),K=!(0==H.env("BLUEBIRD_LONG_STACK_TRACES")||!z&&!H.env("BLUEBIRD_LONG_STACK_TRACES")),J=0!=H.env("BLUEBIRD_W_FORGOTTEN_RETURN")&&(X||!!H.env("BLUEBIRD_W_FORGOTTEN_RETURN"));e.prototype.suppressUnhandledRejections=function(){var t=this._target();t._bitField=-1048577&t._bitField|524288},e.prototype._ensurePossibleRejectionHandled=function(){if(0===(524288&this._bitField)){this._setRejectionIsUnhandled();var t=this;setTimeout(function(){t._notifyUnhandledRejection()},1)}},e.prototype._notifyUnhandledRejectionIsHandled=function(){F("rejectionHandled",A,void 0,this)},e.prototype._setReturnedNonUndefined=function(){this._bitField=268435456|this._bitField},e.prototype._returnedNonUndefined=function(){return 0!==(268435456&this._bitField)},e.prototype._notifyUnhandledRejection=function(){if(this._isRejectionUnhandled()){var 
t=this._settledValue();this._setUnhandledRejectionIsNotified(),F("unhandledRejection",N,t,this)}},e.prototype._setUnhandledRejectionIsNotified=function(){this._bitField=262144|this._bitField},e.prototype._unsetUnhandledRejectionIsNotified=function(){this._bitField=-262145&this._bitField},e.prototype._isUnhandledRejectionNotified=function(){return(262144&this._bitField)>0},e.prototype._setRejectionIsUnhandled=function(){this._bitField=1048576|this._bitField},e.prototype._unsetRejectionIsUnhandled=function(){this._bitField=-1048577&this._bitField,this._isUnhandledRejectionNotified()&&(this._unsetUnhandledRejectionIsNotified(),this._notifyUnhandledRejectionIsHandled())},e.prototype._isRejectionUnhandled=function(){return(1048576&this._bitField)>0},e.prototype._warn=function(t,e,n){return g(t,e,n||this)},e.onPossiblyUnhandledRejection=function(t){var e=B();N="function"==typeof t?null===e?t:H.domainBind(e,t):void 0},e.onUnhandledRejectionHandled=function(t){var e=B();A="function"==typeof t?null===e?t:H.domainBind(e,t):void 0};var Y=function(){};e.longStackTraces=function(){if(U.haveItemsQueued()&&!st.longStackTraces)throw new Error("cannot enable long stack traces after promises have been created\n\n See http://goo.gl/MqrFmX\n");if(!st.longStackTraces&&R()){var t=e.prototype._captureStackTrace,r=e.prototype._attachExtraTrace,i=e.prototype._dereferenceTrace;st.longStackTraces=!0,Y=function(){if(U.haveItemsQueued()&&!st.longStackTraces)throw new Error("cannot enable long stack traces after promises have been created\n\n See http://goo.gl/MqrFmX\n");e.prototype._captureStackTrace=t,e.prototype._attachExtraTrace=r,e.prototype._dereferenceTrace=i,n.deactivateLongStackTraces(),U.enableTrampoline(),st.longStackTraces=!1},e.prototype._captureStackTrace=h,e.prototype._attachExtraTrace=_,e.prototype._dereferenceTrace=d,n.activateLongStackTraces(),U.disableTrampolineIfNecessary()}},e.hasLongStackTraces=function(){return st.longStackTraces&&R()};var Z=function(){try{if("function"==typeof CustomEvent){var t=new CustomEvent("CustomEvent");return H.global.dispatchEvent(t),function(t,e){var n={detail:e,cancelable:!0};D.defineProperty(n,"promise",{value:e.promise}),D.defineProperty(n,"reason",{value:e.reason});var r=new CustomEvent(t.toLowerCase(),n);return!H.global.dispatchEvent(r)}}if("function"==typeof Event){var t=new Event("CustomEvent");return H.global.dispatchEvent(t),function(t,e){var n=new Event(t.toLowerCase(),{cancelable:!0});return n.detail=e,D.defineProperty(n,"promise",{value:e.promise}),D.defineProperty(n,"reason",{value:e.reason}),!H.global.dispatchEvent(n)}}var t=document.createEvent("CustomEvent");return t.initCustomEvent("testingtheevent",!1,!0,{}),H.global.dispatchEvent(t),function(t,e){var n=document.createEvent("CustomEvent");return n.initCustomEvent(t.toLowerCase(),!1,!0,e),!H.global.dispatchEvent(n)}}catch(e){}return function(){return!1}}(),tt=function(){return H.isNode?function(){return process.emit.apply(process,arguments)}:H.global?function(t){var e="on"+t.toLowerCase(),n=H.global[e];return n?(n.apply(H.global,[].slice.call(arguments,1)),!0):!1}:function(){return!1}}(),et={promiseCreated:r,promiseFulfilled:r,promiseRejected:r,promiseResolved:r,promiseCancelled:r,promiseChained:function(t,e,n){return{promise:e,child:n}},warning:function(t,e){return{warning:e}},unhandledRejection:function(t,e,n){return{reason:e,promise:n}},rejectionHandled:r},nt=function(t){var e=!1;try{e=tt.apply(null,arguments)}catch(n){U.throwLater(n),e=!0}var 
r=!1;try{r=Z(t,et[t].apply(null,arguments))}catch(n){U.throwLater(n),r=!0}return r||e};e.config=function(t){if(t=Object(t),"longStackTraces"in t&&(t.longStackTraces?e.longStackTraces():!t.longStackTraces&&e.hasLongStackTraces()&&Y()),"warnings"in t){var n=t.warnings;st.warnings=!!n,J=st.warnings,H.isObject(n)&&"wForgottenReturn"in n&&(J=!!n.wForgottenReturn)}if("cancellation"in t&&t.cancellation&&!st.cancellation){if(U.haveItemsQueued())throw new Error("cannot enable cancellation after promises are in use");e.prototype._clearCancellationData=l,e.prototype._propagateFrom=u,e.prototype._onCancel=s,e.prototype._setOnCancel=c,e.prototype._attachCancellationCallback=a,e.prototype._execute=o,rt=u,st.cancellation=!0}return"monitoring"in t&&(t.monitoring&&!st.monitoring?(st.monitoring=!0,e.prototype._fireEvent=nt):!t.monitoring&&st.monitoring&&(st.monitoring=!1,e.prototype._fireEvent=i)),e},e.prototype._fireEvent=i,e.prototype._execute=function(t,e,n){try{t(e,n)}catch(r){return r}},e.prototype._onCancel=function(){},e.prototype._setOnCancel=function(t){},e.prototype._attachCancellationCallback=function(t){},e.prototype._captureStackTrace=function(){},e.prototype._attachExtraTrace=function(){},e.prototype._dereferenceTrace=function(){},e.prototype._clearCancellationData=function(){},e.prototype._propagateFrom=function(t,e){};var rt=p,it=function(){return!1},ot=/[\/<\(]([^:\/]+):(\d+):(?:\d+)\)?\s*$/;H.inherits(O,Error),n.CapturedTrace=O,O.prototype.uncycle=function(){var t=this._length;if(!(2>t)){for(var e=[],n={},r=0,i=this;void 0!==i;++r)e.push(i),i=i._parent;t=this._length=r;for(var r=t-1;r>=0;--r){var o=e[r].stack;void 0===n[o]&&(n[o]=r)}for(var r=0;t>r;++r){var a=e[r].stack,s=n[a];if(void 0!==s&&s!==r){s>0&&(e[s-1]._parent=void 0,e[s-1]._length=1),e[r]._parent=void 0,e[r]._length=1;var c=r>0?e[r-1]:this;t-1>s?(c._parent=e[s+1],c._parent.uncycle(),c._length=c._parent._length+1):(c._parent=void 0,c._length=1);for(var l=c._length+1,u=r-2;u>=0;--u)e[u]._length=l,l++;return}}}},O.prototype.attachExtraTrace=function(t){if(!t.__stackCleaned__){this.uncycle();for(var e=k(t),n=e.message,r=[e.stack],i=this;void 0!==i;)r.push(w(i.stack.split("\n"))),i=i._parent;C(r),b(r),H.notEnumerableProp(t,"stack",m(n,r)),H.notEnumerableProp(t,"__stackCleaned__",!0)}};var at=function(){var t=/^\s*at\s*/,e=function(t,e){return"string"==typeof t?t:void 0!==e.name&&void 0!==e.message?e.toString():T(e)};if("number"==typeof Error.stackTraceLimit&&"function"==typeof Error.captureStackTrace){Error.stackTraceLimit+=6,M=t,W=e;var n=Error.captureStackTrace;return it=function(t){return Q.test(t)},function(t,e){Error.stackTraceLimit+=6,n(t,e),Error.stackTraceLimit-=6}}var r=new Error;if("string"==typeof r.stack&&r.stack.split("\n")[0].indexOf("stackDetection@")>=0)return M=/@/,W=e,$=!0,function(t){t.stack=(new Error).stack};var i;try{throw new Error}catch(o){i="stack"in o}return"stack"in r||!i||"number"!=typeof Error.stackTraceLimit?(W=function(t,e){return"string"==typeof t?t:"object"!=typeof e&&"function"!=typeof e||void 0===e.name||void 0===e.message?T(e):e.toString()},null):(M=t,W=e,function(t){Error.stackTraceLimit+=6;try{throw new Error}catch(e){t.stack=e.stack}Error.stackTraceLimit-=6})}([]);"undefined"!=typeof console&&"undefined"!=typeof console.warn&&(L=function(t){console.warn(t)},H.isNode&&process.stderr.isTTY?L=function(t,e){var n=e?"":"";console.warn(n+t+"\n")}:H.isNode||"string"!=typeof(new Error).stack||(L=function(t,e){console.warn("%c"+t,e?"color: darkorange":"color: red")}));var 
st={warnings:X,longStackTraces:!1,cancellation:!1,monitoring:!1};return K&&e.longStackTraces(),{longStackTraces:function(){return st.longStackTraces},warnings:function(){return st.warnings},cancellation:function(){return st.cancellation},monitoring:function(){return st.monitoring},propagateFromFunction:function(){return rt},boundValueFunction:function(){return f},checkForgottenReturns:v,setBounds:x,warn:g,deprecated:y,CapturedTrace:O,fireDomEvent:Z,fireGlobalEvent:tt}}},{"./errors":9,"./es5":10,"./util":21}],8:[function(t,e,n){"use strict";e.exports=function(t){function e(){return this.value}function n(){throw this.reason}t.prototype["return"]=t.prototype.thenReturn=function(n){return n instanceof t&&n.suppressUnhandledRejections(),this._then(e,void 0,void 0,{value:n},void 0)},t.prototype["throw"]=t.prototype.thenThrow=function(t){return this._then(n,void 0,void 0,{reason:t},void 0)},t.prototype.catchThrow=function(t){if(arguments.length<=1)return this._then(void 0,n,void 0,{reason:t},void 0);var e=arguments[1],r=function(){throw e};return this.caught(t,r)},t.prototype.catchReturn=function(n){if(arguments.length<=1)return n instanceof t&&n.suppressUnhandledRejections(),this._then(void 0,e,void 0,{value:n},void 0);var r=arguments[1];r instanceof t&&r.suppressUnhandledRejections();var i=function(){return r};return this.caught(n,i)}}},{}],9:[function(t,e,n){"use strict";function r(t,e){function n(r){return this instanceof n?(p(this,"message","string"==typeof r?r:e),p(this,"name",t),void(Error.captureStackTrace?Error.captureStackTrace(this,this.constructor):Error.call(this))):new n(r)}return u(n,Error),n}function i(t){return this instanceof i?(p(this,"name","OperationalError"),p(this,"message",t),this.cause=t,this.isOperational=!0,void(t instanceof Error?(p(this,"message",t.message),p(this,"stack",t.stack)):Error.captureStackTrace&&Error.captureStackTrace(this,this.constructor))):new i(t)}var o,a,s=t("./es5"),c=s.freeze,l=t("./util"),u=l.inherits,p=l.notEnumerableProp,f=r("Warning","warning"),h=r("CancellationError","cancellation error"),_=r("TimeoutError","timeout error"),d=r("AggregateError","aggregate error");try{o=TypeError,a=RangeError}catch(v){o=r("TypeError","type error"),a=r("RangeError","range error")}for(var y="join pop push shift unshift slice filter forEach some every map indexOf lastIndexOf reduce reduceRight sort reverse".split(" "),g=0;g1?t.cancelPromise._reject(e):t.cancelPromise._cancel(),t.cancelPromise=null,!0):!1}function s(){return l.call(this,this.promise._target()._settledValue())}function c(t){return a(this,t)?void 0:(f.e=t,f)}function l(t){var i=this.promise,l=this.handler;if(!this.called){this.called=!0;var u=this.isFinallyHandler()?l.call(i._boundValue()):l.call(i._boundValue(),t);if(u===r)return u;if(void 0!==u){i._setReturnedNonUndefined();var h=n(u,i);if(h instanceof e){if(null!=this.cancelPromise){if(h._isCancelled()){var _=new p("late cancellation observer");return i._attachExtraTrace(_),f.e=_,f}h.isPending()&&h._attachCancellationCallback(new o(this))}return h._then(s,c,void 0,this,void 0)}}}return i.isRejected()?(a(this),f.e=t,f):(a(this),t)}var u=t("./util"),p=e.CancellationError,f=u.errorObj,h=t("./catch_filter")(r);return i.prototype.isFinallyHandler=function(){return 0===this.type},o.prototype._resultCancelled=function(){a(this.finallyHandler)},e.prototype._passThrough=function(t,e,n,r){return"function"!=typeof t?this.then():this._then(n,r,void 0,new i(this,e,t),void 0)},e.prototype.lastly=e.prototype["finally"]=function(t){return 
this._passThrough(t,0,l,l)},e.prototype.tap=function(t){return this._passThrough(t,1,l)},e.prototype.tapCatch=function(t){var n=arguments.length;if(1===n)return this._passThrough(t,1,void 0,l);var r,i=new Array(n-1),o=0;for(r=0;n-1>r;++r){var a=arguments[r];if(!u.isObject(a))return e.reject(new TypeError("tapCatch statement predicate: expecting an object but got "+u.classString(a)));i[o++]=a}i.length=o;var s=arguments[r];return this._passThrough(h(i,s,this),1,void 0,l)},i}},{"./catch_filter":5,"./util":21}],12:[function(t,e,n){"use strict";e.exports=function(e,n,r,i,o,a){var s=t("./util");s.canEvaluate,s.tryCatch,s.errorObj;e.join=function(){var t,e=arguments.length-1;if(e>0&&"function"==typeof arguments[e]){t=arguments[e];var r}var i=[].slice.call(arguments);t&&i.pop();var r=new n(i).promise();return void 0!==t?r.spread(t):r}}},{"./util":21}],13:[function(t,e,n){"use strict";e.exports=function(e,n,r,i,o){var a=t("./util"),s=a.tryCatch;e.method=function(t){if("function"!=typeof t)throw new e.TypeError("expecting a function but got "+a.classString(t));return function(){var r=new e(n);r._captureStackTrace(),r._pushContext();var i=s(t).apply(this,arguments),a=r._popContext();return o.checkForgottenReturns(i,a,"Promise.method",r),r._resolveFromSyncValue(i),r}},e.attempt=e["try"]=function(t){if("function"!=typeof t)return i("expecting a function but got "+a.classString(t));var r=new e(n);r._captureStackTrace(),r._pushContext();var c;if(arguments.length>1){o.deprecated("calling Promise.try with more than 1 argument");var l=arguments[1],u=arguments[2];c=a.isArray(l)?s(t).apply(u,l):s(t).call(u,l)}else c=s(t)();var p=r._popContext();return o.checkForgottenReturns(c,p,"Promise.try",r),r._resolveFromSyncValue(c),r},e.prototype._resolveFromSyncValue=function(t){t===a.errorObj?this._rejectCallback(t.e,!1):this._resolveCallback(t,!0)}}},{"./util":21}],14:[function(t,e,n){"use strict";function r(t){return t instanceof Error&&u.getPrototypeOf(t)===Error.prototype}function i(t){var e;if(r(t)){e=new l(t),e.name=t.name,e.message=t.message,e.stack=t.stack;for(var n=u.keys(t),i=0;i1){var n,r=new Array(e-1),i=0;for(n=0;e-1>n;++n){var o=arguments[n];if(!h.isObject(o))return p("Catch statement predicate: expecting an object but got "+h.classString(o));r[i++]=o}return r.length=i,t=arguments[n],this.then(void 0,P(r,t,this))}return this.then(void 0,t)},i.prototype.reflect=function(){return this._then(u,u,void 0,this,void 0)},i.prototype.then=function(t,e){if(F.warnings()&&arguments.length>0&&"function"!=typeof t&&"function"!=typeof e){ -var n=".then() only accepts functions but was passed: "+h.classString(t);arguments.length>1&&(n+=", "+h.classString(e)),this._warn(n)}return this._then(t,e,void 0,void 0,void 0)},i.prototype.done=function(t,e){var n=this._then(t,e,void 0,void 0,void 0);n._setIsFinal()},i.prototype.spread=function(t){return"function"!=typeof t?p("expecting a function but got "+h.classString(t)):this.all()._then(t,void 0,void 0,C,void 0)},i.prototype.toJSON=function(){var t={isFulfilled:!1,isRejected:!1,fulfillmentValue:void 0,rejectionReason:void 0};return this.isFulfilled()?(t.fulfillmentValue=this.value(),t.isFulfilled=!0):this.isRejected()&&(t.rejectionReason=this.reason(),t.isRejected=!0),t},i.prototype.all=function(){return arguments.length>0&&this._warn(".all() was passed arguments but it does not take any"),new k(this).promise()},i.prototype.error=function(t){return this.caught(h.originatesFromRejection,t)},i.getNewLibraryCopy=e.exports,i.is=function(t){return t instanceof 
i},i.fromNode=i.fromCallback=function(t){var e=new i(b);e._captureStackTrace();var n=arguments.length>1?!!Object(arguments[1]).multiArgs:!1,r=x(t)(R(e,n));return r===S&&e._rejectCallback(r.e,!0),e._isFateSealed()||e._setAsyncGuaranteed(),e},i.all=function(t){return new k(t).promise()},i.cast=function(t){var e=E(t);return e instanceof i||(e=new i(b),e._captureStackTrace(),e._setFulfilled(),e._rejectionHandler0=t),e},i.resolve=i.fulfilled=i.cast,i.reject=i.rejected=function(t){var e=new i(b);return e._captureStackTrace(),e._rejectCallback(t,!0),e},i.setScheduler=function(t){if("function"!=typeof t)throw new g("expecting a function but got "+h.classString(t));return v.setScheduler(t)},i.prototype._then=function(t,e,n,r,o){var a=void 0!==o,s=a?o:new i(b),l=this._target(),u=l._bitField;a||(s._propagateFrom(this,3),s._captureStackTrace(),void 0===r&&0!==(2097152&this._bitField)&&(r=0!==(50397184&u)?this._boundValue():l===this?void 0:this._boundTo),this._fireEvent("promiseChained",this,s));var p=c();if(0!==(50397184&u)){var f,_,d=l._settlePromiseCtx;0!==(33554432&u)?(_=l._rejectionHandler0,f=t):0!==(16777216&u)?(_=l._fulfillmentHandler0,f=e,l._unsetRejectionIsUnhandled()):(d=l._settlePromiseLateCancellationObserver,_=new m("late cancellation observer"),l._attachExtraTrace(_),f=e),v.invoke(d,l,{handler:null===p?f:"function"==typeof f&&h.domainBind(p,f),promise:s,receiver:r,value:_})}else l._addCallbacks(t,e,s,r,p);return s},i.prototype._length=function(){return 65535&this._bitField},i.prototype._isFateSealed=function(){return 0!==(117506048&this._bitField)},i.prototype._isFollowing=function(){return 67108864===(67108864&this._bitField)},i.prototype._setLength=function(t){this._bitField=-65536&this._bitField|65535&t},i.prototype._setFulfilled=function(){this._bitField=33554432|this._bitField,this._fireEvent("promiseFulfilled",this)},i.prototype._setRejected=function(){this._bitField=16777216|this._bitField,this._fireEvent("promiseRejected",this)},i.prototype._setFollowing=function(){this._bitField=67108864|this._bitField,this._fireEvent("promiseResolved",this)},i.prototype._setIsFinal=function(){this._bitField=4194304|this._bitField},i.prototype._isFinal=function(){return(4194304&this._bitField)>0},i.prototype._unsetCancelled=function(){this._bitField=-65537&this._bitField},i.prototype._setCancelled=function(){this._bitField=65536|this._bitField,this._fireEvent("promiseCancelled",this)},i.prototype._setWillBeCancelled=function(){this._bitField=8388608|this._bitField},i.prototype._setAsyncGuaranteed=function(){v.hasCustomScheduler()||(this._bitField=134217728|this._bitField)},i.prototype._receiverAt=function(t){var e=0===t?this._receiver0:this[4*t-4+3];return e===f?void 0:void 0===e&&this._isBound()?this._boundValue():e},i.prototype._promiseAt=function(t){return this[4*t-4+2]},i.prototype._fulfillmentHandlerAt=function(t){return this[4*t-4+0]},i.prototype._rejectionHandlerAt=function(t){return this[4*t-4+1]},i.prototype._boundValue=function(){},i.prototype._migrateCallback0=function(t){var e=(t._bitField,t._fulfillmentHandler0),n=t._rejectionHandler0,r=t._promise0,i=t._receiverAt(0);void 0===i&&(i=f),this._addCallbacks(e,n,r,i,null)},i.prototype._migrateCallbackAt=function(t,e){var n=t._fulfillmentHandlerAt(e),r=t._rejectionHandlerAt(e),i=t._promiseAt(e),o=t._receiverAt(e);void 0===o&&(o=f),this._addCallbacks(n,r,i,o,null)},i.prototype._addCallbacks=function(t,e,n,r,i){var o=this._length();if(o>=65531&&(o=0,this._setLength(0)),0===o)this._promise0=n,this._receiver0=r,"function"==typeof 
t&&(this._fulfillmentHandler0=null===i?t:h.domainBind(i,t)),"function"==typeof e&&(this._rejectionHandler0=null===i?e:h.domainBind(i,e));else{var a=4*o-4;this[a+2]=n,this[a+3]=r,"function"==typeof t&&(this[a+0]=null===i?t:h.domainBind(i,t)),"function"==typeof e&&(this[a+1]=null===i?e:h.domainBind(i,e))}return this._setLength(o+1),o},i.prototype._proxy=function(t,e){this._addCallbacks(void 0,void 0,e,t,null)},i.prototype._resolveCallback=function(t,e){if(0===(117506048&this._bitField)){if(t===this)return this._rejectCallback(l(),!1);var n=E(t,this);if(!(n instanceof i))return this._fulfill(t);e&&this._propagateFrom(n,2);var r=n._target();if(r===this)return void this._reject(l());var o=r._bitField;if(0===(50397184&o)){var a=this._length();a>0&&r._migrateCallback0(this);for(var s=1;a>s;++s)r._migrateCallbackAt(this,s);this._setFollowing(),this._setLength(0),this._setFollowee(r)}else if(0!==(33554432&o))this._fulfill(r._value());else if(0!==(16777216&o))this._reject(r._reason());else{var c=new m("late cancellation observer");r._attachExtraTrace(c),this._reject(c)}}},i.prototype._rejectCallback=function(t,e,n){var r=h.ensureErrorObject(t),i=r===t;if(!i&&!n&&F.warnings()){var o="a promise was rejected with a non-error: "+h.classString(t);this._warn(o,!0)}this._attachExtraTrace(r,e?i:!1),this._reject(t)},i.prototype._resolveFromExecutor=function(t){if(t!==b){var e=this;this._captureStackTrace(),this._pushContext();var n=!0,r=this._execute(t,function(t){e._resolveCallback(t)},function(t){e._rejectCallback(t,n)});n=!1,this._popContext(),void 0!==r&&e._rejectCallback(r,!0)}},i.prototype._settlePromiseFromHandler=function(t,e,n,r){var i=r._bitField;if(0===(65536&i)){r._pushContext();var o;e===C?n&&"number"==typeof n.length?o=x(t).apply(this._boundValue(),n):(o=S,o.e=new g("cannot .spread() a non-array: "+h.classString(n))):o=x(t).call(e,n);var a=r._popContext();i=r._bitField,0===(65536&i)&&(o===w?r._reject(n):o===S?r._rejectCallback(o.e,!1):(F.checkForgottenReturns(o,a,"",r,this),r._resolveCallback(o)))}},i.prototype._target=function(){for(var t=this;t._isFollowing();)t=t._followee();return t},i.prototype._followee=function(){return this._rejectionHandler0},i.prototype._setFollowee=function(t){this._rejectionHandler0=t},i.prototype._settlePromise=function(t,e,r,o){var a=t instanceof i,s=this._bitField,c=0!==(134217728&s);0!==(65536&s)?(a&&t._invokeInternalOnCancel(),r instanceof T&&r.isFinallyHandler()?(r.cancelPromise=t,x(e).call(r,o)===S&&t._reject(S.e)):e===u?t._fulfill(u.call(r)):r instanceof n?r._promiseCancelled(t):a||t instanceof k?t._cancel():r.cancel()):"function"==typeof e?a?(c&&t._setAsyncGuaranteed(),this._settlePromiseFromHandler(e,r,o,t)):e.call(r,o,t):r instanceof n?r._isResolved()||(0!==(33554432&s)?r._promiseFulfilled(o,t):r._promiseRejected(o,t)):a&&(c&&t._setAsyncGuaranteed(),0!==(33554432&s)?t._fulfill(o):t._reject(o))},i.prototype._settlePromiseLateCancellationObserver=function(t){var e=t.handler,n=t.promise,r=t.receiver,o=t.value;"function"==typeof e?n instanceof i?this._settlePromiseFromHandler(e,r,o,n):e.call(r,o,n):n instanceof i&&n._reject(o)},i.prototype._settlePromiseCtx=function(t){this._settlePromise(t.promise,t.handler,t.receiver,t.value)},i.prototype._settlePromise0=function(t,e,n){var r=this._promise0,i=this._receiverAt(0);this._promise0=void 0,this._receiver0=void 0,this._settlePromise(r,t,i,e)},i.prototype._clearCallbackDataAtIndex=function(t){var e=4*t-4;this[e+2]=this[e+3]=this[e+0]=this[e+1]=void 0},i.prototype._fulfill=function(t){var 
e=this._bitField;if(!((117506048&e)>>>16)){if(t===this){var n=l();return this._attachExtraTrace(n),this._reject(n)}this._setFulfilled(),this._rejectionHandler0=t,(65535&e)>0&&(0!==(134217728&e)?this._settlePromises():v.settlePromises(this),this._dereferenceTrace())}},i.prototype._reject=function(t){var e=this._bitField;if(!((117506048&e)>>>16))return this._setRejected(),this._fulfillmentHandler0=t,this._isFinal()?v.fatalError(t,h.isNode):void((65535&e)>0?v.settlePromises(this):this._ensurePossibleRejectionHandled())},i.prototype._fulfillPromises=function(t,e){for(var n=1;t>n;n++){var r=this._fulfillmentHandlerAt(n),i=this._promiseAt(n),o=this._receiverAt(n);this._clearCallbackDataAtIndex(n),this._settlePromise(i,r,o,e)}},i.prototype._rejectPromises=function(t,e){for(var n=1;t>n;n++){var r=this._rejectionHandlerAt(n),i=this._promiseAt(n),o=this._receiverAt(n);this._clearCallbackDataAtIndex(n),this._settlePromise(i,r,o,e)}},i.prototype._settlePromises=function(){var t=this._bitField,e=65535&t;if(e>0){if(0!==(16842752&t)){var n=this._fulfillmentHandler0;this._settlePromise0(this._rejectionHandler0,n,t),this._rejectPromises(e,n)}else{var r=this._rejectionHandler0;this._settlePromise0(this._fulfillmentHandler0,r,t),this._fulfillPromises(e,r)}this._setLength(0)}this._clearCancellationData()},i.prototype._settledValue=function(){var t=this._bitField;return 0!==(33554432&t)?this._rejectionHandler0:0!==(16777216&t)?this._fulfillmentHandler0:void 0},i.defer=i.pending=function(){F.deprecated("Promise.defer","new Promise");var t=new i(b);return{promise:t,resolve:o,reject:a}},h.notEnumerableProp(i,"_makeSelfResolutionError",l),t("./method")(i,b,E,p,F),t("./bind")(i,b,E,F),t("./cancel")(i,k,p,F),t("./direct_resolve")(i),t("./synchronous_inspection")(i),t("./join")(i,k,E,b,v,c),i.Promise=i,i.version="3.5.3",h.toFastProperties(i),h.toFastProperties(i.prototype),s({a:1}),s({b:2}),s({c:3}),s(1),s(function(){}),s(void 0),s(!1),s(new i(b)),F.setBounds(d.firstLineError,h.lastLineError),i}},{"./async":1,"./bind":2,"./cancel":4,"./catch_filter":5,"./context":6,"./debuggability":7,"./direct_resolve":8,"./errors":9,"./es5":10,"./finally":11,"./join":12,"./method":13,"./nodeback":14,"./promise_array":16,"./synchronous_inspection":19,"./thenables":20,"./util":21}],16:[function(t,e,n){"use strict";e.exports=function(e,n,r,i,o){function a(t){switch(t){case-2:return[];case-3:return{};case-6:return new Map}}function s(t){var r=this._promise=new e(n);t instanceof e&&r._propagateFrom(t,3),r._setOnCancel(this),this._values=t,this._length=0,this._totalResolved=0,this._init(void 0,-2)}var c=t("./util");c.isArray;return c.inherits(s,o),s.prototype.length=function(){return this._length},s.prototype.promise=function(){return this._promise},s.prototype._init=function l(t,n){var o=r(this._values,this._promise);if(o instanceof e){o=o._target();var s=o._bitField;if(this._values=o,0===(50397184&s))return this._promise._setAsyncGuaranteed(),o._then(l,this._reject,void 0,this,n);if(0===(33554432&s))return 0!==(16777216&s)?this._reject(o._reason()):this._cancel();o=o._value()}if(o=c.asArray(o),null===o){var u=i("expecting an array or an iterable object but got "+c.classString(o)).reason();return void this._promise._rejectCallback(u,!1)}return 0===o.length?void(-5===n?this._resolveEmptyArray():this._resolve(a(n))):void this._iterate(o)},s.prototype._iterate=function(t){var n=this.getActualLength(t.length);this._length=n,this._values=this.shouldCopyValues()?new Array(n):this._values;for(var i=this._promise,o=!1,a=null,s=0;n>s;++s){var 
c=r(t[s],i);c instanceof e?(c=c._target(),a=c._bitField):a=null,o?null!==a&&c.suppressUnhandledRejections():null!==a?0===(50397184&a)?(c._proxy(this,s),this._values[s]=c):o=0!==(33554432&a)?this._promiseFulfilled(c._value(),s):0!==(16777216&a)?this._promiseRejected(c._reason(),s):this._promiseCancelled(s):o=this._promiseFulfilled(c,s)}o||i._setAsyncGuaranteed()},s.prototype._isResolved=function(){return null===this._values},s.prototype._resolve=function(t){this._values=null,this._promise._fulfill(t)},s.prototype._cancel=function(){!this._isResolved()&&this._promise._isCancellable()&&(this._values=null,this._promise._cancel())},s.prototype._reject=function(t){this._values=null,this._promise._rejectCallback(t,!1)},s.prototype._promiseFulfilled=function(t,e){this._values[e]=t;var n=++this._totalResolved;return n>=this._length?(this._resolve(this._values),!0):!1},s.prototype._promiseCancelled=function(){return this._cancel(),!0},s.prototype._promiseRejected=function(t){return this._totalResolved++,this._reject(t),!0},s.prototype._resultCancelled=function(){if(!this._isResolved()){var t=this._values;if(this._cancel(),t instanceof e)t.cancel();else for(var n=0;no;++o)n[o+r]=t[o+e],t[o+e]=void 0}function i(t){this._capacity=t,this._length=0,this._front=0}i.prototype._willBeOverCapacity=function(t){return this._capacityn;++n)i[n]=t[n];return i[n]=e,i}function l(t,e,n){if(!F.isES5)return{}.hasOwnProperty.call(t,e)?t[e]:void 0;var r=Object.getOwnPropertyDescriptor(t,e);return null!=r?null==r.get&&null==r.set?r.value:n:void 0}function u(t,e,n){if(o(t))return t;var r={value:n,configurable:!0,enumerable:!1,writable:!0};return F.defineProperty(t,e,r),t}function p(t){throw t}function f(t){try{if("function"==typeof t){var e=F.names(t.prototype),n=F.isES5&&e.length>1,r=e.length>0&&!(1===e.length&&"constructor"===e[0]),i=A.test(t+"")&&F.names(t).length>0;if(n||r||i)return!0}return!1}catch(o){return!1}}function h(t){function e(){}function n(){return typeof r.foo}e.prototype=t;var r=new e;return n(),n(),t}function _(t){return N.test(t)}function d(t,e,n){for(var r=new Array(t),i=0;t>i;++i)r[i]=e+i+n;return r}function v(t){try{return t+""}catch(e){return"[no string representation]"}}function y(t){return t instanceof Error||null!==t&&"object"==typeof t&&"string"==typeof t.message&&"string"==typeof t.name}function g(t){try{u(t,"isOperational",!0)}catch(e){}}function m(t){return null==t?!1:t instanceof Error.__BluebirdErrorTypes__.OperationalError||t.isOperational===!0}function b(t){return y(t)&&F.propertyIsWritable(t,"stack")}function C(t){return{}.toString.call(t)}function w(t,e,n){for(var r=F.names(t),i=0;i10||t[0]>0}(),D.isNode&&D.toFastProperties(process);try{throw new Error}catch(V){D.lastLineError=V}e.exports=D},{"./es5":10}]},{},[3])(3)}),"undefined"!=typeof window&&null!==window?window.P=window.Promise:"undefined"!=typeof self&&null!==self&&(self.P=self.Promise); \ No newline at end of file +!function(t){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{var e;"undefined"!=typeof window?e=window:"undefined"!=typeof global?e=global:"undefined"!=typeof self&&(e=self),e.Promise=t()}}(function(){var t,e,n;return function r(t,e,n){function i(a,s){if(!e[a]){if(!t[a]){var c="function"==typeof _dereq_&&_dereq_;if(!s&&c)return c(a,!0);if(o)return o(a,!0);var l=new Error("Cannot find module '"+a+"'");throw l.code="MODULE_NOT_FOUND",l}var u=e[a]={exports:{}};t[a][0].call(u.exports,function(e){var n=t[a][1][e];return 
i(n?n:e)},u,u.exports,r,t,e,n)}return e[a].exports}for(var o="function"==typeof _dereq_&&_dereq_,a=0;a0;)c(t)}function c(t){var e=t.shift();if("function"!=typeof e)e._settlePromises();else{var n=t.shift(),r=t.shift();e.call(n,r)}}var l;try{throw new Error}catch(u){l=u}var p=t("./schedule"),f=t("./queue"),h=t("./util");r.prototype.setScheduler=function(t){var e=this._schedule;return this._schedule=t,this._customScheduler=!0,e},r.prototype.hasCustomScheduler=function(){return this._customScheduler},r.prototype.enableTrampoline=function(){this._trampolineEnabled=!0},r.prototype.disableTrampolineIfNecessary=function(){h.hasDevTools&&(this._trampolineEnabled=!1)},r.prototype.haveItemsQueued=function(){return this._isTickUsed||this._haveDrainedQueues},r.prototype.fatalError=function(t,e){e?(process.stderr.write("Fatal "+(t instanceof Error?t.stack:t)+"\n"),process.exit(2)):this.throwLater(t)},r.prototype.throwLater=function(t,e){if(1===arguments.length&&(e=t,t=function(){throw e}),"undefined"!=typeof setTimeout)setTimeout(function(){t(e)},0);else try{this._schedule(function(){t(e)})}catch(n){throw new Error("No async scheduler available\n\n See http://goo.gl/MqrFmX\n")}},h.hasDevTools?(r.prototype.invokeLater=function(t,e,n){this._trampolineEnabled?i.call(this,t,e,n):this._schedule(function(){setTimeout(function(){t.call(e,n)},100)})},r.prototype.invoke=function(t,e,n){this._trampolineEnabled?o.call(this,t,e,n):this._schedule(function(){t.call(e,n)})},r.prototype.settlePromises=function(t){this._trampolineEnabled?a.call(this,t):this._schedule(function(){t._settlePromises()})}):(r.prototype.invokeLater=i,r.prototype.invoke=o,r.prototype.settlePromises=a),r.prototype._drainQueues=function(){s(this._normalQueue),this._reset(),this._haveDrainedQueues=!0,s(this._lateQueue)},r.prototype._queueTick=function(){this._isTickUsed||(this._isTickUsed=!0,this._schedule(this.drainQueues))},r.prototype._reset=function(){this._isTickUsed=!1},e.exports=r,e.exports.firstLineError=l},{"./queue":17,"./schedule":18,"./util":21}],2:[function(t,e,n){"use strict";e.exports=function(t,e,n,r){var i=!1,o=function(t,e){this._reject(e)},a=function(t,e){e.promiseRejectionQueued=!0,e.bindingPromise._then(o,o,null,this,t)},s=function(t,e){0===(50397184&this._bitField)&&this._resolveCallback(e.target)},c=function(t,e){e.promiseRejectionQueued||this._reject(t)};t.prototype.bind=function(o){i||(i=!0,t.prototype._propagateFrom=r.propagateFromFunction(),t.prototype._boundValue=r.boundValueFunction());var l=n(o),u=new t(e);u._propagateFrom(this,1);var p=this._target();if(u._setBoundTo(l),l instanceof t){var f={promiseRejectionQueued:!1,promise:u,target:p,bindingPromise:l};p._then(e,a,void 0,u,f),l._then(s,c,void 0,u,f),u._setOnCancel(l)}else u._resolveCallback(p);return u},t.prototype._setBoundTo=function(t){void 0!==t?(this._bitField=2097152|this._bitField,this._boundTo=t):this._bitField=-2097153&this._bitField},t.prototype._isBound=function(){return 2097152===(2097152&this._bitField)},t.bind=function(e,n){return t.resolve(n).bind(e)}}},{}],3:[function(t,e,n){"use strict";function r(){try{Promise===o&&(Promise=i)}catch(t){}return o}var i;"undefined"!=typeof Promise&&(i=Promise);var o=t("./promise")();o.noConflict=r,e.exports=o},{"./promise":15}],4:[function(t,e,n){"use strict";e.exports=function(e,n,r,i){var o=t("./util"),a=o.tryCatch,s=o.errorObj,c=e._async;e.prototype["break"]=e.prototype.cancel=function(){if(!i.cancellation())return this._warn("cancellation is disabled");for(var 
t=this,e=t;t._isCancellable();){if(!t._cancelBy(e)){e._isFollowing()?e._followee().cancel():e._cancelBranched();break}var n=t._cancellationParent;if(null==n||!n._isCancellable()){t._isFollowing()?t._followee().cancel():t._cancelBranched();break}t._isFollowing()&&t._followee().cancel(),t._setWillBeCancelled(),e=t,t=n}},e.prototype._branchHasCancelled=function(){this._branchesRemainingToCancel--},e.prototype._enoughBranchesHaveCancelled=function(){return void 0===this._branchesRemainingToCancel||this._branchesRemainingToCancel<=0},e.prototype._cancelBy=function(t){return t===this?(this._branchesRemainingToCancel=0,this._invokeOnCancel(),!0):(this._branchHasCancelled(),this._enoughBranchesHaveCancelled()?(this._invokeOnCancel(),!0):!1)},e.prototype._cancelBranched=function(){this._enoughBranchesHaveCancelled()&&this._cancel()},e.prototype._cancel=function(){this._isCancellable()&&(this._setCancelled(),c.invoke(this._cancelPromises,this,void 0))},e.prototype._cancelPromises=function(){this._length()>0&&this._settlePromises()},e.prototype._unsetOnCancel=function(){this._onCancelField=void 0},e.prototype._isCancellable=function(){return this.isPending()&&!this._isCancelled()},e.prototype.isCancellable=function(){return this.isPending()&&!this.isCancelled()},e.prototype._doInvokeOnCancel=function(t,e){if(o.isArray(t))for(var n=0;n=0?o[t]:void 0}var i=!1,o=[];return t.prototype._promiseCreated=function(){},t.prototype._pushContext=function(){},t.prototype._popContext=function(){return null},t._peekContext=t.prototype._peekContext=function(){},e.prototype._pushContext=function(){void 0!==this._trace&&(this._trace._promiseCreated=null,o.push(this._trace))},e.prototype._popContext=function(){if(void 0!==this._trace){var t=o.pop(),e=t._promiseCreated;return t._promiseCreated=null,e}return null},e.CapturedTrace=null,e.create=n,e.deactivateLongStackTraces=function(){},e.activateLongStackTraces=function(){var n=t.prototype._pushContext,o=t.prototype._popContext,a=t._peekContext,s=t.prototype._peekContext,c=t.prototype._promiseCreated;e.deactivateLongStackTraces=function(){t.prototype._pushContext=n,t.prototype._popContext=o,t._peekContext=a,t.prototype._peekContext=s,t.prototype._promiseCreated=c,i=!1},i=!0,t.prototype._pushContext=e.prototype._pushContext,t.prototype._popContext=e.prototype._popContext,t._peekContext=t.prototype._peekContext=r,t.prototype._promiseCreated=function(){var t=this._peekContext();t&&null==t._promiseCreated&&(t._promiseCreated=this)}},e}},{}],7:[function(t,e,n){"use strict";e.exports=function(e,n){function r(t,e){return{promise:e}}function i(){return!1}function o(t,e,n){var r=this;try{t(e,n,function(t){if("function"!=typeof t)throw new TypeError("onCancel must be a function, got: "+H.toString(t));r._attachCancellationCallback(t)})}catch(i){return i}}function a(t){if(!this._isCancellable())return this;var e=this._onCancel();void 0!==e?H.isArray(e)?e.push(t):this._setOnCancel([e,t]):this._setOnCancel(t)}function s(){return this._onCancelField}function c(t){this._onCancelField=t}function l(){this._cancellationParent=void 0,this._onCancelField=void 0}function u(t,e){if(0!==(1&e)){this._cancellationParent=t;var n=t._branchesRemainingToCancel;void 0===n&&(n=0),t._branchesRemainingToCancel=n+1}0!==(2&e)&&t._isBound()&&this._setBoundTo(t._boundTo)}function p(t,e){0!==(2&e)&&t._isBound()&&this._setBoundTo(t._boundTo)}function f(){var t=this._boundTo;return void 0!==t&&t instanceof e?t.isFulfilled()?t.value():void 0:t}function h(){this._trace=new x(this._peekContext())}function 
d(t,e){if(V(t)){var n=this._trace;if(void 0!==n&&e&&(n=n._parent),void 0!==n)n.attachExtraTrace(t);else if(!t.__stackCleaned__){var r=k(t);H.notEnumerableProp(t,"stack",r.message+"\n"+r.stack.join("\n")),H.notEnumerableProp(t,"__stackCleaned__",!0)}}}function _(){this._trace=void 0}function v(t,e,n,r,i){if(void 0===t&&null!==e&&J){if(void 0!==i&&i._returnedNonUndefined())return;if(0===(65535&r._bitField))return;n&&(n+=" ");var o="",a="";if(e._trace){for(var s=e._trace.stack.split("\n"),c=w(s),l=c.length-1;l>=0;--l){var u=c[l];if(!q.test(u)){var p=u.match(G);p&&(o="at "+p[1]+":"+p[2]+":"+p[3]+" ");break}}if(c.length>0)for(var f=c[0],l=0;l0&&(a="\n"+s[l-1]);break}}var h="a promise was created in a "+n+"handler "+o+"but was not returned from it, see http://goo.gl/rRqMUw"+a;r._warn(h,!0,e)}}function y(t,e){var n=t+" is deprecated and will be removed in a future version.";return e&&(n+=" Use "+e+" instead."),g(n)}function g(t,n,r){if(st.warnings){var i,o=new I(t);if(n)r._attachExtraTrace(o);else if(st.longStackTraces&&(i=e._peekContext()))i.attachExtraTrace(o);else{var a=k(o);o.stack=a.message+"\n"+a.stack.join("\n")}nt("warning",o)||j(o,"",!0)}}function m(t,e){for(var n=0;n=0;--s)if(r[s]===o){a=s;break}for(var s=a;s>=0;--s){var c=r[s];if(e[i]!==c)break;e.pop(),i--}e=r}}function w(t){for(var e=[],n=0;n0&&"SyntaxError"!=t.name&&(e=e.slice(n)),e}function k(t){var e=t.stack,n=t.toString();return e="string"==typeof e&&e.length>0?E(t):[" (No stack trace)"],{message:n,stack:"SyntaxError"==t.name?e:w(e)}}function j(t,e,n){if("undefined"!=typeof console){var r;if(H.isObject(t)){var i=t.stack;r=e+W(i,t)}else r=e+String(t);"function"==typeof L?L(r,n):("function"==typeof console.log||"object"==typeof console.log)&&console.log(r)}}function F(t,e,n,r){var i=!1;try{"function"==typeof e&&(i=!0,"rejectionHandled"===t?e(r):e(n,r))}catch(o){U.throwLater(o)}"unhandledRejection"===t?nt(t,n,r)||i||j(n,"Unhandled rejection "):nt(t,r)}function T(t){var e;if("function"==typeof t)e="[function "+(t.name||"anonymous")+"]";else{e=t&&"function"==typeof t.toString?t.toString():H.toString(t);var n=/\[object [a-zA-Z0-9$_]+\]/;if(n.test(e))try{var r=JSON.stringify(t);e=r}catch(i){}0===e.length&&(e="(empty array)")}return"(<"+P(e)+">, no stack trace)"}function P(t){var e=41;return t.lengtha||0>s||!n||!r||n!==r||a>=s||(it=function(t){if(Q.test(t))return!0;var e=O(t);return e&&e.fileName===n&&a<=e.line&&e.line<=s?!0:!1})}}function x(t){this._parent=t,this._promisesCreated=0;var e=this._length=1+(void 0===t?0:t._length);at(this,x),e>32&&this.uncycle()}var A,N,L,B=e._getDomain,U=e._async,I=t("./errors").Warning,H=t("./util"),D=t("./es5"),V=H.canAttachTrace,Q=/[\\\/]bluebird[\\\/]js[\\\/](release|debug|instrumented)/,q=/\((?:timers\.js):\d+:\d+\)/,G=/[\/<\(](.+?):(\d+):(\d+)\)?\s*$/,M=null,W=null,$=!1,z=!(0==H.env("BLUEBIRD_DEBUG")||!H.env("BLUEBIRD_DEBUG")&&"development"!==H.env("NODE_ENV")),X=!(0==H.env("BLUEBIRD_WARNINGS")||!z&&!H.env("BLUEBIRD_WARNINGS")),K=!(0==H.env("BLUEBIRD_LONG_STACK_TRACES")||!z&&!H.env("BLUEBIRD_LONG_STACK_TRACES")),J=0!=H.env("BLUEBIRD_W_FORGOTTEN_RETURN")&&(X||!!H.env("BLUEBIRD_W_FORGOTTEN_RETURN"));e.prototype.suppressUnhandledRejections=function(){var t=this._target();t._bitField=-1048577&t._bitField|524288},e.prototype._ensurePossibleRejectionHandled=function(){if(0===(524288&this._bitField)){this._setRejectionIsUnhandled();var t=this;setTimeout(function(){t._notifyUnhandledRejection()},1)}},e.prototype._notifyUnhandledRejectionIsHandled=function(){F("rejectionHandled",A,void 
0,this)},e.prototype._setReturnedNonUndefined=function(){this._bitField=268435456|this._bitField},e.prototype._returnedNonUndefined=function(){return 0!==(268435456&this._bitField)},e.prototype._notifyUnhandledRejection=function(){if(this._isRejectionUnhandled()){var t=this._settledValue();this._setUnhandledRejectionIsNotified(),F("unhandledRejection",N,t,this)}},e.prototype._setUnhandledRejectionIsNotified=function(){this._bitField=262144|this._bitField},e.prototype._unsetUnhandledRejectionIsNotified=function(){this._bitField=-262145&this._bitField},e.prototype._isUnhandledRejectionNotified=function(){return(262144&this._bitField)>0},e.prototype._setRejectionIsUnhandled=function(){this._bitField=1048576|this._bitField},e.prototype._unsetRejectionIsUnhandled=function(){this._bitField=-1048577&this._bitField,this._isUnhandledRejectionNotified()&&(this._unsetUnhandledRejectionIsNotified(),this._notifyUnhandledRejectionIsHandled())},e.prototype._isRejectionUnhandled=function(){return(1048576&this._bitField)>0},e.prototype._warn=function(t,e,n){return g(t,e,n||this)},e.onPossiblyUnhandledRejection=function(t){var e=B();N="function"==typeof t?null===e?t:H.domainBind(e,t):void 0},e.onUnhandledRejectionHandled=function(t){var e=B();A="function"==typeof t?null===e?t:H.domainBind(e,t):void 0};var Y=function(){};e.longStackTraces=function(){if(U.haveItemsQueued()&&!st.longStackTraces)throw new Error("cannot enable long stack traces after promises have been created\n\n See http://goo.gl/MqrFmX\n");if(!st.longStackTraces&&S()){var t=e.prototype._captureStackTrace,r=e.prototype._attachExtraTrace,i=e.prototype._dereferenceTrace;st.longStackTraces=!0,Y=function(){if(U.haveItemsQueued()&&!st.longStackTraces)throw new Error("cannot enable long stack traces after promises have been created\n\n See http://goo.gl/MqrFmX\n");e.prototype._captureStackTrace=t,e.prototype._attachExtraTrace=r,e.prototype._dereferenceTrace=i,n.deactivateLongStackTraces(),U.enableTrampoline(),st.longStackTraces=!1},e.prototype._captureStackTrace=h,e.prototype._attachExtraTrace=d,e.prototype._dereferenceTrace=_,n.activateLongStackTraces(),U.disableTrampolineIfNecessary()}},e.hasLongStackTraces=function(){return st.longStackTraces&&S()};var Z=function(){try{if("function"==typeof CustomEvent){var t=new CustomEvent("CustomEvent");return H.global.dispatchEvent(t),function(t,e){var n={detail:e,cancelable:!0};D.defineProperty(n,"promise",{value:e.promise}),D.defineProperty(n,"reason",{value:e.reason});var r=new CustomEvent(t.toLowerCase(),n);return!H.global.dispatchEvent(r)}}if("function"==typeof Event){var t=new Event("CustomEvent");return H.global.dispatchEvent(t),function(t,e){var n=new Event(t.toLowerCase(),{cancelable:!0});return n.detail=e,D.defineProperty(n,"promise",{value:e.promise}),D.defineProperty(n,"reason",{value:e.reason}),!H.global.dispatchEvent(n)}}var t=document.createEvent("CustomEvent");return t.initCustomEvent("testingtheevent",!1,!0,{}),H.global.dispatchEvent(t),function(t,e){var n=document.createEvent("CustomEvent");return n.initCustomEvent(t.toLowerCase(),!1,!0,e),!H.global.dispatchEvent(n)}}catch(e){}return function(){return!1}}(),tt=function(){return H.isNode?function(){return process.emit.apply(process,arguments)}:H.global?function(t){var e="on"+t.toLowerCase(),n=H.global[e];return 
n?(n.apply(H.global,[].slice.call(arguments,1)),!0):!1}:function(){return!1}}(),et={promiseCreated:r,promiseFulfilled:r,promiseRejected:r,promiseResolved:r,promiseCancelled:r,promiseChained:function(t,e,n){return{promise:e,child:n}},warning:function(t,e){return{warning:e}},unhandledRejection:function(t,e,n){return{reason:e,promise:n}},rejectionHandled:r},nt=function(t){var e=!1;try{e=tt.apply(null,arguments)}catch(n){U.throwLater(n),e=!0}var r=!1;try{r=Z(t,et[t].apply(null,arguments))}catch(n){U.throwLater(n),r=!0}return r||e};e.config=function(t){if(t=Object(t),"longStackTraces"in t&&(t.longStackTraces?e.longStackTraces():!t.longStackTraces&&e.hasLongStackTraces()&&Y()),"warnings"in t){var n=t.warnings;st.warnings=!!n,J=st.warnings,H.isObject(n)&&"wForgottenReturn"in n&&(J=!!n.wForgottenReturn)}if("cancellation"in t&&t.cancellation&&!st.cancellation){if(U.haveItemsQueued())throw new Error("cannot enable cancellation after promises are in use");e.prototype._clearCancellationData=l,e.prototype._propagateFrom=u,e.prototype._onCancel=s,e.prototype._setOnCancel=c,e.prototype._attachCancellationCallback=a,e.prototype._execute=o,rt=u,st.cancellation=!0}return"monitoring"in t&&(t.monitoring&&!st.monitoring?(st.monitoring=!0,e.prototype._fireEvent=nt):!t.monitoring&&st.monitoring&&(st.monitoring=!1,e.prototype._fireEvent=i)),e},e.prototype._fireEvent=i,e.prototype._execute=function(t,e,n){try{t(e,n)}catch(r){return r}},e.prototype._onCancel=function(){},e.prototype._setOnCancel=function(t){},e.prototype._attachCancellationCallback=function(t){},e.prototype._captureStackTrace=function(){},e.prototype._attachExtraTrace=function(){},e.prototype._dereferenceTrace=function(){},e.prototype._clearCancellationData=function(){},e.prototype._propagateFrom=function(t,e){};var rt=p,it=function(){return!1},ot=/[\/<\(]([^:\/]+):(\d+):(?:\d+)\)?\s*$/;H.inherits(x,Error),n.CapturedTrace=x,x.prototype.uncycle=function(){var t=this._length;if(!(2>t)){for(var e=[],n={},r=0,i=this;void 0!==i;++r)e.push(i),i=i._parent;t=this._length=r;for(var r=t-1;r>=0;--r){var o=e[r].stack;void 0===n[o]&&(n[o]=r)}for(var r=0;t>r;++r){var a=e[r].stack,s=n[a];if(void 0!==s&&s!==r){s>0&&(e[s-1]._parent=void 0,e[s-1]._length=1),e[r]._parent=void 0,e[r]._length=1;var c=r>0?e[r-1]:this;t-1>s?(c._parent=e[s+1],c._parent.uncycle(),c._length=c._parent._length+1):(c._parent=void 0,c._length=1);for(var l=c._length+1,u=r-2;u>=0;--u)e[u]._length=l,l++;return}}}},x.prototype.attachExtraTrace=function(t){if(!t.__stackCleaned__){this.uncycle();for(var e=k(t),n=e.message,r=[e.stack],i=this;void 0!==i;)r.push(w(i.stack.split("\n"))),i=i._parent;C(r),b(r),H.notEnumerableProp(t,"stack",m(n,r)),H.notEnumerableProp(t,"__stackCleaned__",!0)}};var at=function(){var t=/^\s*at\s*/,e=function(t,e){return"string"==typeof t?t:void 0!==e.name&&void 0!==e.message?e.toString():T(e)};if("number"==typeof Error.stackTraceLimit&&"function"==typeof Error.captureStackTrace){Error.stackTraceLimit+=6,M=t,W=e;var n=Error.captureStackTrace;return it=function(t){return Q.test(t)},function(t,e){Error.stackTraceLimit+=6,n(t,e),Error.stackTraceLimit-=6}}var r=new Error;if("string"==typeof r.stack&&r.stack.split("\n")[0].indexOf("stackDetection@")>=0)return M=/@/,W=e,$=!0,function(t){t.stack=(new Error).stack};var i;try{throw new Error}catch(o){i="stack"in o}return"stack"in r||!i||"number"!=typeof Error.stackTraceLimit?(W=function(t,e){return"string"==typeof t?t:"object"!=typeof e&&"function"!=typeof e||void 0===e.name||void 
0===e.message?T(e):e.toString()},null):(M=t,W=e,function(t){Error.stackTraceLimit+=6;try{throw new Error}catch(e){t.stack=e.stack}Error.stackTraceLimit-=6})}([]);"undefined"!=typeof console&&"undefined"!=typeof console.warn&&(L=function(t){console.warn(t)},H.isNode&&process.stderr.isTTY?L=function(t,e){var n=e?"":"";console.warn(n+t+"\n")}:H.isNode||"string"!=typeof(new Error).stack||(L=function(t,e){console.warn("%c"+t,e?"color: darkorange":"color: red")}));var st={warnings:X,longStackTraces:!1,cancellation:!1,monitoring:!1};return K&&e.longStackTraces(),{longStackTraces:function(){return st.longStackTraces},warnings:function(){return st.warnings},cancellation:function(){return st.cancellation},monitoring:function(){return st.monitoring},propagateFromFunction:function(){return rt},boundValueFunction:function(){return f},checkForgottenReturns:v,setBounds:R,warn:g,deprecated:y,CapturedTrace:x,fireDomEvent:Z,fireGlobalEvent:tt}}},{"./errors":9,"./es5":10,"./util":21}],8:[function(t,e,n){"use strict";e.exports=function(t){function e(){return this.value}function n(){throw this.reason}t.prototype["return"]=t.prototype.thenReturn=function(n){return n instanceof t&&n.suppressUnhandledRejections(),this._then(e,void 0,void 0,{value:n},void 0)},t.prototype["throw"]=t.prototype.thenThrow=function(t){return this._then(n,void 0,void 0,{reason:t},void 0)},t.prototype.catchThrow=function(t){if(arguments.length<=1)return this._then(void 0,n,void 0,{reason:t},void 0);var e=arguments[1],r=function(){throw e};return this.caught(t,r)},t.prototype.catchReturn=function(n){if(arguments.length<=1)return n instanceof t&&n.suppressUnhandledRejections(),this._then(void 0,e,void 0,{value:n},void 0);var r=arguments[1];r instanceof t&&r.suppressUnhandledRejections();var i=function(){return r};return this.caught(n,i)}}},{}],9:[function(t,e,n){"use strict";function r(t,e){function n(r){return this instanceof n?(p(this,"message","string"==typeof r?r:e),p(this,"name",t),void(Error.captureStackTrace?Error.captureStackTrace(this,this.constructor):Error.call(this))):new n(r)}return u(n,Error),n}function i(t){return this instanceof i?(p(this,"name","OperationalError"),p(this,"message",t),this.cause=t,this.isOperational=!0,void(t instanceof Error?(p(this,"message",t.message),p(this,"stack",t.stack)):Error.captureStackTrace&&Error.captureStackTrace(this,this.constructor))):new i(t)}var o,a,s=t("./es5"),c=s.freeze,l=t("./util"),u=l.inherits,p=l.notEnumerableProp,f=r("Warning","warning"),h=r("CancellationError","cancellation error"),d=r("TimeoutError","timeout error"),_=r("AggregateError","aggregate error");try{o=TypeError,a=RangeError}catch(v){o=r("TypeError","type error"),a=r("RangeError","range error")}for(var y="join pop push shift unshift slice filter forEach some every map indexOf lastIndexOf reduce reduceRight sort reverse".split(" "),g=0;g1?t.cancelPromise._reject(e):t.cancelPromise._cancel(),t.cancelPromise=null,!0):!1}function s(){return l.call(this,this.promise._target()._settledValue())}function c(t){return a(this,t)?void 0:(f.e=t,f)}function l(t){var i=this.promise,l=this.handler;if(!this.called){this.called=!0;var u=this.isFinallyHandler()?l.call(i._boundValue()):l.call(i._boundValue(),t);if(u===r)return u;if(void 0!==u){i._setReturnedNonUndefined();var h=n(u,i);if(h instanceof e){if(null!=this.cancelPromise){if(h._isCancelled()){var d=new p("late cancellation observer");return i._attachExtraTrace(d),f.e=d,f}h.isPending()&&h._attachCancellationCallback(new o(this))}return h._then(s,c,void 0,this,void 0)}}}return 
i.isRejected()?(a(this),f.e=t,f):(a(this),t)}var u=t("./util"),p=e.CancellationError,f=u.errorObj,h=t("./catch_filter")(r);return i.prototype.isFinallyHandler=function(){return 0===this.type},o.prototype._resultCancelled=function(){a(this.finallyHandler)},e.prototype._passThrough=function(t,e,n,r){return"function"!=typeof t?this.then():this._then(n,r,void 0,new i(this,e,t),void 0)},e.prototype.lastly=e.prototype["finally"]=function(t){return this._passThrough(t,0,l,l)},e.prototype.tap=function(t){return this._passThrough(t,1,l)},e.prototype.tapCatch=function(t){var n=arguments.length;if(1===n)return this._passThrough(t,1,void 0,l);var r,i=new Array(n-1),o=0;for(r=0;n-1>r;++r){var a=arguments[r];if(!u.isObject(a))return e.reject(new TypeError("tapCatch statement predicate: expecting an object but got "+u.classString(a)));i[o++]=a}i.length=o;var s=arguments[r];return this._passThrough(h(i,s,this),1,void 0,l)},i}},{"./catch_filter":5,"./util":21}],12:[function(t,e,n){"use strict";e.exports=function(e,n,r,i,o,a){var s=t("./util");s.canEvaluate,s.tryCatch,s.errorObj;e.join=function(){var t,e=arguments.length-1;if(e>0&&"function"==typeof arguments[e]){t=arguments[e];var r}var i=[].slice.call(arguments);t&&i.pop();var r=new n(i).promise();return void 0!==t?r.spread(t):r}}},{"./util":21}],13:[function(t,e,n){"use strict";e.exports=function(e,n,r,i,o){var a=t("./util"),s=a.tryCatch;e.method=function(t){if("function"!=typeof t)throw new e.TypeError("expecting a function but got "+a.classString(t));return function(){var r=new e(n);r._captureStackTrace(),r._pushContext();var i=s(t).apply(this,arguments),a=r._popContext();return o.checkForgottenReturns(i,a,"Promise.method",r),r._resolveFromSyncValue(i),r}},e.attempt=e["try"]=function(t){if("function"!=typeof t)return i("expecting a function but got "+a.classString(t));var r=new e(n);r._captureStackTrace(),r._pushContext();var c;if(arguments.length>1){o.deprecated("calling Promise.try with more than 1 argument");var l=arguments[1],u=arguments[2];c=a.isArray(l)?s(t).apply(u,l):s(t).call(u,l)}else c=s(t)();var p=r._popContext();return o.checkForgottenReturns(c,p,"Promise.try",r),r._resolveFromSyncValue(c),r},e.prototype._resolveFromSyncValue=function(t){t===a.errorObj?this._rejectCallback(t.e,!1):this._resolveCallback(t,!0)}}},{"./util":21}],14:[function(t,e,n){"use strict";function r(t){return t instanceof Error&&u.getPrototypeOf(t)===Error.prototype}function i(t){var e;if(r(t)){e=new l(t),e.name=t.name,e.message=t.message,e.stack=t.stack;for(var n=u.keys(t),i=0;i1){var n,r=new Array(e-1),i=0;for(n=0;e-1>n;++n){var o=arguments[n];if(!h.isObject(o))return p("Catch statement predicate: expecting an object but got "+h.classString(o));r[i++]=o}if(r.length=i,t=arguments[n],"function"!=typeof t)throw new g("The last argument to .catch() must be a function, got "+h.toString(t));return this.then(void 0,P(r,t,this))}return this.then(void 0,t)},i.prototype.reflect=function(){return this._then(u,u,void 0,this,void 0); +},i.prototype.then=function(t,e){if(F.warnings()&&arguments.length>0&&"function"!=typeof t&&"function"!=typeof e){var n=".then() only accepts functions but was passed: "+h.classString(t);arguments.length>1&&(n+=", "+h.classString(e)),this._warn(n)}return this._then(t,e,void 0,void 0,void 0)},i.prototype.done=function(t,e){var n=this._then(t,e,void 0,void 0,void 0);n._setIsFinal()},i.prototype.spread=function(t){return"function"!=typeof t?p("expecting a function but got "+h.classString(t)):this.all()._then(t,void 0,void 0,C,void 
0)},i.prototype.toJSON=function(){var t={isFulfilled:!1,isRejected:!1,fulfillmentValue:void 0,rejectionReason:void 0};return this.isFulfilled()?(t.fulfillmentValue=this.value(),t.isFulfilled=!0):this.isRejected()&&(t.rejectionReason=this.reason(),t.isRejected=!0),t},i.prototype.all=function(){return arguments.length>0&&this._warn(".all() was passed arguments but it does not take any"),new k(this).promise()},i.prototype.error=function(t){return this.caught(h.originatesFromRejection,t)},i.getNewLibraryCopy=e.exports,i.is=function(t){return t instanceof i},i.fromNode=i.fromCallback=function(t){var e=new i(b);e._captureStackTrace();var n=arguments.length>1?!!Object(arguments[1]).multiArgs:!1,r=R(t)(S(e,n));return r===O&&e._rejectCallback(r.e,!0),e._isFateSealed()||e._setAsyncGuaranteed(),e},i.all=function(t){return new k(t).promise()},i.cast=function(t){var e=E(t);return e instanceof i||(e=new i(b),e._captureStackTrace(),e._setFulfilled(),e._rejectionHandler0=t),e},i.resolve=i.fulfilled=i.cast,i.reject=i.rejected=function(t){var e=new i(b);return e._captureStackTrace(),e._rejectCallback(t,!0),e},i.setScheduler=function(t){if("function"!=typeof t)throw new g("expecting a function but got "+h.classString(t));return v.setScheduler(t)},i.prototype._then=function(t,e,n,r,o){var a=void 0!==o,s=a?o:new i(b),l=this._target(),u=l._bitField;a||(s._propagateFrom(this,3),s._captureStackTrace(),void 0===r&&0!==(2097152&this._bitField)&&(r=0!==(50397184&u)?this._boundValue():l===this?void 0:this._boundTo),this._fireEvent("promiseChained",this,s));var p=c();if(0!==(50397184&u)){var f,d,_=l._settlePromiseCtx;0!==(33554432&u)?(d=l._rejectionHandler0,f=t):0!==(16777216&u)?(d=l._fulfillmentHandler0,f=e,l._unsetRejectionIsUnhandled()):(_=l._settlePromiseLateCancellationObserver,d=new m("late cancellation observer"),l._attachExtraTrace(d),f=e),v.invoke(_,l,{handler:null===p?f:"function"==typeof f&&h.domainBind(p,f),promise:s,receiver:r,value:d})}else l._addCallbacks(t,e,s,r,p);return s},i.prototype._length=function(){return 65535&this._bitField},i.prototype._isFateSealed=function(){return 0!==(117506048&this._bitField)},i.prototype._isFollowing=function(){return 67108864===(67108864&this._bitField)},i.prototype._setLength=function(t){this._bitField=-65536&this._bitField|65535&t},i.prototype._setFulfilled=function(){this._bitField=33554432|this._bitField,this._fireEvent("promiseFulfilled",this)},i.prototype._setRejected=function(){this._bitField=16777216|this._bitField,this._fireEvent("promiseRejected",this)},i.prototype._setFollowing=function(){this._bitField=67108864|this._bitField,this._fireEvent("promiseResolved",this)},i.prototype._setIsFinal=function(){this._bitField=4194304|this._bitField},i.prototype._isFinal=function(){return(4194304&this._bitField)>0},i.prototype._unsetCancelled=function(){this._bitField=-65537&this._bitField},i.prototype._setCancelled=function(){this._bitField=65536|this._bitField,this._fireEvent("promiseCancelled",this)},i.prototype._setWillBeCancelled=function(){this._bitField=8388608|this._bitField},i.prototype._setAsyncGuaranteed=function(){v.hasCustomScheduler()||(this._bitField=134217728|this._bitField)},i.prototype._receiverAt=function(t){var e=0===t?this._receiver0:this[4*t-4+3];return e===f?void 0:void 0===e&&this._isBound()?this._boundValue():e},i.prototype._promiseAt=function(t){return this[4*t-4+2]},i.prototype._fulfillmentHandlerAt=function(t){return this[4*t-4+0]},i.prototype._rejectionHandlerAt=function(t){return 
this[4*t-4+1]},i.prototype._boundValue=function(){},i.prototype._migrateCallback0=function(t){var e=(t._bitField,t._fulfillmentHandler0),n=t._rejectionHandler0,r=t._promise0,i=t._receiverAt(0);void 0===i&&(i=f),this._addCallbacks(e,n,r,i,null)},i.prototype._migrateCallbackAt=function(t,e){var n=t._fulfillmentHandlerAt(e),r=t._rejectionHandlerAt(e),i=t._promiseAt(e),o=t._receiverAt(e);void 0===o&&(o=f),this._addCallbacks(n,r,i,o,null)},i.prototype._addCallbacks=function(t,e,n,r,i){var o=this._length();if(o>=65531&&(o=0,this._setLength(0)),0===o)this._promise0=n,this._receiver0=r,"function"==typeof t&&(this._fulfillmentHandler0=null===i?t:h.domainBind(i,t)),"function"==typeof e&&(this._rejectionHandler0=null===i?e:h.domainBind(i,e));else{var a=4*o-4;this[a+2]=n,this[a+3]=r,"function"==typeof t&&(this[a+0]=null===i?t:h.domainBind(i,t)),"function"==typeof e&&(this[a+1]=null===i?e:h.domainBind(i,e))}return this._setLength(o+1),o},i.prototype._proxy=function(t,e){this._addCallbacks(void 0,void 0,e,t,null)},i.prototype._resolveCallback=function(t,e){if(0===(117506048&this._bitField)){if(t===this)return this._rejectCallback(l(),!1);var n=E(t,this);if(!(n instanceof i))return this._fulfill(t);e&&this._propagateFrom(n,2);var r=n._target();if(r===this)return void this._reject(l());var o=r._bitField;if(0===(50397184&o)){var a=this._length();a>0&&r._migrateCallback0(this);for(var s=1;a>s;++s)r._migrateCallbackAt(this,s);this._setFollowing(),this._setLength(0),this._setFollowee(r)}else if(0!==(33554432&o))this._fulfill(r._value());else if(0!==(16777216&o))this._reject(r._reason());else{var c=new m("late cancellation observer");r._attachExtraTrace(c),this._reject(c)}}},i.prototype._rejectCallback=function(t,e,n){var r=h.ensureErrorObject(t),i=r===t;if(!i&&!n&&F.warnings()){var o="a promise was rejected with a non-error: "+h.classString(t);this._warn(o,!0)}this._attachExtraTrace(r,e?i:!1),this._reject(t)},i.prototype._resolveFromExecutor=function(t){if(t!==b){var e=this;this._captureStackTrace(),this._pushContext();var n=!0,r=this._execute(t,function(t){e._resolveCallback(t)},function(t){e._rejectCallback(t,n)});n=!1,this._popContext(),void 0!==r&&e._rejectCallback(r,!0)}},i.prototype._settlePromiseFromHandler=function(t,e,n,r){var i=r._bitField;if(0===(65536&i)){r._pushContext();var o;e===C?n&&"number"==typeof n.length?o=R(t).apply(this._boundValue(),n):(o=O,o.e=new g("cannot .spread() a non-array: "+h.classString(n))):o=R(t).call(e,n);var a=r._popContext();i=r._bitField,0===(65536&i)&&(o===w?r._reject(n):o===O?r._rejectCallback(o.e,!1):(F.checkForgottenReturns(o,a,"",r,this),r._resolveCallback(o)))}},i.prototype._target=function(){for(var t=this;t._isFollowing();)t=t._followee();return t},i.prototype._followee=function(){return this._rejectionHandler0},i.prototype._setFollowee=function(t){this._rejectionHandler0=t},i.prototype._settlePromise=function(t,e,r,o){var a=t instanceof i,s=this._bitField,c=0!==(134217728&s);0!==(65536&s)?(a&&t._invokeInternalOnCancel(),r instanceof T&&r.isFinallyHandler()?(r.cancelPromise=t,R(e).call(r,o)===O&&t._reject(O.e)):e===u?t._fulfill(u.call(r)):r instanceof n?r._promiseCancelled(t):a||t instanceof k?t._cancel():r.cancel()):"function"==typeof e?a?(c&&t._setAsyncGuaranteed(),this._settlePromiseFromHandler(e,r,o,t)):e.call(r,o,t):r instanceof n?r._isResolved()||(0!==(33554432&s)?r._promiseFulfilled(o,t):r._promiseRejected(o,t)):a&&(c&&t._setAsyncGuaranteed(),0!==(33554432&s)?t._fulfill(o):t._reject(o))},i.prototype._settlePromiseLateCancellationObserver=function(t){var 
e=t.handler,n=t.promise,r=t.receiver,o=t.value;"function"==typeof e?n instanceof i?this._settlePromiseFromHandler(e,r,o,n):e.call(r,o,n):n instanceof i&&n._reject(o)},i.prototype._settlePromiseCtx=function(t){this._settlePromise(t.promise,t.handler,t.receiver,t.value)},i.prototype._settlePromise0=function(t,e,n){var r=this._promise0,i=this._receiverAt(0);this._promise0=void 0,this._receiver0=void 0,this._settlePromise(r,t,i,e)},i.prototype._clearCallbackDataAtIndex=function(t){var e=4*t-4;this[e+2]=this[e+3]=this[e+0]=this[e+1]=void 0},i.prototype._fulfill=function(t){var e=this._bitField;if(!((117506048&e)>>>16)){if(t===this){var n=l();return this._attachExtraTrace(n),this._reject(n)}this._setFulfilled(),this._rejectionHandler0=t,(65535&e)>0&&(0!==(134217728&e)?this._settlePromises():v.settlePromises(this),this._dereferenceTrace())}},i.prototype._reject=function(t){var e=this._bitField;if(!((117506048&e)>>>16))return this._setRejected(),this._fulfillmentHandler0=t,this._isFinal()?v.fatalError(t,h.isNode):void((65535&e)>0?v.settlePromises(this):this._ensurePossibleRejectionHandled())},i.prototype._fulfillPromises=function(t,e){for(var n=1;t>n;n++){var r=this._fulfillmentHandlerAt(n),i=this._promiseAt(n),o=this._receiverAt(n);this._clearCallbackDataAtIndex(n),this._settlePromise(i,r,o,e)}},i.prototype._rejectPromises=function(t,e){for(var n=1;t>n;n++){var r=this._rejectionHandlerAt(n),i=this._promiseAt(n),o=this._receiverAt(n);this._clearCallbackDataAtIndex(n),this._settlePromise(i,r,o,e)}},i.prototype._settlePromises=function(){var t=this._bitField,e=65535&t;if(e>0){if(0!==(16842752&t)){var n=this._fulfillmentHandler0;this._settlePromise0(this._rejectionHandler0,n,t),this._rejectPromises(e,n)}else{var r=this._rejectionHandler0;this._settlePromise0(this._fulfillmentHandler0,r,t),this._fulfillPromises(e,r)}this._setLength(0)}this._clearCancellationData()},i.prototype._settledValue=function(){var t=this._bitField;return 0!==(33554432&t)?this._rejectionHandler0:0!==(16777216&t)?this._fulfillmentHandler0:void 0},"undefined"!=typeof Symbol&&Symbol.toStringTag&&d.defineProperty(i.prototype,Symbol.toStringTag,{get:function(){return"Object"}}),i.defer=i.pending=function(){F.deprecated("Promise.defer","new Promise");var t=new i(b);return{promise:t,resolve:o,reject:a}},h.notEnumerableProp(i,"_makeSelfResolutionError",l),t("./method")(i,b,E,p,F),t("./bind")(i,b,E,F),t("./cancel")(i,k,p,F),t("./direct_resolve")(i),t("./synchronous_inspection")(i),t("./join")(i,k,E,b,v,c),i.Promise=i,i.version="3.5.5",h.toFastProperties(i),h.toFastProperties(i.prototype),s({a:1}),s({b:2}),s({c:3}),s(1),s(function(){}),s(void 0),s(!1),s(new i(b)),F.setBounds(_.firstLineError,h.lastLineError),i}},{"./async":1,"./bind":2,"./cancel":4,"./catch_filter":5,"./context":6,"./debuggability":7,"./direct_resolve":8,"./errors":9,"./es5":10,"./finally":11,"./join":12,"./method":13,"./nodeback":14,"./promise_array":16,"./synchronous_inspection":19,"./thenables":20,"./util":21}],16:[function(t,e,n){"use strict";e.exports=function(e,n,r,i,o){function a(t){switch(t){case-2:return[];case-3:return{};case-6:return new Map}}function s(t){var r=this._promise=new e(n);t instanceof e&&r._propagateFrom(t,3),r._setOnCancel(this),this._values=t,this._length=0,this._totalResolved=0,this._init(void 0,-2)}var c=t("./util");c.isArray;return c.inherits(s,o),s.prototype.length=function(){return this._length},s.prototype.promise=function(){return this._promise},s.prototype._init=function l(t,n){var o=r(this._values,this._promise);if(o instanceof 
e){o=o._target();var s=o._bitField;if(this._values=o,0===(50397184&s))return this._promise._setAsyncGuaranteed(),o._then(l,this._reject,void 0,this,n);if(0===(33554432&s))return 0!==(16777216&s)?this._reject(o._reason()):this._cancel();o=o._value()}if(o=c.asArray(o),null===o){var u=i("expecting an array or an iterable object but got "+c.classString(o)).reason();return void this._promise._rejectCallback(u,!1)}return 0===o.length?void(-5===n?this._resolveEmptyArray():this._resolve(a(n))):void this._iterate(o)},s.prototype._iterate=function(t){var n=this.getActualLength(t.length);this._length=n,this._values=this.shouldCopyValues()?new Array(n):this._values;for(var i=this._promise,o=!1,a=null,s=0;n>s;++s){var c=r(t[s],i);c instanceof e?(c=c._target(),a=c._bitField):a=null,o?null!==a&&c.suppressUnhandledRejections():null!==a?0===(50397184&a)?(c._proxy(this,s),this._values[s]=c):o=0!==(33554432&a)?this._promiseFulfilled(c._value(),s):0!==(16777216&a)?this._promiseRejected(c._reason(),s):this._promiseCancelled(s):o=this._promiseFulfilled(c,s)}o||i._setAsyncGuaranteed()},s.prototype._isResolved=function(){return null===this._values},s.prototype._resolve=function(t){this._values=null,this._promise._fulfill(t)},s.prototype._cancel=function(){!this._isResolved()&&this._promise._isCancellable()&&(this._values=null,this._promise._cancel())},s.prototype._reject=function(t){this._values=null,this._promise._rejectCallback(t,!1)},s.prototype._promiseFulfilled=function(t,e){this._values[e]=t;var n=++this._totalResolved;return n>=this._length?(this._resolve(this._values),!0):!1},s.prototype._promiseCancelled=function(){return this._cancel(),!0},s.prototype._promiseRejected=function(t){return this._totalResolved++,this._reject(t),!0},s.prototype._resultCancelled=function(){if(!this._isResolved()){var t=this._values;if(this._cancel(),t instanceof e)t.cancel();else for(var n=0;no;++o)n[o+r]=t[o+e],t[o+e]=void 0}function i(t){this._capacity=t,this._length=0,this._front=0}i.prototype._willBeOverCapacity=function(t){return this._capacityn;++n)i[n]=t[n];return i[n]=e,i}function l(t,e,n){if(!F.isES5)return{}.hasOwnProperty.call(t,e)?t[e]:void 0;var r=Object.getOwnPropertyDescriptor(t,e);return null!=r?null==r.get&&null==r.set?r.value:n:void 0}function u(t,e,n){if(o(t))return t;var r={value:n,configurable:!0,enumerable:!1,writable:!0};return F.defineProperty(t,e,r),t}function p(t){throw t}function f(t){try{if("function"==typeof t){var e=F.names(t.prototype),n=F.isES5&&e.length>1,r=e.length>0&&!(1===e.length&&"constructor"===e[0]),i=A.test(t+"")&&F.names(t).length>0;if(n||r||i)return!0}return!1}catch(o){return!1}}function h(t){function e(){}function n(){return typeof r.foo}e.prototype=t;var r=new e;return n(),n(),t}function d(t){return N.test(t)}function _(t,e,n){for(var r=new Array(t),i=0;t>i;++i)r[i]=e+i+n;return r}function v(t){try{return t+""}catch(e){return"[no string representation]"}}function y(t){return t instanceof Error||null!==t&&"object"==typeof t&&"string"==typeof t.message&&"string"==typeof t.name}function g(t){try{u(t,"isOperational",!0)}catch(e){}}function m(t){return null==t?!1:t instanceof Error.__BluebirdErrorTypes__.OperationalError||t.isOperational===!0}function b(t){return y(t)&&F.propertyIsWritable(t,"stack")}function C(t){return{}.toString.call(t)}function w(t,e,n){for(var r=F.names(t),i=0;i10||t[0]>0}(),D.isNode&&D.toFastProperties(process);try{throw new Error}catch(V){D.lastLineError=V}e.exports=D},{"./es5":10}]},{},[3])(3)}),"undefined"!=typeof 
window&&null!==window?window.P=window.Promise:"undefined"!=typeof self&&null!==self&&(self.P=self.Promise); \ No newline at end of file diff --git a/node_modules/bluebird/js/browser/bluebird.js b/node_modules/bluebird/js/browser/bluebird.js index 66c6c1c4fc700..35d0912b0b4eb 100644 --- a/node_modules/bluebird/js/browser/bluebird.js +++ b/node_modules/bluebird/js/browser/bluebird.js @@ -23,7 +23,7 @@ * */ /** - * bluebird build version 3.5.3 + * bluebird build version 3.5.5 * Features enabled: core, race, call_get, generators, map, nodeify, promisify, props, reduce, settle, some, using, timers, filter, any, each */ !function(e){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=e();else if("function"==typeof define&&define.amd)define([],e);else{var f;"undefined"!=typeof window?f=window:"undefined"!=typeof global?f=global:"undefined"!=typeof self&&(f=self),f.Promise=e()}}(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof _dereq_=="function"&&_dereq_;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof _dereq_=="function"&&_dereq_;for(var o=0;o 10) || (version[0] > 0); })(); diff --git a/node_modules/bluebird/js/browser/bluebird.min.js b/node_modules/bluebird/js/browser/bluebird.min.js index 23bc16d562280..ef4fb4ae88e46 100644 --- a/node_modules/bluebird/js/browser/bluebird.min.js +++ b/node_modules/bluebird/js/browser/bluebird.min.js @@ -23,9 +23,9 @@ * */ /** - * bluebird build version 3.5.3 + * bluebird build version 3.5.5 * Features enabled: core, race, call_get, generators, map, nodeify, promisify, props, reduce, settle, some, using, timers, filter, any, each */ -!function(t){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{var e;"undefined"!=typeof window?e=window:"undefined"!=typeof global?e=global:"undefined"!=typeof self&&(e=self),e.Promise=t()}}(function(){var t,e,n;return function r(t,e,n){function i(s,a){if(!e[s]){if(!t[s]){var c="function"==typeof _dereq_&&_dereq_;if(!a&&c)return c(s,!0);if(o)return o(s,!0);var l=new Error("Cannot find module '"+s+"'");throw l.code="MODULE_NOT_FOUND",l}var u=e[s]={exports:{}};t[s][0].call(u.exports,function(e){var n=t[s][1][e];return i(n?n:e)},u,u.exports,r,t,e,n)}return e[s].exports}for(var o="function"==typeof _dereq_&&_dereq_,s=0;s0;)c(t)}function c(t){var e=t.shift();if("function"!=typeof e)e._settlePromises();else{var n=t.shift(),r=t.shift();e.call(n,r)}}var l;try{throw new Error}catch(u){l=u}var p=t("./schedule"),h=t("./queue"),f=t("./util");r.prototype.setScheduler=function(t){var e=this._schedule;return this._schedule=t,this._customScheduler=!0,e},r.prototype.hasCustomScheduler=function(){return this._customScheduler},r.prototype.enableTrampoline=function(){this._trampolineEnabled=!0},r.prototype.disableTrampolineIfNecessary=function(){f.hasDevTools&&(this._trampolineEnabled=!1)},r.prototype.haveItemsQueued=function(){return this._isTickUsed||this._haveDrainedQueues},r.prototype.fatalError=function(t,e){e?(process.stderr.write("Fatal "+(t instanceof Error?t.stack:t)+"\n"),process.exit(2)):this.throwLater(t)},r.prototype.throwLater=function(t,e){if(1===arguments.length&&(e=t,t=function(){throw e}),"undefined"!=typeof 
setTimeout)setTimeout(function(){t(e)},0);else try{this._schedule(function(){t(e)})}catch(n){throw new Error("No async scheduler available\n\n See http://goo.gl/MqrFmX\n")}},f.hasDevTools?(r.prototype.invokeLater=function(t,e,n){this._trampolineEnabled?i.call(this,t,e,n):this._schedule(function(){setTimeout(function(){t.call(e,n)},100)})},r.prototype.invoke=function(t,e,n){this._trampolineEnabled?o.call(this,t,e,n):this._schedule(function(){t.call(e,n)})},r.prototype.settlePromises=function(t){this._trampolineEnabled?s.call(this,t):this._schedule(function(){t._settlePromises()})}):(r.prototype.invokeLater=i,r.prototype.invoke=o,r.prototype.settlePromises=s),r.prototype._drainQueues=function(){a(this._normalQueue),this._reset(),this._haveDrainedQueues=!0,a(this._lateQueue)},r.prototype._queueTick=function(){this._isTickUsed||(this._isTickUsed=!0,this._schedule(this.drainQueues))},r.prototype._reset=function(){this._isTickUsed=!1},e.exports=r,e.exports.firstLineError=l},{"./queue":26,"./schedule":29,"./util":36}],3:[function(t,e,n){"use strict";e.exports=function(t,e,n,r){var i=!1,o=function(t,e){this._reject(e)},s=function(t,e){e.promiseRejectionQueued=!0,e.bindingPromise._then(o,o,null,this,t)},a=function(t,e){0===(50397184&this._bitField)&&this._resolveCallback(e.target)},c=function(t,e){e.promiseRejectionQueued||this._reject(t)};t.prototype.bind=function(o){i||(i=!0,t.prototype._propagateFrom=r.propagateFromFunction(),t.prototype._boundValue=r.boundValueFunction());var l=n(o),u=new t(e);u._propagateFrom(this,1);var p=this._target();if(u._setBoundTo(l),l instanceof t){var h={promiseRejectionQueued:!1,promise:u,target:p,bindingPromise:l};p._then(e,s,void 0,u,h),l._then(a,c,void 0,u,h),u._setOnCancel(l)}else u._resolveCallback(p);return u},t.prototype._setBoundTo=function(t){void 0!==t?(this._bitField=2097152|this._bitField,this._boundTo=t):this._bitField=-2097153&this._bitField},t.prototype._isBound=function(){return 2097152===(2097152&this._bitField)},t.bind=function(e,n){return t.resolve(n).bind(e)}}},{}],4:[function(t,e,n){"use strict";function r(){try{Promise===o&&(Promise=i)}catch(t){}return o}var i;"undefined"!=typeof Promise&&(i=Promise);var o=t("./promise")();o.noConflict=r,e.exports=o},{"./promise":22}],5:[function(t,e,n){"use strict";var r=Object.create;if(r){var i=r(null),o=r(null);i[" size"]=o[" size"]=0}e.exports=function(e){function n(t,n){var r;if(null!=t&&(r=t[n]),"function"!=typeof r){var i="Object "+a.classString(t)+" has no method '"+a.toString(n)+"'";throw new e.TypeError(i)}return r}function r(t){var e=this.pop(),r=n(t,e);return r.apply(t,this)}function i(t){return t[this]}function o(t){var e=+this;return 0>e&&(e=Math.max(0,e+t.length)),t[e]}var s,a=t("./util"),c=a.canEvaluate;a.isIdentifier;e.prototype.call=function(t){var e=[].slice.call(arguments,1);return e.push(t),this._then(r,void 0,void 0,e,void 0)},e.prototype.get=function(t){var e,n="number"==typeof t;if(n)e=o;else if(c){var r=s(t);e=null!==r?r:i}else e=i;return this._then(e,void 0,void 0,t,void 0)}}},{"./util":36}],6:[function(t,e,n){"use strict";e.exports=function(e,n,r,i){var o=t("./util"),s=o.tryCatch,a=o.errorObj,c=e._async;e.prototype["break"]=e.prototype.cancel=function(){if(!i.cancellation())return this._warn("cancellation is disabled");for(var t=this,e=t;t._isCancellable();){if(!t._cancelBy(e)){e._isFollowing()?e._followee().cancel():e._cancelBranched();break}var 
n=t._cancellationParent;if(null==n||!n._isCancellable()){t._isFollowing()?t._followee().cancel():t._cancelBranched();break}t._isFollowing()&&t._followee().cancel(),t._setWillBeCancelled(),e=t,t=n}},e.prototype._branchHasCancelled=function(){this._branchesRemainingToCancel--},e.prototype._enoughBranchesHaveCancelled=function(){return void 0===this._branchesRemainingToCancel||this._branchesRemainingToCancel<=0},e.prototype._cancelBy=function(t){return t===this?(this._branchesRemainingToCancel=0,this._invokeOnCancel(),!0):(this._branchHasCancelled(),this._enoughBranchesHaveCancelled()?(this._invokeOnCancel(),!0):!1)},e.prototype._cancelBranched=function(){this._enoughBranchesHaveCancelled()&&this._cancel()},e.prototype._cancel=function(){this._isCancellable()&&(this._setCancelled(),c.invoke(this._cancelPromises,this,void 0))},e.prototype._cancelPromises=function(){this._length()>0&&this._settlePromises()},e.prototype._unsetOnCancel=function(){this._onCancelField=void 0},e.prototype._isCancellable=function(){return this.isPending()&&!this._isCancelled()},e.prototype.isCancellable=function(){return this.isPending()&&!this.isCancelled()},e.prototype._doInvokeOnCancel=function(t,e){if(o.isArray(t))for(var n=0;n=0?o[t]:void 0}var i=!1,o=[];return t.prototype._promiseCreated=function(){},t.prototype._pushContext=function(){},t.prototype._popContext=function(){return null},t._peekContext=t.prototype._peekContext=function(){},e.prototype._pushContext=function(){void 0!==this._trace&&(this._trace._promiseCreated=null,o.push(this._trace))},e.prototype._popContext=function(){if(void 0!==this._trace){var t=o.pop(),e=t._promiseCreated;return t._promiseCreated=null,e}return null},e.CapturedTrace=null,e.create=n,e.deactivateLongStackTraces=function(){},e.activateLongStackTraces=function(){var n=t.prototype._pushContext,o=t.prototype._popContext,s=t._peekContext,a=t.prototype._peekContext,c=t.prototype._promiseCreated;e.deactivateLongStackTraces=function(){t.prototype._pushContext=n,t.prototype._popContext=o,t._peekContext=s,t.prototype._peekContext=a,t.prototype._promiseCreated=c,i=!1},i=!0,t.prototype._pushContext=e.prototype._pushContext,t.prototype._popContext=e.prototype._popContext,t._peekContext=t.prototype._peekContext=r,t.prototype._promiseCreated=function(){var t=this._peekContext();t&&null==t._promiseCreated&&(t._promiseCreated=this)}},e}},{}],9:[function(t,e,n){"use strict";e.exports=function(e,n){function r(t,e){return{promise:e}}function i(){return!1}function o(t,e,n){var r=this;try{t(e,n,function(t){if("function"!=typeof t)throw new TypeError("onCancel must be a function, got: "+N.toString(t));r._attachCancellationCallback(t)})}catch(i){return i}}function s(t){if(!this._isCancellable())return this;var e=this._onCancel();void 0!==e?N.isArray(e)?e.push(t):this._setOnCancel([e,t]):this._setOnCancel(t)}function a(){return this._onCancelField}function c(t){this._onCancelField=t}function l(){this._cancellationParent=void 0,this._onCancelField=void 0}function u(t,e){if(0!==(1&e)){this._cancellationParent=t;var n=t._branchesRemainingToCancel;void 0===n&&(n=0),t._branchesRemainingToCancel=n+1}0!==(2&e)&&t._isBound()&&this._setBoundTo(t._boundTo)}function p(t,e){0!==(2&e)&&t._isBound()&&this._setBoundTo(t._boundTo)}function h(){var t=this._boundTo;return void 0!==t&&t instanceof e?t.isFulfilled()?t.value():void 0:t}function f(){this._trace=new O(this._peekContext())}function _(t,e){if(U(t)){var n=this._trace;if(void 0!==n&&e&&(n=n._parent),void 0!==n)n.attachExtraTrace(t);else 
if(!t.__stackCleaned__){var r=E(t);N.notEnumerableProp(t,"stack",r.message+"\n"+r.stack.join("\n")),N.notEnumerableProp(t,"__stackCleaned__",!0)}}}function d(){this._trace=void 0}function v(t,e,n,r,i){if(void 0===t&&null!==e&&J){if(void 0!==i&&i._returnedNonUndefined())return;if(0===(65535&r._bitField))return;n&&(n+=" ");var o="",s="";if(e._trace){for(var a=e._trace.stack.split("\n"),c=C(a),l=c.length-1;l>=0;--l){var u=c[l];if(!q.test(u)){var p=u.match($);p&&(o="at "+p[1]+":"+p[2]+":"+p[3]+" ");break}}if(c.length>0)for(var h=c[0],l=0;l0&&(s="\n"+a[l-1]);break}}var f="a promise was created in a "+n+"handler "+o+"but was not returned from it, see http://goo.gl/rRqMUw"+s;r._warn(f,!0,e)}}function y(t,e){var n=t+" is deprecated and will be removed in a future version.";return e&&(n+=" Use "+e+" instead."),m(n)}function m(t,n,r){if(at.warnings){var i,o=new H(t);if(n)r._attachExtraTrace(o);else if(at.longStackTraces&&(i=e._peekContext()))i.attachExtraTrace(o);else{var s=E(o);o.stack=s.message+"\n"+s.stack.join("\n")}nt("warning",o)||k(o,"",!0)}}function g(t,e){for(var n=0;n=0;--a)if(r[a]===o){s=a;break}for(var a=s;a>=0;--a){var c=r[a];if(e[i]!==c)break;e.pop(),i--}e=r}}function C(t){for(var e=[],n=0;n0&&"SyntaxError"!=t.name&&(e=e.slice(n)),e}function E(t){var e=t.stack,n=t.toString();return e="string"==typeof e&&e.length>0?j(t):[" (No stack trace)"],{message:n,stack:"SyntaxError"==t.name?e:C(e)}}function k(t,e,n){if("undefined"!=typeof console){var r;if(N.isObject(t)){var i=t.stack;r=e+G(i,t)}else r=e+String(t);"function"==typeof V?V(r,n):("function"==typeof console.log||"object"==typeof console.log)&&console.log(r)}}function F(t,e,n,r){var i=!1;try{"function"==typeof e&&(i=!0,"rejectionHandled"===t?e(r):e(n,r))}catch(o){L.throwLater(o)}"unhandledRejection"===t?nt(t,n,r)||i||k(n,"Unhandled rejection "):nt(t,r)}function T(t){var e;if("function"==typeof t)e="[function "+(t.name||"anonymous")+"]";else{e=t&&"function"==typeof t.toString?t.toString():N.toString(t);var n=/\[object [a-zA-Z0-9$_]+\]/;if(n.test(e))try{var r=JSON.stringify(t);e=r}catch(i){}0===e.length&&(e="(empty array)")}return"(<"+x(e)+">, no stack trace)"}function x(t){var e=41;return t.lengths||0>a||!n||!r||n!==r||s>=a||(it=function(t){if(M.test(t))return!0;var e=R(t);return e&&e.fileName===n&&s<=e.line&&e.line<=a?!0:!1})}}function O(t){this._parent=t,this._promisesCreated=0;var e=this._length=1+(void 0===t?0:t._length);st(this,O),e>32&&this.uncycle()}var A,D,V,I=e._getDomain,L=e._async,H=t("./errors").Warning,N=t("./util"),B=t("./es5"),U=N.canAttachTrace,M=/[\\\/]bluebird[\\\/]js[\\\/](release|debug|instrumented)/,q=/\((?:timers\.js):\d+:\d+\)/,$=/[\/<\(](.+?):(\d+):(\d+)\)?\s*$/,Q=null,G=null,z=!1,X=!(0==N.env("BLUEBIRD_DEBUG")||!N.env("BLUEBIRD_DEBUG")&&"development"!==N.env("NODE_ENV")),W=!(0==N.env("BLUEBIRD_WARNINGS")||!X&&!N.env("BLUEBIRD_WARNINGS")),K=!(0==N.env("BLUEBIRD_LONG_STACK_TRACES")||!X&&!N.env("BLUEBIRD_LONG_STACK_TRACES")),J=0!=N.env("BLUEBIRD_W_FORGOTTEN_RETURN")&&(W||!!N.env("BLUEBIRD_W_FORGOTTEN_RETURN"));e.prototype.suppressUnhandledRejections=function(){var t=this._target();t._bitField=-1048577&t._bitField|524288},e.prototype._ensurePossibleRejectionHandled=function(){if(0===(524288&this._bitField)){this._setRejectionIsUnhandled();var t=this;setTimeout(function(){t._notifyUnhandledRejection()},1)}},e.prototype._notifyUnhandledRejectionIsHandled=function(){F("rejectionHandled",A,void 
0,this)},e.prototype._setReturnedNonUndefined=function(){this._bitField=268435456|this._bitField},e.prototype._returnedNonUndefined=function(){return 0!==(268435456&this._bitField)},e.prototype._notifyUnhandledRejection=function(){if(this._isRejectionUnhandled()){var t=this._settledValue();this._setUnhandledRejectionIsNotified(),F("unhandledRejection",D,t,this)}},e.prototype._setUnhandledRejectionIsNotified=function(){this._bitField=262144|this._bitField},e.prototype._unsetUnhandledRejectionIsNotified=function(){this._bitField=-262145&this._bitField},e.prototype._isUnhandledRejectionNotified=function(){return(262144&this._bitField)>0},e.prototype._setRejectionIsUnhandled=function(){this._bitField=1048576|this._bitField},e.prototype._unsetRejectionIsUnhandled=function(){this._bitField=-1048577&this._bitField,this._isUnhandledRejectionNotified()&&(this._unsetUnhandledRejectionIsNotified(),this._notifyUnhandledRejectionIsHandled())},e.prototype._isRejectionUnhandled=function(){return(1048576&this._bitField)>0},e.prototype._warn=function(t,e,n){return m(t,e,n||this)},e.onPossiblyUnhandledRejection=function(t){var e=I();D="function"==typeof t?null===e?t:N.domainBind(e,t):void 0},e.onUnhandledRejectionHandled=function(t){var e=I();A="function"==typeof t?null===e?t:N.domainBind(e,t):void 0};var Y=function(){};e.longStackTraces=function(){if(L.haveItemsQueued()&&!at.longStackTraces)throw new Error("cannot enable long stack traces after promises have been created\n\n See http://goo.gl/MqrFmX\n");if(!at.longStackTraces&&P()){var t=e.prototype._captureStackTrace,r=e.prototype._attachExtraTrace,i=e.prototype._dereferenceTrace;at.longStackTraces=!0,Y=function(){if(L.haveItemsQueued()&&!at.longStackTraces)throw new Error("cannot enable long stack traces after promises have been created\n\n See http://goo.gl/MqrFmX\n");e.prototype._captureStackTrace=t,e.prototype._attachExtraTrace=r,e.prototype._dereferenceTrace=i,n.deactivateLongStackTraces(),L.enableTrampoline(),at.longStackTraces=!1},e.prototype._captureStackTrace=f,e.prototype._attachExtraTrace=_,e.prototype._dereferenceTrace=d,n.activateLongStackTraces(),L.disableTrampolineIfNecessary()}},e.hasLongStackTraces=function(){return at.longStackTraces&&P()};var Z=function(){try{if("function"==typeof CustomEvent){var t=new CustomEvent("CustomEvent");return N.global.dispatchEvent(t),function(t,e){var n={detail:e,cancelable:!0};B.defineProperty(n,"promise",{value:e.promise}),B.defineProperty(n,"reason",{value:e.reason});var r=new CustomEvent(t.toLowerCase(),n);return!N.global.dispatchEvent(r)}}if("function"==typeof Event){var t=new Event("CustomEvent");return N.global.dispatchEvent(t),function(t,e){var n=new Event(t.toLowerCase(),{cancelable:!0});return n.detail=e,B.defineProperty(n,"promise",{value:e.promise}),B.defineProperty(n,"reason",{value:e.reason}),!N.global.dispatchEvent(n)}}var t=document.createEvent("CustomEvent");return t.initCustomEvent("testingtheevent",!1,!0,{}),N.global.dispatchEvent(t),function(t,e){var n=document.createEvent("CustomEvent");return n.initCustomEvent(t.toLowerCase(),!1,!0,e),!N.global.dispatchEvent(n)}}catch(e){}return function(){return!1}}(),tt=function(){return N.isNode?function(){return process.emit.apply(process,arguments)}:N.global?function(t){var e="on"+t.toLowerCase(),n=N.global[e];return 
n?(n.apply(N.global,[].slice.call(arguments,1)),!0):!1}:function(){return!1}}(),et={promiseCreated:r,promiseFulfilled:r,promiseRejected:r,promiseResolved:r,promiseCancelled:r,promiseChained:function(t,e,n){return{promise:e,child:n}},warning:function(t,e){return{warning:e}},unhandledRejection:function(t,e,n){return{reason:e,promise:n}},rejectionHandled:r},nt=function(t){var e=!1;try{e=tt.apply(null,arguments)}catch(n){L.throwLater(n),e=!0}var r=!1;try{r=Z(t,et[t].apply(null,arguments))}catch(n){L.throwLater(n),r=!0}return r||e};e.config=function(t){if(t=Object(t),"longStackTraces"in t&&(t.longStackTraces?e.longStackTraces():!t.longStackTraces&&e.hasLongStackTraces()&&Y()),"warnings"in t){var n=t.warnings;at.warnings=!!n,J=at.warnings,N.isObject(n)&&"wForgottenReturn"in n&&(J=!!n.wForgottenReturn)}if("cancellation"in t&&t.cancellation&&!at.cancellation){if(L.haveItemsQueued())throw new Error("cannot enable cancellation after promises are in use");e.prototype._clearCancellationData=l,e.prototype._propagateFrom=u,e.prototype._onCancel=a,e.prototype._setOnCancel=c,e.prototype._attachCancellationCallback=s,e.prototype._execute=o,rt=u,at.cancellation=!0}return"monitoring"in t&&(t.monitoring&&!at.monitoring?(at.monitoring=!0,e.prototype._fireEvent=nt):!t.monitoring&&at.monitoring&&(at.monitoring=!1,e.prototype._fireEvent=i)),e},e.prototype._fireEvent=i,e.prototype._execute=function(t,e,n){try{t(e,n)}catch(r){return r}},e.prototype._onCancel=function(){},e.prototype._setOnCancel=function(t){},e.prototype._attachCancellationCallback=function(t){},e.prototype._captureStackTrace=function(){},e.prototype._attachExtraTrace=function(){},e.prototype._dereferenceTrace=function(){},e.prototype._clearCancellationData=function(){},e.prototype._propagateFrom=function(t,e){};var rt=p,it=function(){return!1},ot=/[\/<\(]([^:\/]+):(\d+):(?:\d+)\)?\s*$/;N.inherits(O,Error),n.CapturedTrace=O,O.prototype.uncycle=function(){var t=this._length;if(!(2>t)){for(var e=[],n={},r=0,i=this;void 0!==i;++r)e.push(i),i=i._parent;t=this._length=r;for(var r=t-1;r>=0;--r){var o=e[r].stack;void 0===n[o]&&(n[o]=r)}for(var r=0;t>r;++r){var s=e[r].stack,a=n[s];if(void 0!==a&&a!==r){a>0&&(e[a-1]._parent=void 0,e[a-1]._length=1),e[r]._parent=void 0,e[r]._length=1;var c=r>0?e[r-1]:this;t-1>a?(c._parent=e[a+1],c._parent.uncycle(),c._length=c._parent._length+1):(c._parent=void 0,c._length=1);for(var l=c._length+1,u=r-2;u>=0;--u)e[u]._length=l,l++;return}}}},O.prototype.attachExtraTrace=function(t){if(!t.__stackCleaned__){this.uncycle();for(var e=E(t),n=e.message,r=[e.stack],i=this;void 0!==i;)r.push(C(i.stack.split("\n"))),i=i._parent;w(r),b(r),N.notEnumerableProp(t,"stack",g(n,r)),N.notEnumerableProp(t,"__stackCleaned__",!0)}};var st=function(){var t=/^\s*at\s*/,e=function(t,e){return"string"==typeof t?t:void 0!==e.name&&void 0!==e.message?e.toString():T(e)};if("number"==typeof Error.stackTraceLimit&&"function"==typeof Error.captureStackTrace){Error.stackTraceLimit+=6,Q=t,G=e;var n=Error.captureStackTrace;return it=function(t){return M.test(t)},function(t,e){Error.stackTraceLimit+=6,n(t,e),Error.stackTraceLimit-=6}}var r=new Error;if("string"==typeof r.stack&&r.stack.split("\n")[0].indexOf("stackDetection@")>=0)return Q=/@/,G=e,z=!0,function(t){t.stack=(new Error).stack};var i;try{throw new Error}catch(o){i="stack"in o}return"stack"in r||!i||"number"!=typeof Error.stackTraceLimit?(G=function(t,e){return"string"==typeof t?t:"object"!=typeof e&&"function"!=typeof e||void 0===e.name||void 
0===e.message?T(e):e.toString()},null):(Q=t,G=e,function(t){Error.stackTraceLimit+=6;try{throw new Error}catch(e){t.stack=e.stack}Error.stackTraceLimit-=6})}([]);"undefined"!=typeof console&&"undefined"!=typeof console.warn&&(V=function(t){console.warn(t)},N.isNode&&process.stderr.isTTY?V=function(t,e){var n=e?"":"";console.warn(n+t+"\n")}:N.isNode||"string"!=typeof(new Error).stack||(V=function(t,e){console.warn("%c"+t,e?"color: darkorange":"color: red")}));var at={warnings:W,longStackTraces:!1,cancellation:!1,monitoring:!1};return K&&e.longStackTraces(),{longStackTraces:function(){return at.longStackTraces},warnings:function(){return at.warnings},cancellation:function(){return at.cancellation},monitoring:function(){return at.monitoring},propagateFromFunction:function(){return rt},boundValueFunction:function(){return h},checkForgottenReturns:v,setBounds:S,warn:m,deprecated:y,CapturedTrace:O,fireDomEvent:Z,fireGlobalEvent:tt}}},{"./errors":12,"./es5":13,"./util":36}],10:[function(t,e,n){"use strict";e.exports=function(t){function e(){return this.value}function n(){throw this.reason}t.prototype["return"]=t.prototype.thenReturn=function(n){return n instanceof t&&n.suppressUnhandledRejections(),this._then(e,void 0,void 0,{value:n},void 0)},t.prototype["throw"]=t.prototype.thenThrow=function(t){return this._then(n,void 0,void 0,{reason:t},void 0)},t.prototype.catchThrow=function(t){if(arguments.length<=1)return this._then(void 0,n,void 0,{reason:t},void 0);var e=arguments[1],r=function(){throw e};return this.caught(t,r)},t.prototype.catchReturn=function(n){if(arguments.length<=1)return n instanceof t&&n.suppressUnhandledRejections(),this._then(void 0,e,void 0,{value:n},void 0);var r=arguments[1];r instanceof t&&r.suppressUnhandledRejections();var i=function(){return r};return this.caught(n,i)}}},{}],11:[function(t,e,n){"use strict";e.exports=function(t,e){function n(){return o(this)}function r(t,n){return i(t,n,e,e)}var i=t.reduce,o=t.all;t.prototype.each=function(t){return i(this,t,e,0)._then(n,void 0,void 0,this,void 0)},t.prototype.mapSeries=function(t){return i(this,t,e,e)},t.each=function(t,r){return i(t,r,e,0)._then(n,void 0,void 0,t,void 0)},t.mapSeries=r}},{}],12:[function(t,e,n){"use strict";function r(t,e){function n(r){return this instanceof n?(p(this,"message","string"==typeof r?r:e),p(this,"name",t),void(Error.captureStackTrace?Error.captureStackTrace(this,this.constructor):Error.call(this))):new n(r)}return u(n,Error),n}function i(t){return this instanceof i?(p(this,"name","OperationalError"),p(this,"message",t),this.cause=t,this.isOperational=!0,void(t instanceof Error?(p(this,"message",t.message),p(this,"stack",t.stack)):Error.captureStackTrace&&Error.captureStackTrace(this,this.constructor))):new i(t)}var o,s,a=t("./es5"),c=a.freeze,l=t("./util"),u=l.inherits,p=l.notEnumerableProp,h=r("Warning","warning"),f=r("CancellationError","cancellation error"),_=r("TimeoutError","timeout error"),d=r("AggregateError","aggregate error");try{o=TypeError,s=RangeError}catch(v){o=r("TypeError","type error"),s=r("RangeError","range error")}for(var y="join pop push shift unshift slice filter forEach some every map indexOf lastIndexOf reduce reduceRight sort reverse".split(" "),m=0;m1?t.cancelPromise._reject(e):t.cancelPromise._cancel(),t.cancelPromise=null,!0):!1}function a(){return l.call(this,this.promise._target()._settledValue())}function c(t){return s(this,t)?void 0:(h.e=t,h)}function l(t){var i=this.promise,l=this.handler;if(!this.called){this.called=!0;var 
u=this.isFinallyHandler()?l.call(i._boundValue()):l.call(i._boundValue(),t);if(u===r)return u;if(void 0!==u){i._setReturnedNonUndefined();var f=n(u,i);if(f instanceof e){if(null!=this.cancelPromise){if(f._isCancelled()){var _=new p("late cancellation observer");return i._attachExtraTrace(_),h.e=_,h}f.isPending()&&f._attachCancellationCallback(new o(this))}return f._then(a,c,void 0,this,void 0)}}}return i.isRejected()?(s(this),h.e=t,h):(s(this),t)}var u=t("./util"),p=e.CancellationError,h=u.errorObj,f=t("./catch_filter")(r);return i.prototype.isFinallyHandler=function(){return 0===this.type},o.prototype._resultCancelled=function(){s(this.finallyHandler)},e.prototype._passThrough=function(t,e,n,r){return"function"!=typeof t?this.then():this._then(n,r,void 0,new i(this,e,t),void 0)},e.prototype.lastly=e.prototype["finally"]=function(t){return this._passThrough(t,0,l,l)},e.prototype.tap=function(t){return this._passThrough(t,1,l)},e.prototype.tapCatch=function(t){var n=arguments.length;if(1===n)return this._passThrough(t,1,void 0,l);var r,i=new Array(n-1),o=0;for(r=0;n-1>r;++r){var s=arguments[r];if(!u.isObject(s))return e.reject(new TypeError("tapCatch statement predicate: expecting an object but got "+u.classString(s)));i[o++]=s}i.length=o;var a=arguments[r];return this._passThrough(f(i,a,this),1,void 0,l)},i}},{"./catch_filter":7,"./util":36}],16:[function(t,e,n){"use strict";e.exports=function(e,n,r,i,o,s){function a(t,n,r){for(var o=0;o0&&"function"==typeof arguments[e]){t=arguments[e];var r}var i=[].slice.call(arguments);t&&i.pop();var r=new n(i).promise();return void 0!==t?r.spread(t):r}}},{"./util":36}],18:[function(t,e,n){"use strict";e.exports=function(e,n,r,i,o,s){function a(t,e,n,r){this.constructor$(t),this._promise._captureStackTrace();var i=l();this._callback=null===i?e:u.domainBind(i,e),this._preservedValues=r===o?new Array(this.length()):null,this._limit=n,this._inFlight=0,this._queue=[],f.invoke(this._asyncInit,this,void 0)}function c(t,n,i,o){if("function"!=typeof n)return r("expecting a function but got "+u.classString(n));var s=0;if(void 0!==i){if("object"!=typeof i||null===i)return e.reject(new TypeError("options argument must be an object but it is "+u.classString(i)));if("number"!=typeof i.concurrency)return e.reject(new TypeError("'concurrency' must be a number but it is "+u.classString(i.concurrency)));s=i.concurrency}return s="number"==typeof s&&isFinite(s)&&s>=1?s:0,new a(t,n,s,o).promise()}var l=e._getDomain,u=t("./util"),p=u.tryCatch,h=u.errorObj,f=e._async;u.inherits(a,n),a.prototype._asyncInit=function(){this._init$(void 0,-2)},a.prototype._init=function(){},a.prototype._promiseFulfilled=function(t,n){var r=this._values,o=this.length(),a=this._preservedValues,c=this._limit;if(0>n){if(n=-1*n-1,r[n]=t,c>=1&&(this._inFlight--,this._drainQueue(),this._isResolved()))return!0}else{if(c>=1&&this._inFlight>=c)return r[n]=t,this._queue.push(n),!1;null!==a&&(a[n]=t);var l=this._promise,u=this._callback,f=l._boundValue();l._pushContext();var _=p(u).call(f,t,n,o),d=l._popContext();if(s.checkForgottenReturns(_,d,null!==a?"Promise.filter":"Promise.map",l),_===h)return this._reject(_.e),!0;var v=i(_,this._promise);if(v instanceof e){v=v._target();var y=v._bitField;if(0===(50397184&y))return c>=1&&this._inFlight++,r[n]=v,v._proxy(this,-1*(n+1)),!1;if(0===(33554432&y))return 0!==(16777216&y)?(this._reject(v._reason()),!0):(this._cancel(),!0);_=v._value()}r[n]=_}var m=++this._totalResolved;return 
m>=o?(null!==a?this._filter(r,a):this._resolve(r),!0):!1},a.prototype._drainQueue=function(){for(var t=this._queue,e=this._limit,n=this._values;t.length>0&&this._inFlighto;++o)t[o]&&(r[i++]=e[o]);r.length=i,this._resolve(r)},a.prototype.preservedValues=function(){return this._preservedValues},e.prototype.map=function(t,e){return c(this,t,e,null)},e.map=function(t,e,n,r){return c(t,e,n,r)}}},{"./util":36}],19:[function(t,e,n){"use strict";e.exports=function(e,n,r,i,o){var s=t("./util"),a=s.tryCatch;e.method=function(t){if("function"!=typeof t)throw new e.TypeError("expecting a function but got "+s.classString(t));return function(){var r=new e(n);r._captureStackTrace(),r._pushContext();var i=a(t).apply(this,arguments),s=r._popContext();return o.checkForgottenReturns(i,s,"Promise.method",r),r._resolveFromSyncValue(i),r}},e.attempt=e["try"]=function(t){if("function"!=typeof t)return i("expecting a function but got "+s.classString(t));var r=new e(n);r._captureStackTrace(),r._pushContext();var c;if(arguments.length>1){o.deprecated("calling Promise.try with more than 1 argument");var l=arguments[1],u=arguments[2];c=s.isArray(l)?a(t).apply(u,l):a(t).call(u,l)}else c=a(t)();var p=r._popContext();return o.checkForgottenReturns(c,p,"Promise.try",r),r._resolveFromSyncValue(c),r},e.prototype._resolveFromSyncValue=function(t){t===s.errorObj?this._rejectCallback(t.e,!1):this._resolveCallback(t,!0)}}},{"./util":36}],20:[function(t,e,n){"use strict";function r(t){return t instanceof Error&&u.getPrototypeOf(t)===Error.prototype}function i(t){var e;if(r(t)){e=new l(t),e.name=t.name,e.message=t.message,e.stack=t.stack;for(var n=u.keys(t),i=0;i1){var n,r=new Array(e-1),i=0;for(n=0;e-1>n;++n){var o=arguments[n];if(!f.isObject(o))return p("Catch statement predicate: expecting an object but got "+f.classString(o));r[i++]=o}return r.length=i,t=arguments[n],this.then(void 0,P(r,t,this))}return this.then(void 0,t)},i.prototype.reflect=function(){return this._then(u,u,void 0,this,void 0)},i.prototype.then=function(t,e){if(T.warnings()&&arguments.length>0&&"function"!=typeof t&&"function"!=typeof e){var n=".then() only accepts functions but was passed: "+f.classString(t);arguments.length>1&&(n+=", "+f.classString(e)),this._warn(n)}return this._then(t,e,void 0,void 0,void 0)},i.prototype.done=function(t,e){var n=this._then(t,e,void 0,void 0,void 0);n._setIsFinal()},i.prototype.spread=function(t){return"function"!=typeof t?p("expecting a function but got "+f.classString(t)):this.all()._then(t,void 0,void 0,w,void 0)},i.prototype.toJSON=function(){var t={isFulfilled:!1,isRejected:!1,fulfillmentValue:void 0,rejectionReason:void 0};return this.isFulfilled()?(t.fulfillmentValue=this.value(),t.isFulfilled=!0):this.isRejected()&&(t.rejectionReason=this.reason(),t.isRejected=!0),t},i.prototype.all=function(){return arguments.length>0&&this._warn(".all() was passed arguments but it does not take any"),new E(this).promise()},i.prototype.error=function(t){return this.caught(f.originatesFromRejection,t)},i.getNewLibraryCopy=e.exports,i.is=function(t){return t instanceof i},i.fromNode=i.fromCallback=function(t){var e=new i(b);e._captureStackTrace();var n=arguments.length>1?!!Object(arguments[1]).multiArgs:!1,r=O(t)(R(e,n));return r===S&&e._rejectCallback(r.e,!0),e._isFateSealed()||e._setAsyncGuaranteed(),e},i.all=function(t){return new E(t).promise()},i.cast=function(t){var e=j(t);return e instanceof i||(e=new 
i(b),e._captureStackTrace(),e._setFulfilled(),e._rejectionHandler0=t),e},i.resolve=i.fulfilled=i.cast,i.reject=i.rejected=function(t){var e=new i(b);return e._captureStackTrace(),e._rejectCallback(t,!0),e},i.setScheduler=function(t){if("function"!=typeof t)throw new m("expecting a function but got "+f.classString(t));return v.setScheduler(t)},i.prototype._then=function(t,e,n,r,o){var s=void 0!==o,a=s?o:new i(b),l=this._target(),u=l._bitField;s||(a._propagateFrom(this,3),a._captureStackTrace(),void 0===r&&0!==(2097152&this._bitField)&&(r=0!==(50397184&u)?this._boundValue():l===this?void 0:this._boundTo),this._fireEvent("promiseChained",this,a));var p=c();if(0!==(50397184&u)){var h,_,d=l._settlePromiseCtx;0!==(33554432&u)?(_=l._rejectionHandler0,h=t):0!==(16777216&u)?(_=l._fulfillmentHandler0,h=e,l._unsetRejectionIsUnhandled()):(d=l._settlePromiseLateCancellationObserver,_=new g("late cancellation observer"),l._attachExtraTrace(_),h=e),v.invoke(d,l,{handler:null===p?h:"function"==typeof h&&f.domainBind(p,h),promise:a,receiver:r,value:_})}else l._addCallbacks(t,e,a,r,p);return a},i.prototype._length=function(){return 65535&this._bitField},i.prototype._isFateSealed=function(){return 0!==(117506048&this._bitField)},i.prototype._isFollowing=function(){return 67108864===(67108864&this._bitField)},i.prototype._setLength=function(t){this._bitField=-65536&this._bitField|65535&t},i.prototype._setFulfilled=function(){this._bitField=33554432|this._bitField,this._fireEvent("promiseFulfilled",this)},i.prototype._setRejected=function(){this._bitField=16777216|this._bitField,this._fireEvent("promiseRejected",this)},i.prototype._setFollowing=function(){this._bitField=67108864|this._bitField,this._fireEvent("promiseResolved",this)},i.prototype._setIsFinal=function(){this._bitField=4194304|this._bitField},i.prototype._isFinal=function(){return(4194304&this._bitField)>0},i.prototype._unsetCancelled=function(){this._bitField=-65537&this._bitField},i.prototype._setCancelled=function(){this._bitField=65536|this._bitField,this._fireEvent("promiseCancelled",this)},i.prototype._setWillBeCancelled=function(){this._bitField=8388608|this._bitField},i.prototype._setAsyncGuaranteed=function(){v.hasCustomScheduler()||(this._bitField=134217728|this._bitField)},i.prototype._receiverAt=function(t){var e=0===t?this._receiver0:this[4*t-4+3];return e===h?void 0:void 0===e&&this._isBound()?this._boundValue():e},i.prototype._promiseAt=function(t){return this[4*t-4+2]},i.prototype._fulfillmentHandlerAt=function(t){return this[4*t-4+0]},i.prototype._rejectionHandlerAt=function(t){return this[4*t-4+1]},i.prototype._boundValue=function(){},i.prototype._migrateCallback0=function(t){var e=(t._bitField,t._fulfillmentHandler0),n=t._rejectionHandler0,r=t._promise0,i=t._receiverAt(0);void 0===i&&(i=h),this._addCallbacks(e,n,r,i,null)},i.prototype._migrateCallbackAt=function(t,e){var n=t._fulfillmentHandlerAt(e),r=t._rejectionHandlerAt(e),i=t._promiseAt(e),o=t._receiverAt(e);void 0===o&&(o=h),this._addCallbacks(n,r,i,o,null)},i.prototype._addCallbacks=function(t,e,n,r,i){var o=this._length();if(o>=65531&&(o=0,this._setLength(0)),0===o)this._promise0=n,this._receiver0=r,"function"==typeof t&&(this._fulfillmentHandler0=null===i?t:f.domainBind(i,t)),"function"==typeof e&&(this._rejectionHandler0=null===i?e:f.domainBind(i,e));else{var s=4*o-4;this[s+2]=n,this[s+3]=r,"function"==typeof t&&(this[s+0]=null===i?t:f.domainBind(i,t)),"function"==typeof e&&(this[s+1]=null===i?e:f.domainBind(i,e))}return 
this._setLength(o+1),o},i.prototype._proxy=function(t,e){this._addCallbacks(void 0,void 0,e,t,null)},i.prototype._resolveCallback=function(t,e){if(0===(117506048&this._bitField)){if(t===this)return this._rejectCallback(l(),!1);var n=j(t,this);if(!(n instanceof i))return this._fulfill(t);e&&this._propagateFrom(n,2);var r=n._target();if(r===this)return void this._reject(l());var o=r._bitField;if(0===(50397184&o)){var s=this._length();s>0&&r._migrateCallback0(this);for(var a=1;s>a;++a)r._migrateCallbackAt(this,a);this._setFollowing(),this._setLength(0),this._setFollowee(r)}else if(0!==(33554432&o))this._fulfill(r._value());else if(0!==(16777216&o))this._reject(r._reason());else{var c=new g("late cancellation observer");r._attachExtraTrace(c),this._reject(c)}}},i.prototype._rejectCallback=function(t,e,n){var r=f.ensureErrorObject(t),i=r===t;if(!i&&!n&&T.warnings()){var o="a promise was rejected with a non-error: "+f.classString(t);this._warn(o,!0)}this._attachExtraTrace(r,e?i:!1),this._reject(t)},i.prototype._resolveFromExecutor=function(t){if(t!==b){var e=this;this._captureStackTrace(),this._pushContext();var n=!0,r=this._execute(t,function(t){e._resolveCallback(t)},function(t){e._rejectCallback(t,n)});n=!1,this._popContext(),void 0!==r&&e._rejectCallback(r,!0)}},i.prototype._settlePromiseFromHandler=function(t,e,n,r){var i=r._bitField;if(0===(65536&i)){r._pushContext();var o;e===w?n&&"number"==typeof n.length?o=O(t).apply(this._boundValue(),n):(o=S,o.e=new m("cannot .spread() a non-array: "+f.classString(n))):o=O(t).call(e,n);var s=r._popContext();i=r._bitField,0===(65536&i)&&(o===C?r._reject(n):o===S?r._rejectCallback(o.e,!1):(T.checkForgottenReturns(o,s,"",r,this),r._resolveCallback(o)))}},i.prototype._target=function(){for(var t=this;t._isFollowing();)t=t._followee();return t},i.prototype._followee=function(){return this._rejectionHandler0},i.prototype._setFollowee=function(t){this._rejectionHandler0=t},i.prototype._settlePromise=function(t,e,r,o){var s=t instanceof i,a=this._bitField,c=0!==(134217728&a);0!==(65536&a)?(s&&t._invokeInternalOnCancel(),r instanceof x&&r.isFinallyHandler()?(r.cancelPromise=t,O(e).call(r,o)===S&&t._reject(S.e)):e===u?t._fulfill(u.call(r)):r instanceof n?r._promiseCancelled(t):s||t instanceof E?t._cancel():r.cancel()):"function"==typeof e?s?(c&&t._setAsyncGuaranteed(),this._settlePromiseFromHandler(e,r,o,t)):e.call(r,o,t):r instanceof n?r._isResolved()||(0!==(33554432&a)?r._promiseFulfilled(o,t):r._promiseRejected(o,t)):s&&(c&&t._setAsyncGuaranteed(),0!==(33554432&a)?t._fulfill(o):t._reject(o))},i.prototype._settlePromiseLateCancellationObserver=function(t){var e=t.handler,n=t.promise,r=t.receiver,o=t.value;"function"==typeof e?n instanceof i?this._settlePromiseFromHandler(e,r,o,n):e.call(r,o,n):n instanceof i&&n._reject(o)},i.prototype._settlePromiseCtx=function(t){this._settlePromise(t.promise,t.handler,t.receiver,t.value)},i.prototype._settlePromise0=function(t,e,n){var r=this._promise0,i=this._receiverAt(0);this._promise0=void 0,this._receiver0=void 0,this._settlePromise(r,t,i,e)},i.prototype._clearCallbackDataAtIndex=function(t){var e=4*t-4;this[e+2]=this[e+3]=this[e+0]=this[e+1]=void 0},i.prototype._fulfill=function(t){var e=this._bitField;if(!((117506048&e)>>>16)){if(t===this){var n=l();return this._attachExtraTrace(n),this._reject(n)}this._setFulfilled(),this._rejectionHandler0=t,(65535&e)>0&&(0!==(134217728&e)?this._settlePromises():v.settlePromises(this),this._dereferenceTrace())}},i.prototype._reject=function(t){var 
e=this._bitField;if(!((117506048&e)>>>16))return this._setRejected(),this._fulfillmentHandler0=t,this._isFinal()?v.fatalError(t,f.isNode):void((65535&e)>0?v.settlePromises(this):this._ensurePossibleRejectionHandled())},i.prototype._fulfillPromises=function(t,e){for(var n=1;t>n;n++){var r=this._fulfillmentHandlerAt(n),i=this._promiseAt(n),o=this._receiverAt(n);this._clearCallbackDataAtIndex(n),this._settlePromise(i,r,o,e)}},i.prototype._rejectPromises=function(t,e){for(var n=1;t>n;n++){var r=this._rejectionHandlerAt(n),i=this._promiseAt(n),o=this._receiverAt(n);this._clearCallbackDataAtIndex(n),this._settlePromise(i,r,o,e)}},i.prototype._settlePromises=function(){var t=this._bitField,e=65535&t;if(e>0){if(0!==(16842752&t)){var n=this._fulfillmentHandler0;this._settlePromise0(this._rejectionHandler0,n,t),this._rejectPromises(e,n)}else{var r=this._rejectionHandler0;this._settlePromise0(this._fulfillmentHandler0,r,t),this._fulfillPromises(e,r)}this._setLength(0)}this._clearCancellationData()},i.prototype._settledValue=function(){var t=this._bitField;return 0!==(33554432&t)?this._rejectionHandler0:0!==(16777216&t)?this._fulfillmentHandler0:void 0},i.defer=i.pending=function(){T.deprecated("Promise.defer","new Promise");var t=new i(b);return{promise:t,resolve:o,reject:s}},f.notEnumerableProp(i,"_makeSelfResolutionError",l),t("./method")(i,b,j,p,T),t("./bind")(i,b,j,T),t("./cancel")(i,E,p,T),t("./direct_resolve")(i),t("./synchronous_inspection")(i),t("./join")(i,E,j,b,v,c),i.Promise=i,i.version="3.5.3",t("./map.js")(i,E,p,j,b,T),t("./call_get.js")(i),t("./using.js")(i,p,j,F,b,T),t("./timers.js")(i,b,T),t("./generators.js")(i,p,b,j,n,T),t("./nodeify.js")(i),t("./promisify.js")(i,b),t("./props.js")(i,E,j,p),t("./race.js")(i,b,j,p),t("./reduce.js")(i,E,p,j,b,T),t("./settle.js")(i,E,T),t("./some.js")(i,E,p),t("./filter.js")(i,b),t("./each.js")(i,b),t("./any.js")(i),f.toFastProperties(i),f.toFastProperties(i.prototype),a({a:1}),a({b:2}),a({c:3}),a(1),a(function(){}),a(void 0),a(!1),a(new i(b)),T.setBounds(d.firstLineError,f.lastLineError),i}},{"./any.js":1,"./async":2,"./bind":3,"./call_get.js":5,"./cancel":6,"./catch_filter":7,"./context":8,"./debuggability":9,"./direct_resolve":10,"./each.js":11,"./errors":12,"./es5":13,"./filter.js":14,"./finally":15,"./generators.js":16,"./join":17,"./map.js":18,"./method":19,"./nodeback":20,"./nodeify.js":21,"./promise_array":23,"./promisify.js":24,"./props.js":25,"./race.js":27,"./reduce.js":28,"./settle.js":30,"./some.js":31,"./synchronous_inspection":32,"./thenables":33,"./timers.js":34,"./using.js":35,"./util":36}],23:[function(t,e,n){"use strict";e.exports=function(e,n,r,i,o){function s(t){switch(t){case-2:return[];case-3:return{};case-6:return new Map}}function a(t){var r=this._promise=new e(n);t instanceof e&&r._propagateFrom(t,3),r._setOnCancel(this),this._values=t,this._length=0,this._totalResolved=0,this._init(void 0,-2)}var c=t("./util");c.isArray;return c.inherits(a,o),a.prototype.length=function(){return this._length},a.prototype.promise=function(){return this._promise},a.prototype._init=function l(t,n){var o=r(this._values,this._promise);if(o instanceof e){o=o._target();var a=o._bitField;if(this._values=o,0===(50397184&a))return this._promise._setAsyncGuaranteed(),o._then(l,this._reject,void 0,this,n);if(0===(33554432&a))return 0!==(16777216&a)?this._reject(o._reason()):this._cancel();o=o._value()}if(o=c.asArray(o),null===o){var u=i("expecting an array or an iterable object but got "+c.classString(o)).reason();return void 
this._promise._rejectCallback(u,!1)}return 0===o.length?void(-5===n?this._resolveEmptyArray():this._resolve(s(n))):void this._iterate(o)},a.prototype._iterate=function(t){var n=this.getActualLength(t.length);this._length=n,this._values=this.shouldCopyValues()?new Array(n):this._values;for(var i=this._promise,o=!1,s=null,a=0;n>a;++a){var c=r(t[a],i);c instanceof e?(c=c._target(),s=c._bitField):s=null,o?null!==s&&c.suppressUnhandledRejections():null!==s?0===(50397184&s)?(c._proxy(this,a),this._values[a]=c):o=0!==(33554432&s)?this._promiseFulfilled(c._value(),a):0!==(16777216&s)?this._promiseRejected(c._reason(),a):this._promiseCancelled(a):o=this._promiseFulfilled(c,a)}o||i._setAsyncGuaranteed()},a.prototype._isResolved=function(){return null===this._values},a.prototype._resolve=function(t){this._values=null,this._promise._fulfill(t)},a.prototype._cancel=function(){!this._isResolved()&&this._promise._isCancellable()&&(this._values=null,this._promise._cancel())},a.prototype._reject=function(t){this._values=null,this._promise._rejectCallback(t,!1)},a.prototype._promiseFulfilled=function(t,e){this._values[e]=t;var n=++this._totalResolved;return n>=this._length?(this._resolve(this._values),!0):!1},a.prototype._promiseCancelled=function(){return this._cancel(),!0},a.prototype._promiseRejected=function(t){return this._totalResolved++,this._reject(t),!0},a.prototype._resultCancelled=function(){if(!this._isResolved()){var t=this._values;if(this._cancel(),t instanceof e)t.cancel();else for(var n=0;nc;c+=2){var u=s[c],p=s[c+1],_=u+e;if(r===k)t[_]=k(u,h,u,p,e,i);else{var d=r(p,function(){return k(u,h,u,p,e,i)});f.notEnumerableProp(d,"__isPromisified__",!0),t[_]=d}}return f.toFastProperties(t),t}function u(t,e,n){return k(t,e,void 0,t,null,n)}var p,h={},f=t("./util"),_=t("./nodeback"),d=f.withAppended,v=f.maybeWrapAsError,y=f.canEvaluate,m=t("./errors").TypeError,g="Async",b={__isPromisified__:!0},w=["arity","length","name","arguments","caller","callee","prototype","__isPromisified__"],C=new RegExp("^(?:"+w.join("|")+")$"),j=function(t){return f.isIdentifier(t)&&"_"!==t.charAt(0)&&"constructor"!==t},E=function(t){return t.replace(/([$])/,"\\$")},k=y?p:c;e.promisify=function(t,e){if("function"!=typeof t)throw new m("expecting a function but got "+f.classString(t));if(i(t))return t;e=Object(e);var n=void 0===e.context?h:e.context,o=!!e.multiArgs,s=u(t,n,o);return f.copyDescriptors(t,s,r),s},e.promisifyAll=function(t,e){if("function"!=typeof t&&"object"!=typeof t)throw new m("the target of promisifyAll must be an object or a function\n\n See http://goo.gl/MqrFmX\n");e=Object(e);var n=!!e.multiArgs,r=e.suffix;"string"!=typeof r&&(r=g);var i=e.filter;"function"!=typeof i&&(i=j);var o=e.promisifier;if("function"!=typeof o&&(o=k),!f.isIdentifier(r))throw new RangeError("suffix must be a valid identifier\n\n See http://goo.gl/MqrFmX\n");for(var s=f.inheritedDataKeys(t),a=0;ao;++o){var s=r[o];e[o]=t[s],e[o+i]=s}}this.constructor$(e),this._isMap=n,this._init$(void 0,n?-6:-3)}function s(t){var n,s=r(t);return l(s)?(n=s instanceof e?s._then(e.props,void 0,void 0,void 0,void 0):new o(s).promise(),s instanceof e&&n._propagateFrom(s,2),n):i("cannot await properties of a non-object\n\n See http://goo.gl/MqrFmX\n")}var a,c=t("./util"),l=c.isObject,u=t("./es5");"function"==typeof Map&&(a=Map);var p=function(){function t(t,r){this[e]=t,this[e+n]=r,e++}var e=0,n=0;return function(r){n=r.size,e=0;var i=new Array(2*r.size);return r.forEach(t,i),i}}(),h=function(t){for(var e=new a,n=t.length/2|0,r=0;n>r;++r){var 
i=t[n+r],o=t[r];e.set(i,o)}return e};c.inherits(o,n),o.prototype._init=function(){},o.prototype._promiseFulfilled=function(t,e){this._values[e]=t;var n=++this._totalResolved;if(n>=this._length){var r;if(this._isMap)r=h(this._values);else{r={};for(var i=this.length(),o=0,s=this.length();s>o;++o)r[this._values[o+i]]=this._values[o]}return this._resolve(r),!0}return!1},o.prototype.shouldCopyValues=function(){return!1},o.prototype.getActualLength=function(t){return t>>1},e.prototype.props=function(){return s(this)},e.props=function(t){return s(t)}}},{"./es5":13,"./util":36}],26:[function(t,e,n){"use strict";function r(t,e,n,r,i){for(var o=0;i>o;++o)n[o+r]=t[o+e],t[o+e]=void 0}function i(t){this._capacity=t,this._length=0,this._front=0}i.prototype._willBeOverCapacity=function(t){return this._capacityh;++h){var _=t[h];(void 0!==_||h in t)&&e.cast(_)._then(u,p,void 0,l,null)}return l}var s=t("./util"),a=function(t){return t.then(function(e){return o(e,t)})};e.race=function(t){return o(t,void 0)},e.prototype.race=function(){return o(this,void 0)}}},{"./util":36}],28:[function(t,e,n){"use strict";e.exports=function(e,n,r,i,o,s){function a(t,n,r,i){this.constructor$(t);var s=h();this._fn=null===s?n:f.domainBind(s,n),void 0!==r&&(r=e.resolve(r),r._attachCancellationCallback(this)),this._initialValue=r,this._currentCancellable=null,i===o?this._eachValues=Array(this._length):0===i?this._eachValues=null:this._eachValues=void 0,this._promise._captureStackTrace(),this._init$(void 0,-5)}function c(t,e){this.isFulfilled()?e._resolve(t):e._reject(t)}function l(t,e,n,i){if("function"!=typeof e)return r("expecting a function but got "+f.classString(e));var o=new a(t,e,n,i);return o.promise()}function u(t){this.accum=t,this.array._gotAccum(t);var n=i(this.value,this.array._promise);return n instanceof e?(this.array._currentCancellable=n,n._then(p,void 0,void 0,this,void 0)):p.call(this,n)}function p(t){var n=this.array,r=n._promise,i=_(n._fn);r._pushContext();var o;o=void 0!==n._eachValues?i.call(r._boundValue(),t,this.index,this.length):i.call(r._boundValue(),this.accum,t,this.index,this.length),o instanceof e&&(n._currentCancellable=o);var a=r._popContext();return s.checkForgottenReturns(o,a,void 0!==n._eachValues?"Promise.each":"Promise.reduce",r),o}var h=e._getDomain,f=t("./util"),_=f.tryCatch;f.inherits(a,n),a.prototype._gotAccum=function(t){void 0!==this._eachValues&&null!==this._eachValues&&t!==o&&this._eachValues.push(t)},a.prototype._eachComplete=function(t){return null!==this._eachValues&&this._eachValues.push(t),this._eachValues},a.prototype._init=function(){},a.prototype._resolveEmptyArray=function(){this._resolve(void 0!==this._eachValues?this._eachValues:this._initialValue)},a.prototype.shouldCopyValues=function(){return!1},a.prototype._resolve=function(t){this._promise._resolveCallback(t),this._values=null},a.prototype._resultCancelled=function(t){return t===this._initialValue?this._cancel():void(this._isResolved()||(this._resultCancelled$(),this._currentCancellable instanceof e&&this._currentCancellable.cancel(),this._initialValue instanceof e&&this._initialValue.cancel()))},a.prototype._iterate=function(t){this._values=t;var n,r,i=t.length;if(void 0!==this._initialValue?(n=this._initialValue,r=0):(n=e.resolve(t[0]),r=1),this._currentCancellable=n,!n.isRejected())for(;i>r;++r){var o={accum:null,value:t[r],index:r,length:i,array:this};n=n._then(u,void 0,void 0,o,void 0)}void 0!==this._eachValues&&(n=n._then(this._eachComplete,void 0,void 0,this,void 0)),n._then(c,c,void 
0,n,this)},e.prototype.reduce=function(t,e){return l(this,t,e,null)},e.reduce=function(t,e,n,r){return l(t,e,n,r)}}},{"./util":36}],29:[function(t,e,n){"use strict";var r,i=t("./util"),o=function(){throw new Error("No async scheduler available\n\n See http://goo.gl/MqrFmX\n")},s=i.getNativePromise();if(i.isNode&&"undefined"==typeof MutationObserver){var a=global.setImmediate,c=process.nextTick;r=i.isRecentNode?function(t){a.call(global,t)}:function(t){c.call(process,t)}}else if("function"==typeof s&&"function"==typeof s.resolve){var l=s.resolve();r=function(t){l.then(t)}}else r="undefined"==typeof MutationObserver||"undefined"!=typeof window&&window.navigator&&(window.navigator.standalone||window.cordova)?"undefined"!=typeof setImmediate?function(t){setImmediate(t)}:"undefined"!=typeof setTimeout?function(t){setTimeout(t,0)}:o:function(){var t=document.createElement("div"),e={attributes:!0},n=!1,r=document.createElement("div"),i=new MutationObserver(function(){t.classList.toggle("foo"),n=!1});i.observe(r,e);var o=function(){n||(n=!0,r.classList.toggle("foo"))};return function(n){var r=new MutationObserver(function(){r.disconnect(),n()});r.observe(t,e),o()}}();e.exports=r},{"./util":36}],30:[function(t,e,n){"use strict";e.exports=function(e,n,r){function i(t){this.constructor$(t)}var o=e.PromiseInspection,s=t("./util");s.inherits(i,n),i.prototype._promiseResolved=function(t,e){this._values[t]=e;var n=++this._totalResolved;return n>=this._length?(this._resolve(this._values),!0):!1},i.prototype._promiseFulfilled=function(t,e){var n=new o;return n._bitField=33554432,n._settledValueField=t,this._promiseResolved(e,n)},i.prototype._promiseRejected=function(t,e){var n=new o;return n._bitField=16777216, -n._settledValueField=t,this._promiseResolved(e,n)},e.settle=function(t){return r.deprecated(".settle()",".reflect()"),new i(t).promise()},e.prototype.settle=function(){return e.settle(this)}}},{"./util":36}],31:[function(t,e,n){"use strict";e.exports=function(e,n,r){function i(t){this.constructor$(t),this._howMany=0,this._unwrap=!1,this._initialized=!1}function o(t,e){if((0|e)!==e||0>e)return r("expecting a positive integer\n\n See http://goo.gl/MqrFmX\n");var n=new i(t),o=n.promise();return n.setHowMany(e),n.init(),o}var s=t("./util"),a=t("./errors").RangeError,c=t("./errors").AggregateError,l=s.isArray,u={};s.inherits(i,n),i.prototype._init=function(){if(this._initialized){if(0===this._howMany)return void this._resolve([]);this._init$(void 0,-5);var t=l(this._values);!this._isResolved()&&t&&this._howMany>this._canPossiblyFulfill()&&this._reject(this._getRangeError(this.length()))}},i.prototype.init=function(){this._initialized=!0,this._init()},i.prototype.setUnwrap=function(){this._unwrap=!0},i.prototype.howMany=function(){return this._howMany},i.prototype.setHowMany=function(t){this._howMany=t},i.prototype._promiseFulfilled=function(t){return this._addFulfilled(t),this._fulfilled()===this.howMany()?(this._values.length=this.howMany(),1===this.howMany()&&this._unwrap?this._resolve(this._values[0]):this._resolve(this._values),!0):!1},i.prototype._promiseRejected=function(t){return this._addRejected(t),this._checkOutcome()},i.prototype._promiseCancelled=function(){return this._values instanceof e||null==this._values?this._cancel():(this._addRejected(u),this._checkOutcome())},i.prototype._checkOutcome=function(){if(this.howMany()>this._canPossiblyFulfill()){for(var t=new c,e=this.length();e0?this._reject(t):this._cancel(),!0}return!1},i.prototype._fulfilled=function(){return 
this._totalResolved},i.prototype._rejected=function(){return this._values.length-this.length()},i.prototype._addRejected=function(t){this._values.push(t)},i.prototype._addFulfilled=function(t){this._values[this._totalResolved++]=t},i.prototype._canPossiblyFulfill=function(){return this.length()-this._rejected()},i.prototype._getRangeError=function(t){var e="Input array must contain at least "+this._howMany+" items but contains only "+t+" items";return new a(e)},i.prototype._resolveEmptyArray=function(){this._reject(this._getRangeError(0))},e.some=function(t,e){return o(t,e)},e.prototype.some=function(t){return o(this,t)},e._SomePromiseArray=i}},{"./errors":12,"./util":36}],32:[function(t,e,n){"use strict";e.exports=function(t){function e(t){void 0!==t?(t=t._target(),this._bitField=t._bitField,this._settledValueField=t._isFateSealed()?t._settledValue():void 0):(this._bitField=0,this._settledValueField=void 0)}e.prototype._settledValue=function(){return this._settledValueField};var n=e.prototype.value=function(){if(!this.isFulfilled())throw new TypeError("cannot get fulfillment value of a non-fulfilled promise\n\n See http://goo.gl/MqrFmX\n");return this._settledValue()},r=e.prototype.error=e.prototype.reason=function(){if(!this.isRejected())throw new TypeError("cannot get rejection reason of a non-rejected promise\n\n See http://goo.gl/MqrFmX\n");return this._settledValue()},i=e.prototype.isFulfilled=function(){return 0!==(33554432&this._bitField)},o=e.prototype.isRejected=function(){return 0!==(16777216&this._bitField)},s=e.prototype.isPending=function(){return 0===(50397184&this._bitField)},a=e.prototype.isResolved=function(){return 0!==(50331648&this._bitField)};e.prototype.isCancelled=function(){return 0!==(8454144&this._bitField)},t.prototype.__isCancelled=function(){return 65536===(65536&this._bitField)},t.prototype._isCancelled=function(){return this._target().__isCancelled()},t.prototype.isCancelled=function(){return 0!==(8454144&this._target()._bitField)},t.prototype.isPending=function(){return s.call(this._target())},t.prototype.isRejected=function(){return o.call(this._target())},t.prototype.isFulfilled=function(){return i.call(this._target())},t.prototype.isResolved=function(){return a.call(this._target())},t.prototype.value=function(){return n.call(this._target())},t.prototype.reason=function(){var t=this._target();return t._unsetRejectionIsUnhandled(),r.call(t)},t.prototype._value=function(){return this._settledValue()},t.prototype._reason=function(){return this._unsetRejectionIsUnhandled(),this._settledValue()},t.PromiseInspection=e}},{}],33:[function(t,e,n){"use strict";e.exports=function(e,n){function r(t,r){if(u(t)){if(t instanceof e)return t;var i=o(t);if(i===l){r&&r._pushContext();var c=e.reject(i.e);return r&&r._popContext(),c}if("function"==typeof i){if(s(t)){var c=new e(n);return t._then(c._fulfill,c._reject,void 0,c,null),c}return a(t,i,r)}}return t}function i(t){return t.then}function o(t){try{return i(t)}catch(e){return l.e=e,l}}function s(t){try{return p.call(t,"_promise0")}catch(e){return!1}}function a(t,r,i){function o(t){a&&(a._resolveCallback(t),a=null)}function s(t){a&&(a._rejectCallback(t,p,!0),a=null)}var a=new e(n),u=a;i&&i._pushContext(),a._captureStackTrace(),i&&i._popContext();var p=!0,h=c.tryCatch(r).call(t,o,s);return p=!1,a&&h===l&&(a._rejectCallback(h.e,!0,!0),a=null),u}var c=t("./util"),l=c.errorObj,u=c.isObject,p={}.hasOwnProperty;return r}},{"./util":36}],34:[function(t,e,n){"use strict";e.exports=function(e,n,r){function 
i(t){this.handle=t}function o(t){return clearTimeout(this.handle),t}function s(t){throw clearTimeout(this.handle),t}var a=t("./util"),c=e.TimeoutError;i.prototype._resultCancelled=function(){clearTimeout(this.handle)};var l=function(t){return u(+this).thenReturn(t)},u=e.delay=function(t,o){var s,a;return void 0!==o?(s=e.resolve(o)._then(l,null,null,t,void 0),r.cancellation()&&o instanceof e&&s._setOnCancel(o)):(s=new e(n),a=setTimeout(function(){s._fulfill()},+t),r.cancellation()&&s._setOnCancel(new i(a)),s._captureStackTrace()),s._setAsyncGuaranteed(),s};e.prototype.delay=function(t){return u(t,this)};var p=function(t,e,n){var r;r="string"!=typeof e?e instanceof Error?e:new c("operation timed out"):new c(e),a.markAsOriginatingFromRejection(r),t._attachExtraTrace(r),t._reject(r),null!=n&&n.cancel()};e.prototype.timeout=function(t,e){t=+t;var n,a,c=new i(setTimeout(function(){n.isPending()&&p(n,e,a)},t));return r.cancellation()?(a=this.then(),n=a._then(o,s,void 0,c,void 0),n._setOnCancel(c)):n=this._then(o,s,void 0,c,void 0),n}}},{"./util":36}],35:[function(t,e,n){"use strict";e.exports=function(e,n,r,i,o,s){function a(t){setTimeout(function(){throw t},0)}function c(t){var e=r(t);return e!==t&&"function"==typeof t._isDisposable&&"function"==typeof t._getDisposer&&t._isDisposable()&&e._setDisposable(t._getDisposer()),e}function l(t,n){function i(){if(s>=l)return u._fulfill();var o=c(t[s++]);if(o instanceof e&&o._isDisposable()){try{o=r(o._getDisposer().tryDispose(n),t.promise)}catch(p){return a(p)}if(o instanceof e)return o._then(i,a,null,null,null)}i()}var s=0,l=t.length,u=new e(o);return i(),u}function u(t,e,n){this._data=t,this._promise=e,this._context=n}function p(t,e,n){this.constructor$(t,e,n)}function h(t){return u.isDisposer(t)?(this.resources[this.index]._setDisposable(t),t.promise()):t}function f(t){this.length=t,this.promise=null,this[t-1]=null}var _=t("./util"),d=t("./errors").TypeError,v=t("./util").inherits,y=_.errorObj,m=_.tryCatch,g={};u.prototype.data=function(){return this._data},u.prototype.promise=function(){return this._promise},u.prototype.resource=function(){return this.promise().isFulfilled()?this.promise().value():g},u.prototype.tryDispose=function(t){var e=this.resource(),n=this._context;void 0!==n&&n._pushContext();var r=e!==g?this.doDispose(e,t):null;return void 0!==n&&n._popContext(),this._promise._unsetDisposable(),this._data=null,r},u.isDisposer=function(t){return null!=t&&"function"==typeof t.resource&&"function"==typeof t.tryDispose},v(p,u),p.prototype.doDispose=function(t,e){var n=this.data();return n.call(t,t,e)},f.prototype._resultCancelled=function(){for(var t=this.length,n=0;t>n;++n){var r=this[n];r instanceof e&&r.cancel()}},e.using=function(){var t=arguments.length;if(2>t)return n("you must pass at least 2 arguments to Promise.using");var i=arguments[t-1];if("function"!=typeof i)return n("expecting a function but got "+_.classString(i));var o,a=!0;2===t&&Array.isArray(arguments[0])?(o=arguments[0],t=o.length,a=!1):(o=arguments,t--);for(var c=new f(t),p=0;t>p;++p){var d=o[p];if(u.isDisposer(d)){var v=d;d=d.promise(),d._setDisposable(v)}else{var g=r(d);g instanceof e&&(d=g._then(h,null,null,{resources:c,index:p},void 0))}c[p]=d}for(var b=new Array(c.length),p=0;p0},e.prototype._getDisposer=function(){return this._disposer},e.prototype._unsetDisposable=function(){this._bitField=-131073&this._bitField,this._disposer=void 0},e.prototype.disposer=function(t){if("function"==typeof t)return new p(t,this,i());throw new 
d}}},{"./errors":12,"./util":36}],36:[function(t,e,n){"use strict";function r(){try{var t=P;return P=null,t.apply(this,arguments)}catch(e){return x.e=e,x}}function i(t){return P=t,r}function o(t){return null==t||t===!0||t===!1||"string"==typeof t||"number"==typeof t}function s(t){return"function"==typeof t||"object"==typeof t&&null!==t}function a(t){return o(t)?new Error(v(t)):t}function c(t,e){var n,r=t.length,i=new Array(r+1);for(n=0;r>n;++n)i[n]=t[n];return i[n]=e,i}function l(t,e,n){if(!F.isES5)return{}.hasOwnProperty.call(t,e)?t[e]:void 0;var r=Object.getOwnPropertyDescriptor(t,e);return null!=r?null==r.get&&null==r.set?r.value:n:void 0}function u(t,e,n){if(o(t))return t;var r={value:n,configurable:!0,enumerable:!1,writable:!0};return F.defineProperty(t,e,r),t}function p(t){throw t}function h(t){try{if("function"==typeof t){var e=F.names(t.prototype),n=F.isES5&&e.length>1,r=e.length>0&&!(1===e.length&&"constructor"===e[0]),i=A.test(t+"")&&F.names(t).length>0;if(n||r||i)return!0}return!1}catch(o){return!1}}function f(t){function e(){}function n(){return typeof r.foo}e.prototype=t;var r=new e;return n(),n(),t}function _(t){return D.test(t)}function d(t,e,n){for(var r=new Array(t),i=0;t>i;++i)r[i]=e+i+n;return r}function v(t){try{return t+""}catch(e){return"[no string representation]"}}function y(t){return t instanceof Error||null!==t&&"object"==typeof t&&"string"==typeof t.message&&"string"==typeof t.name}function m(t){try{u(t,"isOperational",!0)}catch(e){}}function g(t){return null==t?!1:t instanceof Error.__BluebirdErrorTypes__.OperationalError||t.isOperational===!0}function b(t){return y(t)&&F.propertyIsWritable(t,"stack")}function w(t){return{}.toString.call(t)}function C(t,e,n){for(var r=F.names(t),i=0;i10||t[0]>0}(),B.isNode&&B.toFastProperties(process);try{throw new Error}catch(U){B.lastLineError=U}e.exports=B},{"./es5":13}]},{},[4])(4)}),"undefined"!=typeof window&&null!==window?window.P=window.Promise:"undefined"!=typeof self&&null!==self&&(self.P=self.Promise); \ No newline at end of file +!function(t){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{var e;"undefined"!=typeof window?e=window:"undefined"!=typeof global?e=global:"undefined"!=typeof self&&(e=self),e.Promise=t()}}(function(){var t,e,n;return function r(t,e,n){function i(s,a){if(!e[s]){if(!t[s]){var c="function"==typeof _dereq_&&_dereq_;if(!a&&c)return c(s,!0);if(o)return o(s,!0);var l=new Error("Cannot find module '"+s+"'");throw l.code="MODULE_NOT_FOUND",l}var u=e[s]={exports:{}};t[s][0].call(u.exports,function(e){var n=t[s][1][e];return i(n?n:e)},u,u.exports,r,t,e,n)}return e[s].exports}for(var o="function"==typeof _dereq_&&_dereq_,s=0;s0;)c(t)}function c(t){var e=t.shift();if("function"!=typeof e)e._settlePromises();else{var n=t.shift(),r=t.shift();e.call(n,r)}}var l;try{throw new Error}catch(u){l=u}var p=t("./schedule"),h=t("./queue"),f=t("./util");r.prototype.setScheduler=function(t){var e=this._schedule;return this._schedule=t,this._customScheduler=!0,e},r.prototype.hasCustomScheduler=function(){return this._customScheduler},r.prototype.enableTrampoline=function(){this._trampolineEnabled=!0},r.prototype.disableTrampolineIfNecessary=function(){f.hasDevTools&&(this._trampolineEnabled=!1)},r.prototype.haveItemsQueued=function(){return this._isTickUsed||this._haveDrainedQueues},r.prototype.fatalError=function(t,e){e?(process.stderr.write("Fatal "+(t instanceof 
Error?t.stack:t)+"\n"),process.exit(2)):this.throwLater(t)},r.prototype.throwLater=function(t,e){if(1===arguments.length&&(e=t,t=function(){throw e}),"undefined"!=typeof setTimeout)setTimeout(function(){t(e)},0);else try{this._schedule(function(){t(e)})}catch(n){throw new Error("No async scheduler available\n\n See http://goo.gl/MqrFmX\n")}},f.hasDevTools?(r.prototype.invokeLater=function(t,e,n){this._trampolineEnabled?i.call(this,t,e,n):this._schedule(function(){setTimeout(function(){t.call(e,n)},100)})},r.prototype.invoke=function(t,e,n){this._trampolineEnabled?o.call(this,t,e,n):this._schedule(function(){t.call(e,n)})},r.prototype.settlePromises=function(t){this._trampolineEnabled?s.call(this,t):this._schedule(function(){t._settlePromises()})}):(r.prototype.invokeLater=i,r.prototype.invoke=o,r.prototype.settlePromises=s),r.prototype._drainQueues=function(){a(this._normalQueue),this._reset(),this._haveDrainedQueues=!0,a(this._lateQueue)},r.prototype._queueTick=function(){this._isTickUsed||(this._isTickUsed=!0,this._schedule(this.drainQueues))},r.prototype._reset=function(){this._isTickUsed=!1},e.exports=r,e.exports.firstLineError=l},{"./queue":26,"./schedule":29,"./util":36}],3:[function(t,e,n){"use strict";e.exports=function(t,e,n,r){var i=!1,o=function(t,e){this._reject(e)},s=function(t,e){e.promiseRejectionQueued=!0,e.bindingPromise._then(o,o,null,this,t)},a=function(t,e){0===(50397184&this._bitField)&&this._resolveCallback(e.target)},c=function(t,e){e.promiseRejectionQueued||this._reject(t)};t.prototype.bind=function(o){i||(i=!0,t.prototype._propagateFrom=r.propagateFromFunction(),t.prototype._boundValue=r.boundValueFunction());var l=n(o),u=new t(e);u._propagateFrom(this,1);var p=this._target();if(u._setBoundTo(l),l instanceof t){var h={promiseRejectionQueued:!1,promise:u,target:p,bindingPromise:l};p._then(e,s,void 0,u,h),l._then(a,c,void 0,u,h),u._setOnCancel(l)}else u._resolveCallback(p);return u},t.prototype._setBoundTo=function(t){void 0!==t?(this._bitField=2097152|this._bitField,this._boundTo=t):this._bitField=-2097153&this._bitField},t.prototype._isBound=function(){return 2097152===(2097152&this._bitField)},t.bind=function(e,n){return t.resolve(n).bind(e)}}},{}],4:[function(t,e,n){"use strict";function r(){try{Promise===o&&(Promise=i)}catch(t){}return o}var i;"undefined"!=typeof Promise&&(i=Promise);var o=t("./promise")();o.noConflict=r,e.exports=o},{"./promise":22}],5:[function(t,e,n){"use strict";var r=Object.create;if(r){var i=r(null),o=r(null);i[" size"]=o[" size"]=0}e.exports=function(e){function n(t,n){var r;if(null!=t&&(r=t[n]),"function"!=typeof r){var i="Object "+a.classString(t)+" has no method '"+a.toString(n)+"'";throw new e.TypeError(i)}return r}function r(t){var e=this.pop(),r=n(t,e);return r.apply(t,this)}function i(t){return t[this]}function o(t){var e=+this;return 0>e&&(e=Math.max(0,e+t.length)),t[e]}var s,a=t("./util"),c=a.canEvaluate;a.isIdentifier;e.prototype.call=function(t){var e=[].slice.call(arguments,1);return e.push(t),this._then(r,void 0,void 0,e,void 0)},e.prototype.get=function(t){var e,n="number"==typeof t;if(n)e=o;else if(c){var r=s(t);e=null!==r?r:i}else e=i;return this._then(e,void 0,void 0,t,void 0)}}},{"./util":36}],6:[function(t,e,n){"use strict";e.exports=function(e,n,r,i){var o=t("./util"),s=o.tryCatch,a=o.errorObj,c=e._async;e.prototype["break"]=e.prototype.cancel=function(){if(!i.cancellation())return this._warn("cancellation is disabled");for(var 
t=this,e=t;t._isCancellable();){if(!t._cancelBy(e)){e._isFollowing()?e._followee().cancel():e._cancelBranched();break}var n=t._cancellationParent;if(null==n||!n._isCancellable()){t._isFollowing()?t._followee().cancel():t._cancelBranched();break}t._isFollowing()&&t._followee().cancel(),t._setWillBeCancelled(),e=t,t=n}},e.prototype._branchHasCancelled=function(){this._branchesRemainingToCancel--},e.prototype._enoughBranchesHaveCancelled=function(){return void 0===this._branchesRemainingToCancel||this._branchesRemainingToCancel<=0},e.prototype._cancelBy=function(t){return t===this?(this._branchesRemainingToCancel=0,this._invokeOnCancel(),!0):(this._branchHasCancelled(),this._enoughBranchesHaveCancelled()?(this._invokeOnCancel(),!0):!1)},e.prototype._cancelBranched=function(){this._enoughBranchesHaveCancelled()&&this._cancel()},e.prototype._cancel=function(){this._isCancellable()&&(this._setCancelled(),c.invoke(this._cancelPromises,this,void 0))},e.prototype._cancelPromises=function(){this._length()>0&&this._settlePromises()},e.prototype._unsetOnCancel=function(){this._onCancelField=void 0},e.prototype._isCancellable=function(){return this.isPending()&&!this._isCancelled()},e.prototype.isCancellable=function(){return this.isPending()&&!this.isCancelled()},e.prototype._doInvokeOnCancel=function(t,e){if(o.isArray(t))for(var n=0;n=0?o[t]:void 0}var i=!1,o=[];return t.prototype._promiseCreated=function(){},t.prototype._pushContext=function(){},t.prototype._popContext=function(){return null},t._peekContext=t.prototype._peekContext=function(){},e.prototype._pushContext=function(){void 0!==this._trace&&(this._trace._promiseCreated=null,o.push(this._trace))},e.prototype._popContext=function(){if(void 0!==this._trace){var t=o.pop(),e=t._promiseCreated;return t._promiseCreated=null,e}return null},e.CapturedTrace=null,e.create=n,e.deactivateLongStackTraces=function(){},e.activateLongStackTraces=function(){var n=t.prototype._pushContext,o=t.prototype._popContext,s=t._peekContext,a=t.prototype._peekContext,c=t.prototype._promiseCreated;e.deactivateLongStackTraces=function(){t.prototype._pushContext=n,t.prototype._popContext=o,t._peekContext=s,t.prototype._peekContext=a,t.prototype._promiseCreated=c,i=!1},i=!0,t.prototype._pushContext=e.prototype._pushContext,t.prototype._popContext=e.prototype._popContext,t._peekContext=t.prototype._peekContext=r,t.prototype._promiseCreated=function(){var t=this._peekContext();t&&null==t._promiseCreated&&(t._promiseCreated=this)}},e}},{}],9:[function(t,e,n){"use strict";e.exports=function(e,n){function r(t,e){return{promise:e}}function i(){return!1}function o(t,e,n){var r=this;try{t(e,n,function(t){if("function"!=typeof t)throw new TypeError("onCancel must be a function, got: "+H.toString(t));r._attachCancellationCallback(t)})}catch(i){return i}}function s(t){if(!this._isCancellable())return this;var e=this._onCancel();void 0!==e?H.isArray(e)?e.push(t):this._setOnCancel([e,t]):this._setOnCancel(t)}function a(){return this._onCancelField}function c(t){this._onCancelField=t}function l(){this._cancellationParent=void 0,this._onCancelField=void 0}function u(t,e){if(0!==(1&e)){this._cancellationParent=t;var n=t._branchesRemainingToCancel;void 0===n&&(n=0),t._branchesRemainingToCancel=n+1}0!==(2&e)&&t._isBound()&&this._setBoundTo(t._boundTo)}function p(t,e){0!==(2&e)&&t._isBound()&&this._setBoundTo(t._boundTo)}function h(){var t=this._boundTo;return void 0!==t&&t instanceof e?t.isFulfilled()?t.value():void 0:t}function f(){this._trace=new O(this._peekContext())}function 
_(t,e){if(U(t)){var n=this._trace;if(void 0!==n&&e&&(n=n._parent),void 0!==n)n.attachExtraTrace(t);else if(!t.__stackCleaned__){var r=E(t);H.notEnumerableProp(t,"stack",r.message+"\n"+r.stack.join("\n")),H.notEnumerableProp(t,"__stackCleaned__",!0)}}}function d(){this._trace=void 0}function v(t,e,n,r,i){if(void 0===t&&null!==e&&J){if(void 0!==i&&i._returnedNonUndefined())return;if(0===(65535&r._bitField))return;n&&(n+=" ");var o="",s="";if(e._trace){for(var a=e._trace.stack.split("\n"),c=C(a),l=c.length-1;l>=0;--l){var u=c[l];if(!q.test(u)){var p=u.match($);p&&(o="at "+p[1]+":"+p[2]+":"+p[3]+" ");break}}if(c.length>0)for(var h=c[0],l=0;l0&&(s="\n"+a[l-1]);break}}var f="a promise was created in a "+n+"handler "+o+"but was not returned from it, see http://goo.gl/rRqMUw"+s;r._warn(f,!0,e)}}function y(t,e){var n=t+" is deprecated and will be removed in a future version.";return e&&(n+=" Use "+e+" instead."),m(n)}function m(t,n,r){if(at.warnings){var i,o=new N(t);if(n)r._attachExtraTrace(o);else if(at.longStackTraces&&(i=e._peekContext()))i.attachExtraTrace(o);else{var s=E(o);o.stack=s.message+"\n"+s.stack.join("\n")}nt("warning",o)||k(o,"",!0)}}function g(t,e){for(var n=0;n=0;--a)if(r[a]===o){s=a;break}for(var a=s;a>=0;--a){var c=r[a];if(e[i]!==c)break;e.pop(),i--}e=r}}function C(t){for(var e=[],n=0;n0&&"SyntaxError"!=t.name&&(e=e.slice(n)),e}function E(t){var e=t.stack,n=t.toString();return e="string"==typeof e&&e.length>0?j(t):[" (No stack trace)"],{message:n,stack:"SyntaxError"==t.name?e:C(e)}}function k(t,e,n){if("undefined"!=typeof console){var r;if(H.isObject(t)){var i=t.stack;r=e+G(i,t)}else r=e+String(t);"function"==typeof V?V(r,n):("function"==typeof console.log||"object"==typeof console.log)&&console.log(r)}}function F(t,e,n,r){var i=!1;try{"function"==typeof e&&(i=!0,"rejectionHandled"===t?e(r):e(n,r))}catch(o){L.throwLater(o)}"unhandledRejection"===t?nt(t,n,r)||i||k(n,"Unhandled rejection "):nt(t,r)}function T(t){var e;if("function"==typeof t)e="[function "+(t.name||"anonymous")+"]";else{e=t&&"function"==typeof t.toString?t.toString():H.toString(t);var n=/\[object [a-zA-Z0-9$_]+\]/;if(n.test(e))try{var r=JSON.stringify(t);e=r}catch(i){}0===e.length&&(e="(empty array)")}return"(<"+x(e)+">, no stack trace)"}function x(t){var e=41;return t.lengths||0>a||!n||!r||n!==r||s>=a||(it=function(t){if(M.test(t))return!0;var e=S(t);return e&&e.fileName===n&&s<=e.line&&e.line<=a?!0:!1})}}function O(t){this._parent=t,this._promisesCreated=0;var e=this._length=1+(void 0===t?0:t._length);st(this,O),e>32&&this.uncycle()}var A,D,V,I=e._getDomain,L=e._async,N=t("./errors").Warning,H=t("./util"),B=t("./es5"),U=H.canAttachTrace,M=/[\\\/]bluebird[\\\/]js[\\\/](release|debug|instrumented)/,q=/\((?:timers\.js):\d+:\d+\)/,$=/[\/<\(](.+?):(\d+):(\d+)\)?\s*$/,Q=null,G=null,z=!1,X=!(0==H.env("BLUEBIRD_DEBUG")||!H.env("BLUEBIRD_DEBUG")&&"development"!==H.env("NODE_ENV")),W=!(0==H.env("BLUEBIRD_WARNINGS")||!X&&!H.env("BLUEBIRD_WARNINGS")),K=!(0==H.env("BLUEBIRD_LONG_STACK_TRACES")||!X&&!H.env("BLUEBIRD_LONG_STACK_TRACES")),J=0!=H.env("BLUEBIRD_W_FORGOTTEN_RETURN")&&(W||!!H.env("BLUEBIRD_W_FORGOTTEN_RETURN"));e.prototype.suppressUnhandledRejections=function(){var t=this._target();t._bitField=-1048577&t._bitField|524288},e.prototype._ensurePossibleRejectionHandled=function(){if(0===(524288&this._bitField)){this._setRejectionIsUnhandled();var t=this;setTimeout(function(){t._notifyUnhandledRejection()},1)}},e.prototype._notifyUnhandledRejectionIsHandled=function(){F("rejectionHandled",A,void 
0,this)},e.prototype._setReturnedNonUndefined=function(){this._bitField=268435456|this._bitField},e.prototype._returnedNonUndefined=function(){return 0!==(268435456&this._bitField)},e.prototype._notifyUnhandledRejection=function(){if(this._isRejectionUnhandled()){var t=this._settledValue();this._setUnhandledRejectionIsNotified(),F("unhandledRejection",D,t,this)}},e.prototype._setUnhandledRejectionIsNotified=function(){this._bitField=262144|this._bitField},e.prototype._unsetUnhandledRejectionIsNotified=function(){this._bitField=-262145&this._bitField},e.prototype._isUnhandledRejectionNotified=function(){return(262144&this._bitField)>0},e.prototype._setRejectionIsUnhandled=function(){this._bitField=1048576|this._bitField},e.prototype._unsetRejectionIsUnhandled=function(){this._bitField=-1048577&this._bitField,this._isUnhandledRejectionNotified()&&(this._unsetUnhandledRejectionIsNotified(),this._notifyUnhandledRejectionIsHandled())},e.prototype._isRejectionUnhandled=function(){return(1048576&this._bitField)>0},e.prototype._warn=function(t,e,n){return m(t,e,n||this)},e.onPossiblyUnhandledRejection=function(t){var e=I();D="function"==typeof t?null===e?t:H.domainBind(e,t):void 0},e.onUnhandledRejectionHandled=function(t){var e=I();A="function"==typeof t?null===e?t:H.domainBind(e,t):void 0};var Y=function(){};e.longStackTraces=function(){if(L.haveItemsQueued()&&!at.longStackTraces)throw new Error("cannot enable long stack traces after promises have been created\n\n See http://goo.gl/MqrFmX\n");if(!at.longStackTraces&&P()){var t=e.prototype._captureStackTrace,r=e.prototype._attachExtraTrace,i=e.prototype._dereferenceTrace;at.longStackTraces=!0,Y=function(){if(L.haveItemsQueued()&&!at.longStackTraces)throw new Error("cannot enable long stack traces after promises have been created\n\n See http://goo.gl/MqrFmX\n");e.prototype._captureStackTrace=t,e.prototype._attachExtraTrace=r,e.prototype._dereferenceTrace=i,n.deactivateLongStackTraces(),L.enableTrampoline(),at.longStackTraces=!1},e.prototype._captureStackTrace=f,e.prototype._attachExtraTrace=_,e.prototype._dereferenceTrace=d,n.activateLongStackTraces(),L.disableTrampolineIfNecessary()}},e.hasLongStackTraces=function(){return at.longStackTraces&&P()};var Z=function(){try{if("function"==typeof CustomEvent){var t=new CustomEvent("CustomEvent");return H.global.dispatchEvent(t),function(t,e){var n={detail:e,cancelable:!0};B.defineProperty(n,"promise",{value:e.promise}),B.defineProperty(n,"reason",{value:e.reason});var r=new CustomEvent(t.toLowerCase(),n);return!H.global.dispatchEvent(r)}}if("function"==typeof Event){var t=new Event("CustomEvent");return H.global.dispatchEvent(t),function(t,e){var n=new Event(t.toLowerCase(),{cancelable:!0});return n.detail=e,B.defineProperty(n,"promise",{value:e.promise}),B.defineProperty(n,"reason",{value:e.reason}),!H.global.dispatchEvent(n)}}var t=document.createEvent("CustomEvent");return t.initCustomEvent("testingtheevent",!1,!0,{}),H.global.dispatchEvent(t),function(t,e){var n=document.createEvent("CustomEvent");return n.initCustomEvent(t.toLowerCase(),!1,!0,e),!H.global.dispatchEvent(n)}}catch(e){}return function(){return!1}}(),tt=function(){return H.isNode?function(){return process.emit.apply(process,arguments)}:H.global?function(t){var e="on"+t.toLowerCase(),n=H.global[e];return 
n?(n.apply(H.global,[].slice.call(arguments,1)),!0):!1}:function(){return!1}}(),et={promiseCreated:r,promiseFulfilled:r,promiseRejected:r,promiseResolved:r,promiseCancelled:r,promiseChained:function(t,e,n){return{promise:e,child:n}},warning:function(t,e){return{warning:e}},unhandledRejection:function(t,e,n){return{reason:e,promise:n}},rejectionHandled:r},nt=function(t){var e=!1;try{e=tt.apply(null,arguments)}catch(n){L.throwLater(n),e=!0}var r=!1;try{r=Z(t,et[t].apply(null,arguments))}catch(n){L.throwLater(n),r=!0}return r||e};e.config=function(t){if(t=Object(t),"longStackTraces"in t&&(t.longStackTraces?e.longStackTraces():!t.longStackTraces&&e.hasLongStackTraces()&&Y()),"warnings"in t){var n=t.warnings;at.warnings=!!n,J=at.warnings,H.isObject(n)&&"wForgottenReturn"in n&&(J=!!n.wForgottenReturn)}if("cancellation"in t&&t.cancellation&&!at.cancellation){if(L.haveItemsQueued())throw new Error("cannot enable cancellation after promises are in use");e.prototype._clearCancellationData=l,e.prototype._propagateFrom=u,e.prototype._onCancel=a,e.prototype._setOnCancel=c,e.prototype._attachCancellationCallback=s,e.prototype._execute=o,rt=u,at.cancellation=!0}return"monitoring"in t&&(t.monitoring&&!at.monitoring?(at.monitoring=!0,e.prototype._fireEvent=nt):!t.monitoring&&at.monitoring&&(at.monitoring=!1,e.prototype._fireEvent=i)),e},e.prototype._fireEvent=i,e.prototype._execute=function(t,e,n){try{t(e,n)}catch(r){return r}},e.prototype._onCancel=function(){},e.prototype._setOnCancel=function(t){},e.prototype._attachCancellationCallback=function(t){},e.prototype._captureStackTrace=function(){},e.prototype._attachExtraTrace=function(){},e.prototype._dereferenceTrace=function(){},e.prototype._clearCancellationData=function(){},e.prototype._propagateFrom=function(t,e){};var rt=p,it=function(){return!1},ot=/[\/<\(]([^:\/]+):(\d+):(?:\d+)\)?\s*$/;H.inherits(O,Error),n.CapturedTrace=O,O.prototype.uncycle=function(){var t=this._length;if(!(2>t)){for(var e=[],n={},r=0,i=this;void 0!==i;++r)e.push(i),i=i._parent;t=this._length=r;for(var r=t-1;r>=0;--r){var o=e[r].stack;void 0===n[o]&&(n[o]=r)}for(var r=0;t>r;++r){var s=e[r].stack,a=n[s];if(void 0!==a&&a!==r){a>0&&(e[a-1]._parent=void 0,e[a-1]._length=1),e[r]._parent=void 0,e[r]._length=1;var c=r>0?e[r-1]:this;t-1>a?(c._parent=e[a+1],c._parent.uncycle(),c._length=c._parent._length+1):(c._parent=void 0,c._length=1);for(var l=c._length+1,u=r-2;u>=0;--u)e[u]._length=l,l++;return}}}},O.prototype.attachExtraTrace=function(t){if(!t.__stackCleaned__){this.uncycle();for(var e=E(t),n=e.message,r=[e.stack],i=this;void 0!==i;)r.push(C(i.stack.split("\n"))),i=i._parent;w(r),b(r),H.notEnumerableProp(t,"stack",g(n,r)),H.notEnumerableProp(t,"__stackCleaned__",!0)}};var st=function(){var t=/^\s*at\s*/,e=function(t,e){return"string"==typeof t?t:void 0!==e.name&&void 0!==e.message?e.toString():T(e)};if("number"==typeof Error.stackTraceLimit&&"function"==typeof Error.captureStackTrace){Error.stackTraceLimit+=6,Q=t,G=e;var n=Error.captureStackTrace;return it=function(t){return M.test(t)},function(t,e){Error.stackTraceLimit+=6,n(t,e),Error.stackTraceLimit-=6}}var r=new Error;if("string"==typeof r.stack&&r.stack.split("\n")[0].indexOf("stackDetection@")>=0)return Q=/@/,G=e,z=!0,function(t){t.stack=(new Error).stack};var i;try{throw new Error}catch(o){i="stack"in o}return"stack"in r||!i||"number"!=typeof Error.stackTraceLimit?(G=function(t,e){return"string"==typeof t?t:"object"!=typeof e&&"function"!=typeof e||void 0===e.name||void 
0===e.message?T(e):e.toString()},null):(Q=t,G=e,function(t){Error.stackTraceLimit+=6;try{throw new Error}catch(e){t.stack=e.stack}Error.stackTraceLimit-=6})}([]);"undefined"!=typeof console&&"undefined"!=typeof console.warn&&(V=function(t){console.warn(t)},H.isNode&&process.stderr.isTTY?V=function(t,e){var n=e?"":"";console.warn(n+t+"\n")}:H.isNode||"string"!=typeof(new Error).stack||(V=function(t,e){console.warn("%c"+t,e?"color: darkorange":"color: red")}));var at={warnings:W,longStackTraces:!1,cancellation:!1,monitoring:!1};return K&&e.longStackTraces(),{longStackTraces:function(){return at.longStackTraces},warnings:function(){return at.warnings},cancellation:function(){return at.cancellation},monitoring:function(){return at.monitoring},propagateFromFunction:function(){return rt},boundValueFunction:function(){return h},checkForgottenReturns:v,setBounds:R,warn:m,deprecated:y,CapturedTrace:O,fireDomEvent:Z,fireGlobalEvent:tt}}},{"./errors":12,"./es5":13,"./util":36}],10:[function(t,e,n){"use strict";e.exports=function(t){function e(){return this.value}function n(){throw this.reason}t.prototype["return"]=t.prototype.thenReturn=function(n){return n instanceof t&&n.suppressUnhandledRejections(),this._then(e,void 0,void 0,{value:n},void 0)},t.prototype["throw"]=t.prototype.thenThrow=function(t){return this._then(n,void 0,void 0,{reason:t},void 0)},t.prototype.catchThrow=function(t){if(arguments.length<=1)return this._then(void 0,n,void 0,{reason:t},void 0);var e=arguments[1],r=function(){throw e};return this.caught(t,r)},t.prototype.catchReturn=function(n){if(arguments.length<=1)return n instanceof t&&n.suppressUnhandledRejections(),this._then(void 0,e,void 0,{value:n},void 0);var r=arguments[1];r instanceof t&&r.suppressUnhandledRejections();var i=function(){return r};return this.caught(n,i)}}},{}],11:[function(t,e,n){"use strict";e.exports=function(t,e){function n(){return o(this)}function r(t,n){return i(t,n,e,e)}var i=t.reduce,o=t.all;t.prototype.each=function(t){return i(this,t,e,0)._then(n,void 0,void 0,this,void 0)},t.prototype.mapSeries=function(t){return i(this,t,e,e)},t.each=function(t,r){return i(t,r,e,0)._then(n,void 0,void 0,t,void 0)},t.mapSeries=r}},{}],12:[function(t,e,n){"use strict";function r(t,e){function n(r){return this instanceof n?(p(this,"message","string"==typeof r?r:e),p(this,"name",t),void(Error.captureStackTrace?Error.captureStackTrace(this,this.constructor):Error.call(this))):new n(r)}return u(n,Error),n}function i(t){return this instanceof i?(p(this,"name","OperationalError"),p(this,"message",t),this.cause=t,this.isOperational=!0,void(t instanceof Error?(p(this,"message",t.message),p(this,"stack",t.stack)):Error.captureStackTrace&&Error.captureStackTrace(this,this.constructor))):new i(t)}var o,s,a=t("./es5"),c=a.freeze,l=t("./util"),u=l.inherits,p=l.notEnumerableProp,h=r("Warning","warning"),f=r("CancellationError","cancellation error"),_=r("TimeoutError","timeout error"),d=r("AggregateError","aggregate error");try{o=TypeError,s=RangeError}catch(v){o=r("TypeError","type error"),s=r("RangeError","range error")}for(var y="join pop push shift unshift slice filter forEach some every map indexOf lastIndexOf reduce reduceRight sort reverse".split(" "),m=0;m1?t.cancelPromise._reject(e):t.cancelPromise._cancel(),t.cancelPromise=null,!0):!1}function a(){return l.call(this,this.promise._target()._settledValue())}function c(t){return s(this,t)?void 0:(h.e=t,h)}function l(t){var i=this.promise,l=this.handler;if(!this.called){this.called=!0;var 
u=this.isFinallyHandler()?l.call(i._boundValue()):l.call(i._boundValue(),t);if(u===r)return u;if(void 0!==u){i._setReturnedNonUndefined();var f=n(u,i);if(f instanceof e){if(null!=this.cancelPromise){if(f._isCancelled()){var _=new p("late cancellation observer");return i._attachExtraTrace(_),h.e=_,h}f.isPending()&&f._attachCancellationCallback(new o(this))}return f._then(a,c,void 0,this,void 0)}}}return i.isRejected()?(s(this),h.e=t,h):(s(this),t)}var u=t("./util"),p=e.CancellationError,h=u.errorObj,f=t("./catch_filter")(r);return i.prototype.isFinallyHandler=function(){return 0===this.type},o.prototype._resultCancelled=function(){s(this.finallyHandler)},e.prototype._passThrough=function(t,e,n,r){return"function"!=typeof t?this.then():this._then(n,r,void 0,new i(this,e,t),void 0)},e.prototype.lastly=e.prototype["finally"]=function(t){return this._passThrough(t,0,l,l)},e.prototype.tap=function(t){return this._passThrough(t,1,l)},e.prototype.tapCatch=function(t){var n=arguments.length;if(1===n)return this._passThrough(t,1,void 0,l);var r,i=new Array(n-1),o=0;for(r=0;n-1>r;++r){var s=arguments[r];if(!u.isObject(s))return e.reject(new TypeError("tapCatch statement predicate: expecting an object but got "+u.classString(s)));i[o++]=s}i.length=o;var a=arguments[r];return this._passThrough(f(i,a,this),1,void 0,l)},i}},{"./catch_filter":7,"./util":36}],16:[function(t,e,n){"use strict";e.exports=function(e,n,r,i,o,s){function a(t,n,r){for(var o=0;o0&&"function"==typeof arguments[e]){t=arguments[e];var r}var i=[].slice.call(arguments);t&&i.pop();var r=new n(i).promise();return void 0!==t?r.spread(t):r}}},{"./util":36}],18:[function(t,e,n){"use strict";e.exports=function(e,n,r,i,o,s){function a(t,e,n,r){this.constructor$(t),this._promise._captureStackTrace();var i=l();this._callback=null===i?e:u.domainBind(i,e),this._preservedValues=r===o?new Array(this.length()):null,this._limit=n,this._inFlight=0,this._queue=[],f.invoke(this._asyncInit,this,void 0)}function c(t,n,i,o){if("function"!=typeof n)return r("expecting a function but got "+u.classString(n));var s=0;if(void 0!==i){if("object"!=typeof i||null===i)return e.reject(new TypeError("options argument must be an object but it is "+u.classString(i)));if("number"!=typeof i.concurrency)return e.reject(new TypeError("'concurrency' must be a number but it is "+u.classString(i.concurrency)));s=i.concurrency}return s="number"==typeof s&&isFinite(s)&&s>=1?s:0,new a(t,n,s,o).promise()}var l=e._getDomain,u=t("./util"),p=u.tryCatch,h=u.errorObj,f=e._async;u.inherits(a,n),a.prototype._asyncInit=function(){this._init$(void 0,-2)},a.prototype._init=function(){},a.prototype._promiseFulfilled=function(t,n){var r=this._values,o=this.length(),a=this._preservedValues,c=this._limit;if(0>n){if(n=-1*n-1,r[n]=t,c>=1&&(this._inFlight--,this._drainQueue(),this._isResolved()))return!0}else{if(c>=1&&this._inFlight>=c)return r[n]=t,this._queue.push(n),!1;null!==a&&(a[n]=t);var l=this._promise,u=this._callback,f=l._boundValue();l._pushContext();var _=p(u).call(f,t,n,o),d=l._popContext();if(s.checkForgottenReturns(_,d,null!==a?"Promise.filter":"Promise.map",l),_===h)return this._reject(_.e),!0;var v=i(_,this._promise);if(v instanceof e){v=v._target();var y=v._bitField;if(0===(50397184&y))return c>=1&&this._inFlight++,r[n]=v,v._proxy(this,-1*(n+1)),!1;if(0===(33554432&y))return 0!==(16777216&y)?(this._reject(v._reason()),!0):(this._cancel(),!0);_=v._value()}r[n]=_}var m=++this._totalResolved;return 
m>=o?(null!==a?this._filter(r,a):this._resolve(r),!0):!1},a.prototype._drainQueue=function(){for(var t=this._queue,e=this._limit,n=this._values;t.length>0&&this._inFlighto;++o)t[o]&&(r[i++]=e[o]);r.length=i,this._resolve(r)},a.prototype.preservedValues=function(){return this._preservedValues},e.prototype.map=function(t,e){return c(this,t,e,null)},e.map=function(t,e,n,r){return c(t,e,n,r)}}},{"./util":36}],19:[function(t,e,n){"use strict";e.exports=function(e,n,r,i,o){var s=t("./util"),a=s.tryCatch;e.method=function(t){if("function"!=typeof t)throw new e.TypeError("expecting a function but got "+s.classString(t));return function(){var r=new e(n);r._captureStackTrace(),r._pushContext();var i=a(t).apply(this,arguments),s=r._popContext();return o.checkForgottenReturns(i,s,"Promise.method",r),r._resolveFromSyncValue(i),r}},e.attempt=e["try"]=function(t){if("function"!=typeof t)return i("expecting a function but got "+s.classString(t));var r=new e(n);r._captureStackTrace(),r._pushContext();var c;if(arguments.length>1){o.deprecated("calling Promise.try with more than 1 argument");var l=arguments[1],u=arguments[2];c=s.isArray(l)?a(t).apply(u,l):a(t).call(u,l)}else c=a(t)();var p=r._popContext();return o.checkForgottenReturns(c,p,"Promise.try",r),r._resolveFromSyncValue(c),r},e.prototype._resolveFromSyncValue=function(t){t===s.errorObj?this._rejectCallback(t.e,!1):this._resolveCallback(t,!0)}}},{"./util":36}],20:[function(t,e,n){"use strict";function r(t){return t instanceof Error&&u.getPrototypeOf(t)===Error.prototype}function i(t){var e;if(r(t)){e=new l(t),e.name=t.name,e.message=t.message,e.stack=t.stack;for(var n=u.keys(t),i=0;i1){var n,r=new Array(e-1),i=0;for(n=0;e-1>n;++n){var o=arguments[n];if(!f.isObject(o))return p("Catch statement predicate: expecting an object but got "+f.classString(o));r[i++]=o}if(r.length=i,t=arguments[n],"function"!=typeof t)throw new m("The last argument to .catch() must be a function, got "+f.toString(t));return this.then(void 0,P(r,t,this))}return this.then(void 0,t)},i.prototype.reflect=function(){return this._then(u,u,void 0,this,void 0)},i.prototype.then=function(t,e){if(T.warnings()&&arguments.length>0&&"function"!=typeof t&&"function"!=typeof e){var n=".then() only accepts functions but was passed: "+f.classString(t);arguments.length>1&&(n+=", "+f.classString(e)),this._warn(n)}return this._then(t,e,void 0,void 0,void 0)},i.prototype.done=function(t,e){var n=this._then(t,e,void 0,void 0,void 0);n._setIsFinal()},i.prototype.spread=function(t){return"function"!=typeof t?p("expecting a function but got "+f.classString(t)):this.all()._then(t,void 0,void 0,w,void 0)},i.prototype.toJSON=function(){var t={isFulfilled:!1,isRejected:!1,fulfillmentValue:void 0,rejectionReason:void 0};return this.isFulfilled()?(t.fulfillmentValue=this.value(),t.isFulfilled=!0):this.isRejected()&&(t.rejectionReason=this.reason(),t.isRejected=!0),t},i.prototype.all=function(){return arguments.length>0&&this._warn(".all() was passed arguments but it does not take any"),new E(this).promise()},i.prototype.error=function(t){return this.caught(f.originatesFromRejection,t)},i.getNewLibraryCopy=e.exports,i.is=function(t){return t instanceof i},i.fromNode=i.fromCallback=function(t){var e=new i(b);e._captureStackTrace();var n=arguments.length>1?!!Object(arguments[1]).multiArgs:!1,r=O(t)(S(e,n));return r===R&&e._rejectCallback(r.e,!0),e._isFateSealed()||e._setAsyncGuaranteed(),e},i.all=function(t){return new E(t).promise()},i.cast=function(t){var e=j(t);return e instanceof i||(e=new 
i(b),e._captureStackTrace(),e._setFulfilled(),e._rejectionHandler0=t),e},i.resolve=i.fulfilled=i.cast,i.reject=i.rejected=function(t){var e=new i(b);return e._captureStackTrace(),e._rejectCallback(t,!0),e},i.setScheduler=function(t){if("function"!=typeof t)throw new m("expecting a function but got "+f.classString(t));return v.setScheduler(t)},i.prototype._then=function(t,e,n,r,o){var s=void 0!==o,a=s?o:new i(b),l=this._target(),u=l._bitField;s||(a._propagateFrom(this,3),a._captureStackTrace(),void 0===r&&0!==(2097152&this._bitField)&&(r=0!==(50397184&u)?this._boundValue():l===this?void 0:this._boundTo),this._fireEvent("promiseChained",this,a));var p=c();if(0!==(50397184&u)){var h,_,d=l._settlePromiseCtx;0!==(33554432&u)?(_=l._rejectionHandler0,h=t):0!==(16777216&u)?(_=l._fulfillmentHandler0,h=e,l._unsetRejectionIsUnhandled()):(d=l._settlePromiseLateCancellationObserver,_=new g("late cancellation observer"),l._attachExtraTrace(_),h=e),v.invoke(d,l,{handler:null===p?h:"function"==typeof h&&f.domainBind(p,h),promise:a,receiver:r,value:_})}else l._addCallbacks(t,e,a,r,p);return a},i.prototype._length=function(){return 65535&this._bitField},i.prototype._isFateSealed=function(){return 0!==(117506048&this._bitField)},i.prototype._isFollowing=function(){return 67108864===(67108864&this._bitField)},i.prototype._setLength=function(t){this._bitField=-65536&this._bitField|65535&t},i.prototype._setFulfilled=function(){this._bitField=33554432|this._bitField,this._fireEvent("promiseFulfilled",this)},i.prototype._setRejected=function(){this._bitField=16777216|this._bitField,this._fireEvent("promiseRejected",this)},i.prototype._setFollowing=function(){this._bitField=67108864|this._bitField,this._fireEvent("promiseResolved",this)},i.prototype._setIsFinal=function(){this._bitField=4194304|this._bitField},i.prototype._isFinal=function(){return(4194304&this._bitField)>0},i.prototype._unsetCancelled=function(){this._bitField=-65537&this._bitField},i.prototype._setCancelled=function(){this._bitField=65536|this._bitField,this._fireEvent("promiseCancelled",this)},i.prototype._setWillBeCancelled=function(){this._bitField=8388608|this._bitField},i.prototype._setAsyncGuaranteed=function(){v.hasCustomScheduler()||(this._bitField=134217728|this._bitField)},i.prototype._receiverAt=function(t){var e=0===t?this._receiver0:this[4*t-4+3];return e===h?void 0:void 0===e&&this._isBound()?this._boundValue():e},i.prototype._promiseAt=function(t){return this[4*t-4+2]},i.prototype._fulfillmentHandlerAt=function(t){return this[4*t-4+0]},i.prototype._rejectionHandlerAt=function(t){return this[4*t-4+1]},i.prototype._boundValue=function(){},i.prototype._migrateCallback0=function(t){var e=(t._bitField,t._fulfillmentHandler0),n=t._rejectionHandler0,r=t._promise0,i=t._receiverAt(0);void 0===i&&(i=h),this._addCallbacks(e,n,r,i,null)},i.prototype._migrateCallbackAt=function(t,e){var n=t._fulfillmentHandlerAt(e),r=t._rejectionHandlerAt(e),i=t._promiseAt(e),o=t._receiverAt(e);void 0===o&&(o=h),this._addCallbacks(n,r,i,o,null)},i.prototype._addCallbacks=function(t,e,n,r,i){var o=this._length();if(o>=65531&&(o=0,this._setLength(0)),0===o)this._promise0=n,this._receiver0=r,"function"==typeof t&&(this._fulfillmentHandler0=null===i?t:f.domainBind(i,t)),"function"==typeof e&&(this._rejectionHandler0=null===i?e:f.domainBind(i,e));else{var s=4*o-4;this[s+2]=n,this[s+3]=r,"function"==typeof t&&(this[s+0]=null===i?t:f.domainBind(i,t)),"function"==typeof e&&(this[s+1]=null===i?e:f.domainBind(i,e))}return 
this._setLength(o+1),o},i.prototype._proxy=function(t,e){this._addCallbacks(void 0,void 0,e,t,null)},i.prototype._resolveCallback=function(t,e){if(0===(117506048&this._bitField)){if(t===this)return this._rejectCallback(l(),!1);var n=j(t,this);if(!(n instanceof i))return this._fulfill(t);e&&this._propagateFrom(n,2);var r=n._target();if(r===this)return void this._reject(l());var o=r._bitField;if(0===(50397184&o)){var s=this._length();s>0&&r._migrateCallback0(this);for(var a=1;s>a;++a)r._migrateCallbackAt(this,a);this._setFollowing(),this._setLength(0),this._setFollowee(r)}else if(0!==(33554432&o))this._fulfill(r._value());else if(0!==(16777216&o))this._reject(r._reason());else{var c=new g("late cancellation observer");r._attachExtraTrace(c),this._reject(c)}}},i.prototype._rejectCallback=function(t,e,n){var r=f.ensureErrorObject(t),i=r===t;if(!i&&!n&&T.warnings()){var o="a promise was rejected with a non-error: "+f.classString(t);this._warn(o,!0)}this._attachExtraTrace(r,e?i:!1),this._reject(t)},i.prototype._resolveFromExecutor=function(t){if(t!==b){var e=this;this._captureStackTrace(),this._pushContext();var n=!0,r=this._execute(t,function(t){e._resolveCallback(t)},function(t){e._rejectCallback(t,n)});n=!1,this._popContext(),void 0!==r&&e._rejectCallback(r,!0)}},i.prototype._settlePromiseFromHandler=function(t,e,n,r){var i=r._bitField;if(0===(65536&i)){r._pushContext();var o;e===w?n&&"number"==typeof n.length?o=O(t).apply(this._boundValue(),n):(o=R,o.e=new m("cannot .spread() a non-array: "+f.classString(n))):o=O(t).call(e,n);var s=r._popContext();i=r._bitField,0===(65536&i)&&(o===C?r._reject(n):o===R?r._rejectCallback(o.e,!1):(T.checkForgottenReturns(o,s,"",r,this),r._resolveCallback(o)))}},i.prototype._target=function(){for(var t=this;t._isFollowing();)t=t._followee();return t},i.prototype._followee=function(){return this._rejectionHandler0},i.prototype._setFollowee=function(t){this._rejectionHandler0=t},i.prototype._settlePromise=function(t,e,r,o){var s=t instanceof i,a=this._bitField,c=0!==(134217728&a);0!==(65536&a)?(s&&t._invokeInternalOnCancel(),r instanceof x&&r.isFinallyHandler()?(r.cancelPromise=t,O(e).call(r,o)===R&&t._reject(R.e)):e===u?t._fulfill(u.call(r)):r instanceof n?r._promiseCancelled(t):s||t instanceof E?t._cancel():r.cancel()):"function"==typeof e?s?(c&&t._setAsyncGuaranteed(),this._settlePromiseFromHandler(e,r,o,t)):e.call(r,o,t):r instanceof n?r._isResolved()||(0!==(33554432&a)?r._promiseFulfilled(o,t):r._promiseRejected(o,t)):s&&(c&&t._setAsyncGuaranteed(),0!==(33554432&a)?t._fulfill(o):t._reject(o))},i.prototype._settlePromiseLateCancellationObserver=function(t){var e=t.handler,n=t.promise,r=t.receiver,o=t.value;"function"==typeof e?n instanceof i?this._settlePromiseFromHandler(e,r,o,n):e.call(r,o,n):n instanceof i&&n._reject(o)},i.prototype._settlePromiseCtx=function(t){this._settlePromise(t.promise,t.handler,t.receiver,t.value)},i.prototype._settlePromise0=function(t,e,n){var r=this._promise0,i=this._receiverAt(0);this._promise0=void 0,this._receiver0=void 0,this._settlePromise(r,t,i,e)},i.prototype._clearCallbackDataAtIndex=function(t){var e=4*t-4;this[e+2]=this[e+3]=this[e+0]=this[e+1]=void 0},i.prototype._fulfill=function(t){var e=this._bitField;if(!((117506048&e)>>>16)){if(t===this){var n=l();return this._attachExtraTrace(n),this._reject(n)}this._setFulfilled(),this._rejectionHandler0=t,(65535&e)>0&&(0!==(134217728&e)?this._settlePromises():v.settlePromises(this),this._dereferenceTrace())}},i.prototype._reject=function(t){var 
e=this._bitField;if(!((117506048&e)>>>16))return this._setRejected(),this._fulfillmentHandler0=t,this._isFinal()?v.fatalError(t,f.isNode):void((65535&e)>0?v.settlePromises(this):this._ensurePossibleRejectionHandled())},i.prototype._fulfillPromises=function(t,e){for(var n=1;t>n;n++){var r=this._fulfillmentHandlerAt(n),i=this._promiseAt(n),o=this._receiverAt(n);this._clearCallbackDataAtIndex(n),this._settlePromise(i,r,o,e)}},i.prototype._rejectPromises=function(t,e){for(var n=1;t>n;n++){var r=this._rejectionHandlerAt(n),i=this._promiseAt(n),o=this._receiverAt(n);this._clearCallbackDataAtIndex(n),this._settlePromise(i,r,o,e)}},i.prototype._settlePromises=function(){var t=this._bitField,e=65535&t;if(e>0){if(0!==(16842752&t)){var n=this._fulfillmentHandler0;this._settlePromise0(this._rejectionHandler0,n,t),this._rejectPromises(e,n)}else{var r=this._rejectionHandler0;this._settlePromise0(this._fulfillmentHandler0,r,t),this._fulfillPromises(e,r)}this._setLength(0)}this._clearCancellationData()},i.prototype._settledValue=function(){var t=this._bitField;return 0!==(33554432&t)?this._rejectionHandler0:0!==(16777216&t)?this._fulfillmentHandler0:void 0},"undefined"!=typeof Symbol&&Symbol.toStringTag&&_.defineProperty(i.prototype,Symbol.toStringTag,{get:function(){return"Object"}}),i.defer=i.pending=function(){T.deprecated("Promise.defer","new Promise");var t=new i(b);return{promise:t,resolve:o,reject:s}},f.notEnumerableProp(i,"_makeSelfResolutionError",l),t("./method")(i,b,j,p,T),t("./bind")(i,b,j,T),t("./cancel")(i,E,p,T),t("./direct_resolve")(i),t("./synchronous_inspection")(i),t("./join")(i,E,j,b,v,c),i.Promise=i,i.version="3.5.5",t("./call_get.js")(i),t("./generators.js")(i,p,b,j,n,T),t("./map.js")(i,E,p,j,b,T),t("./nodeify.js")(i),t("./promisify.js")(i,b),t("./props.js")(i,E,j,p),t("./race.js")(i,b,j,p),t("./reduce.js")(i,E,p,j,b,T),t("./settle.js")(i,E,T),t("./some.js")(i,E,p),t("./timers.js")(i,b,T),t("./using.js")(i,p,j,F,b,T),t("./any.js")(i),t("./each.js")(i,b),t("./filter.js")(i,b),f.toFastProperties(i),f.toFastProperties(i.prototype),a({a:1}),a({b:2}),a({c:3}),a(1),a(function(){}),a(void 0),a(!1),a(new i(b)),T.setBounds(d.firstLineError,f.lastLineError),i}},{"./any.js":1,"./async":2,"./bind":3,"./call_get.js":5,"./cancel":6,"./catch_filter":7,"./context":8,"./debuggability":9,"./direct_resolve":10,"./each.js":11,"./errors":12,"./es5":13,"./filter.js":14,"./finally":15,"./generators.js":16,"./join":17,"./map.js":18,"./method":19,"./nodeback":20,"./nodeify.js":21,"./promise_array":23,"./promisify.js":24,"./props.js":25,"./race.js":27,"./reduce.js":28,"./settle.js":30,"./some.js":31,"./synchronous_inspection":32,"./thenables":33,"./timers.js":34,"./using.js":35,"./util":36}],23:[function(t,e,n){"use strict";e.exports=function(e,n,r,i,o){function s(t){switch(t){case-2:return[];case-3:return{};case-6:return new Map}}function a(t){var r=this._promise=new e(n);t instanceof e&&r._propagateFrom(t,3),r._setOnCancel(this),this._values=t,this._length=0,this._totalResolved=0,this._init(void 0,-2)}var c=t("./util");c.isArray;return c.inherits(a,o),a.prototype.length=function(){return this._length},a.prototype.promise=function(){return this._promise},a.prototype._init=function l(t,n){var o=r(this._values,this._promise);if(o instanceof e){o=o._target();var a=o._bitField;if(this._values=o,0===(50397184&a))return this._promise._setAsyncGuaranteed(),o._then(l,this._reject,void 0,this,n);if(0===(33554432&a))return 
0!==(16777216&a)?this._reject(o._reason()):this._cancel();o=o._value()}if(o=c.asArray(o),null===o){var u=i("expecting an array or an iterable object but got "+c.classString(o)).reason();return void this._promise._rejectCallback(u,!1)}return 0===o.length?void(-5===n?this._resolveEmptyArray():this._resolve(s(n))):void this._iterate(o)},a.prototype._iterate=function(t){var n=this.getActualLength(t.length);this._length=n,this._values=this.shouldCopyValues()?new Array(n):this._values;for(var i=this._promise,o=!1,s=null,a=0;n>a;++a){var c=r(t[a],i);c instanceof e?(c=c._target(),s=c._bitField):s=null,o?null!==s&&c.suppressUnhandledRejections():null!==s?0===(50397184&s)?(c._proxy(this,a),this._values[a]=c):o=0!==(33554432&s)?this._promiseFulfilled(c._value(),a):0!==(16777216&s)?this._promiseRejected(c._reason(),a):this._promiseCancelled(a):o=this._promiseFulfilled(c,a)}o||i._setAsyncGuaranteed()},a.prototype._isResolved=function(){return null===this._values},a.prototype._resolve=function(t){this._values=null,this._promise._fulfill(t)},a.prototype._cancel=function(){!this._isResolved()&&this._promise._isCancellable()&&(this._values=null,this._promise._cancel())},a.prototype._reject=function(t){this._values=null,this._promise._rejectCallback(t,!1)},a.prototype._promiseFulfilled=function(t,e){this._values[e]=t;var n=++this._totalResolved;return n>=this._length?(this._resolve(this._values),!0):!1},a.prototype._promiseCancelled=function(){return this._cancel(),!0},a.prototype._promiseRejected=function(t){return this._totalResolved++,this._reject(t),!0},a.prototype._resultCancelled=function(){if(!this._isResolved()){var t=this._values;if(this._cancel(),t instanceof e)t.cancel();else for(var n=0;nc;c+=2){var u=s[c],p=s[c+1],_=u+e;if(r===k)t[_]=k(u,h,u,p,e,i);else{var d=r(p,function(){return k(u,h,u,p,e,i)});f.notEnumerableProp(d,"__isPromisified__",!0),t[_]=d}}return f.toFastProperties(t),t}function u(t,e,n){return k(t,e,void 0,t,null,n)}var p,h={},f=t("./util"),_=t("./nodeback"),d=f.withAppended,v=f.maybeWrapAsError,y=f.canEvaluate,m=t("./errors").TypeError,g="Async",b={__isPromisified__:!0},w=["arity","length","name","arguments","caller","callee","prototype","__isPromisified__"],C=new RegExp("^(?:"+w.join("|")+")$"),j=function(t){return f.isIdentifier(t)&&"_"!==t.charAt(0)&&"constructor"!==t},E=function(t){return t.replace(/([$])/,"\\$")},k=y?p:c;e.promisify=function(t,e){if("function"!=typeof t)throw new m("expecting a function but got "+f.classString(t));if(i(t))return t;e=Object(e);var n=void 0===e.context?h:e.context,o=!!e.multiArgs,s=u(t,n,o);return f.copyDescriptors(t,s,r),s},e.promisifyAll=function(t,e){if("function"!=typeof t&&"object"!=typeof t)throw new m("the target of promisifyAll must be an object or a function\n\n See http://goo.gl/MqrFmX\n");e=Object(e);var n=!!e.multiArgs,r=e.suffix;"string"!=typeof r&&(r=g);var i=e.filter;"function"!=typeof i&&(i=j);var o=e.promisifier;if("function"!=typeof o&&(o=k),!f.isIdentifier(r))throw new RangeError("suffix must be a valid identifier\n\n See http://goo.gl/MqrFmX\n");for(var s=f.inheritedDataKeys(t),a=0;ao;++o){var s=r[o];e[o]=t[s],e[o+i]=s}}this.constructor$(e),this._isMap=n,this._init$(void 0,n?-6:-3)}function s(t){var n,s=r(t);return l(s)?(n=s instanceof e?s._then(e.props,void 0,void 0,void 0,void 0):new o(s).promise(),s instanceof e&&n._propagateFrom(s,2),n):i("cannot await properties of a non-object\n\n See http://goo.gl/MqrFmX\n")}var a,c=t("./util"),l=c.isObject,u=t("./es5");"function"==typeof Map&&(a=Map);var p=function(){function 
t(t,r){this[e]=t,this[e+n]=r,e++}var e=0,n=0;return function(r){n=r.size,e=0;var i=new Array(2*r.size);return r.forEach(t,i),i}}(),h=function(t){for(var e=new a,n=t.length/2|0,r=0;n>r;++r){var i=t[n+r],o=t[r];e.set(i,o)}return e};c.inherits(o,n),o.prototype._init=function(){},o.prototype._promiseFulfilled=function(t,e){this._values[e]=t;var n=++this._totalResolved;if(n>=this._length){var r;if(this._isMap)r=h(this._values);else{r={};for(var i=this.length(),o=0,s=this.length();s>o;++o)r[this._values[o+i]]=this._values[o]}return this._resolve(r),!0}return!1},o.prototype.shouldCopyValues=function(){return!1},o.prototype.getActualLength=function(t){return t>>1},e.prototype.props=function(){return s(this)},e.props=function(t){return s(t)}}},{"./es5":13,"./util":36}],26:[function(t,e,n){"use strict";function r(t,e,n,r,i){for(var o=0;i>o;++o)n[o+r]=t[o+e],t[o+e]=void 0}function i(t){this._capacity=t,this._length=0,this._front=0}i.prototype._willBeOverCapacity=function(t){return this._capacityh;++h){var _=t[h];(void 0!==_||h in t)&&e.cast(_)._then(u,p,void 0,l,null)}return l}var s=t("./util"),a=function(t){return t.then(function(e){return o(e,t)})};e.race=function(t){return o(t,void 0)},e.prototype.race=function(){return o(this,void 0)}}},{"./util":36}],28:[function(t,e,n){"use strict";e.exports=function(e,n,r,i,o,s){function a(t,n,r,i){this.constructor$(t);var s=h();this._fn=null===s?n:f.domainBind(s,n),void 0!==r&&(r=e.resolve(r),r._attachCancellationCallback(this)),this._initialValue=r,this._currentCancellable=null,i===o?this._eachValues=Array(this._length):0===i?this._eachValues=null:this._eachValues=void 0,this._promise._captureStackTrace(),this._init$(void 0,-5)}function c(t,e){this.isFulfilled()?e._resolve(t):e._reject(t)}function l(t,e,n,i){if("function"!=typeof e)return r("expecting a function but got "+f.classString(e));var o=new a(t,e,n,i);return o.promise()}function u(t){this.accum=t,this.array._gotAccum(t);var n=i(this.value,this.array._promise);return n instanceof e?(this.array._currentCancellable=n,n._then(p,void 0,void 0,this,void 0)):p.call(this,n)}function p(t){var n=this.array,r=n._promise,i=_(n._fn);r._pushContext();var o;o=void 0!==n._eachValues?i.call(r._boundValue(),t,this.index,this.length):i.call(r._boundValue(),this.accum,t,this.index,this.length),o instanceof e&&(n._currentCancellable=o);var a=r._popContext();return s.checkForgottenReturns(o,a,void 0!==n._eachValues?"Promise.each":"Promise.reduce",r),o}var h=e._getDomain,f=t("./util"),_=f.tryCatch;f.inherits(a,n),a.prototype._gotAccum=function(t){void 0!==this._eachValues&&null!==this._eachValues&&t!==o&&this._eachValues.push(t)},a.prototype._eachComplete=function(t){return null!==this._eachValues&&this._eachValues.push(t),this._eachValues},a.prototype._init=function(){},a.prototype._resolveEmptyArray=function(){this._resolve(void 0!==this._eachValues?this._eachValues:this._initialValue)},a.prototype.shouldCopyValues=function(){return!1},a.prototype._resolve=function(t){this._promise._resolveCallback(t),this._values=null},a.prototype._resultCancelled=function(t){return t===this._initialValue?this._cancel():void(this._isResolved()||(this._resultCancelled$(),this._currentCancellable instanceof e&&this._currentCancellable.cancel(),this._initialValue instanceof e&&this._initialValue.cancel()))},a.prototype._iterate=function(t){this._values=t;var n,r,i=t.length;if(void 0!==this._initialValue?(n=this._initialValue,r=0):(n=e.resolve(t[0]),r=1),this._currentCancellable=n,!n.isRejected())for(;i>r;++r){var 
o={accum:null,value:t[r],index:r,length:i,array:this};n=n._then(u,void 0,void 0,o,void 0)}void 0!==this._eachValues&&(n=n._then(this._eachComplete,void 0,void 0,this,void 0)),n._then(c,c,void 0,n,this)},e.prototype.reduce=function(t,e){return l(this,t,e,null)},e.reduce=function(t,e,n,r){return l(t,e,n,r)}}},{"./util":36}],29:[function(t,e,n){"use strict";var r,i=t("./util"),o=function(){throw new Error("No async scheduler available\n\n See http://goo.gl/MqrFmX\n")},s=i.getNativePromise();if(i.isNode&&"undefined"==typeof MutationObserver){var a=global.setImmediate,c=process.nextTick;r=i.isRecentNode?function(t){a.call(global,t)}:function(t){c.call(process,t)}}else if("function"==typeof s&&"function"==typeof s.resolve){var l=s.resolve();r=function(t){l.then(t)}}else r="undefined"!=typeof MutationObserver&&("undefined"==typeof window||!window.navigator||!window.navigator.standalone&&!window.cordova)&&"classList"in document.documentElement?function(){var t=document.createElement("div"),e={attributes:!0},n=!1,r=document.createElement("div"),i=new MutationObserver(function(){t.classList.toggle("foo"),n=!1});i.observe(r,e);var o=function(){n||(n=!0,r.classList.toggle("foo"))};return function(n){var r=new MutationObserver(function(){r.disconnect(),n()});r.observe(t,e),o()}}():"undefined"!=typeof setImmediate?function(t){setImmediate(t)}:"undefined"!=typeof setTimeout?function(t){setTimeout(t,0)}:o;e.exports=r},{"./util":36 +}],30:[function(t,e,n){"use strict";e.exports=function(e,n,r){function i(t){this.constructor$(t)}var o=e.PromiseInspection,s=t("./util");s.inherits(i,n),i.prototype._promiseResolved=function(t,e){this._values[t]=e;var n=++this._totalResolved;return n>=this._length?(this._resolve(this._values),!0):!1},i.prototype._promiseFulfilled=function(t,e){var n=new o;return n._bitField=33554432,n._settledValueField=t,this._promiseResolved(e,n)},i.prototype._promiseRejected=function(t,e){var n=new o;return n._bitField=16777216,n._settledValueField=t,this._promiseResolved(e,n)},e.settle=function(t){return r.deprecated(".settle()",".reflect()"),new i(t).promise()},e.prototype.settle=function(){return e.settle(this)}}},{"./util":36}],31:[function(t,e,n){"use strict";e.exports=function(e,n,r){function i(t){this.constructor$(t),this._howMany=0,this._unwrap=!1,this._initialized=!1}function o(t,e){if((0|e)!==e||0>e)return r("expecting a positive integer\n\n See http://goo.gl/MqrFmX\n");var n=new i(t),o=n.promise();return n.setHowMany(e),n.init(),o}var s=t("./util"),a=t("./errors").RangeError,c=t("./errors").AggregateError,l=s.isArray,u={};s.inherits(i,n),i.prototype._init=function(){if(this._initialized){if(0===this._howMany)return void this._resolve([]);this._init$(void 0,-5);var t=l(this._values);!this._isResolved()&&t&&this._howMany>this._canPossiblyFulfill()&&this._reject(this._getRangeError(this.length()))}},i.prototype.init=function(){this._initialized=!0,this._init()},i.prototype.setUnwrap=function(){this._unwrap=!0},i.prototype.howMany=function(){return this._howMany},i.prototype.setHowMany=function(t){this._howMany=t},i.prototype._promiseFulfilled=function(t){return this._addFulfilled(t),this._fulfilled()===this.howMany()?(this._values.length=this.howMany(),1===this.howMany()&&this._unwrap?this._resolve(this._values[0]):this._resolve(this._values),!0):!1},i.prototype._promiseRejected=function(t){return this._addRejected(t),this._checkOutcome()},i.prototype._promiseCancelled=function(){return this._values instanceof 
e||null==this._values?this._cancel():(this._addRejected(u),this._checkOutcome())},i.prototype._checkOutcome=function(){if(this.howMany()>this._canPossiblyFulfill()){for(var t=new c,e=this.length();e0?this._reject(t):this._cancel(),!0}return!1},i.prototype._fulfilled=function(){return this._totalResolved},i.prototype._rejected=function(){return this._values.length-this.length()},i.prototype._addRejected=function(t){this._values.push(t)},i.prototype._addFulfilled=function(t){this._values[this._totalResolved++]=t},i.prototype._canPossiblyFulfill=function(){return this.length()-this._rejected()},i.prototype._getRangeError=function(t){var e="Input array must contain at least "+this._howMany+" items but contains only "+t+" items";return new a(e)},i.prototype._resolveEmptyArray=function(){this._reject(this._getRangeError(0))},e.some=function(t,e){return o(t,e)},e.prototype.some=function(t){return o(this,t)},e._SomePromiseArray=i}},{"./errors":12,"./util":36}],32:[function(t,e,n){"use strict";e.exports=function(t){function e(t){void 0!==t?(t=t._target(),this._bitField=t._bitField,this._settledValueField=t._isFateSealed()?t._settledValue():void 0):(this._bitField=0,this._settledValueField=void 0)}e.prototype._settledValue=function(){return this._settledValueField};var n=e.prototype.value=function(){if(!this.isFulfilled())throw new TypeError("cannot get fulfillment value of a non-fulfilled promise\n\n See http://goo.gl/MqrFmX\n");return this._settledValue()},r=e.prototype.error=e.prototype.reason=function(){if(!this.isRejected())throw new TypeError("cannot get rejection reason of a non-rejected promise\n\n See http://goo.gl/MqrFmX\n");return this._settledValue()},i=e.prototype.isFulfilled=function(){return 0!==(33554432&this._bitField)},o=e.prototype.isRejected=function(){return 0!==(16777216&this._bitField)},s=e.prototype.isPending=function(){return 0===(50397184&this._bitField)},a=e.prototype.isResolved=function(){return 0!==(50331648&this._bitField)};e.prototype.isCancelled=function(){return 0!==(8454144&this._bitField)},t.prototype.__isCancelled=function(){return 65536===(65536&this._bitField)},t.prototype._isCancelled=function(){return this._target().__isCancelled()},t.prototype.isCancelled=function(){return 0!==(8454144&this._target()._bitField)},t.prototype.isPending=function(){return s.call(this._target())},t.prototype.isRejected=function(){return o.call(this._target())},t.prototype.isFulfilled=function(){return i.call(this._target())},t.prototype.isResolved=function(){return a.call(this._target())},t.prototype.value=function(){return n.call(this._target())},t.prototype.reason=function(){var t=this._target();return t._unsetRejectionIsUnhandled(),r.call(t)},t.prototype._value=function(){return this._settledValue()},t.prototype._reason=function(){return this._unsetRejectionIsUnhandled(),this._settledValue()},t.PromiseInspection=e}},{}],33:[function(t,e,n){"use strict";e.exports=function(e,n){function r(t,r){if(u(t)){if(t instanceof e)return t;var i=o(t);if(i===l){r&&r._pushContext();var c=e.reject(i.e);return r&&r._popContext(),c}if("function"==typeof i){if(s(t)){var c=new e(n);return t._then(c._fulfill,c._reject,void 0,c,null),c}return a(t,i,r)}}return t}function i(t){return t.then}function o(t){try{return i(t)}catch(e){return l.e=e,l}}function s(t){try{return p.call(t,"_promise0")}catch(e){return!1}}function a(t,r,i){function o(t){a&&(a._resolveCallback(t),a=null)}function s(t){a&&(a._rejectCallback(t,p,!0),a=null)}var a=new 
e(n),u=a;i&&i._pushContext(),a._captureStackTrace(),i&&i._popContext();var p=!0,h=c.tryCatch(r).call(t,o,s);return p=!1,a&&h===l&&(a._rejectCallback(h.e,!0,!0),a=null),u}var c=t("./util"),l=c.errorObj,u=c.isObject,p={}.hasOwnProperty;return r}},{"./util":36}],34:[function(t,e,n){"use strict";e.exports=function(e,n,r){function i(t){this.handle=t}function o(t){return clearTimeout(this.handle),t}function s(t){throw clearTimeout(this.handle),t}var a=t("./util"),c=e.TimeoutError;i.prototype._resultCancelled=function(){clearTimeout(this.handle)};var l=function(t){return u(+this).thenReturn(t)},u=e.delay=function(t,o){var s,a;return void 0!==o?(s=e.resolve(o)._then(l,null,null,t,void 0),r.cancellation()&&o instanceof e&&s._setOnCancel(o)):(s=new e(n),a=setTimeout(function(){s._fulfill()},+t),r.cancellation()&&s._setOnCancel(new i(a)),s._captureStackTrace()),s._setAsyncGuaranteed(),s};e.prototype.delay=function(t){return u(t,this)};var p=function(t,e,n){var r;r="string"!=typeof e?e instanceof Error?e:new c("operation timed out"):new c(e),a.markAsOriginatingFromRejection(r),t._attachExtraTrace(r),t._reject(r),null!=n&&n.cancel()};e.prototype.timeout=function(t,e){t=+t;var n,a,c=new i(setTimeout(function(){n.isPending()&&p(n,e,a)},t));return r.cancellation()?(a=this.then(),n=a._then(o,s,void 0,c,void 0),n._setOnCancel(c)):n=this._then(o,s,void 0,c,void 0),n}}},{"./util":36}],35:[function(t,e,n){"use strict";e.exports=function(e,n,r,i,o,s){function a(t){setTimeout(function(){throw t},0)}function c(t){var e=r(t);return e!==t&&"function"==typeof t._isDisposable&&"function"==typeof t._getDisposer&&t._isDisposable()&&e._setDisposable(t._getDisposer()),e}function l(t,n){function i(){if(s>=l)return u._fulfill();var o=c(t[s++]);if(o instanceof e&&o._isDisposable()){try{o=r(o._getDisposer().tryDispose(n),t.promise)}catch(p){return a(p)}if(o instanceof e)return o._then(i,a,null,null,null)}i()}var s=0,l=t.length,u=new e(o);return i(),u}function u(t,e,n){this._data=t,this._promise=e,this._context=n}function p(t,e,n){this.constructor$(t,e,n)}function h(t){return u.isDisposer(t)?(this.resources[this.index]._setDisposable(t),t.promise()):t}function f(t){this.length=t,this.promise=null,this[t-1]=null}var _=t("./util"),d=t("./errors").TypeError,v=t("./util").inherits,y=_.errorObj,m=_.tryCatch,g={};u.prototype.data=function(){return this._data},u.prototype.promise=function(){return this._promise},u.prototype.resource=function(){return this.promise().isFulfilled()?this.promise().value():g},u.prototype.tryDispose=function(t){var e=this.resource(),n=this._context;void 0!==n&&n._pushContext();var r=e!==g?this.doDispose(e,t):null;return void 0!==n&&n._popContext(),this._promise._unsetDisposable(),this._data=null,r},u.isDisposer=function(t){return null!=t&&"function"==typeof t.resource&&"function"==typeof t.tryDispose},v(p,u),p.prototype.doDispose=function(t,e){var n=this.data();return n.call(t,t,e)},f.prototype._resultCancelled=function(){for(var t=this.length,n=0;t>n;++n){var r=this[n];r instanceof e&&r.cancel()}},e.using=function(){var t=arguments.length;if(2>t)return n("you must pass at least 2 arguments to Promise.using");var i=arguments[t-1];if("function"!=typeof i)return n("expecting a function but got "+_.classString(i));var o,a=!0;2===t&&Array.isArray(arguments[0])?(o=arguments[0],t=o.length,a=!1):(o=arguments,t--);for(var c=new f(t),p=0;t>p;++p){var d=o[p];if(u.isDisposer(d)){var v=d;d=d.promise(),d._setDisposable(v)}else{var g=r(d);g instanceof e&&(d=g._then(h,null,null,{resources:c,index:p},void 
0))}c[p]=d}for(var b=new Array(c.length),p=0;p0},e.prototype._getDisposer=function(){return this._disposer},e.prototype._unsetDisposable=function(){this._bitField=-131073&this._bitField,this._disposer=void 0},e.prototype.disposer=function(t){if("function"==typeof t)return new p(t,this,i());throw new d}}},{"./errors":12,"./util":36}],36:[function(t,e,n){"use strict";function r(){try{var t=P;return P=null,t.apply(this,arguments)}catch(e){return x.e=e,x}}function i(t){return P=t,r}function o(t){return null==t||t===!0||t===!1||"string"==typeof t||"number"==typeof t}function s(t){return"function"==typeof t||"object"==typeof t&&null!==t}function a(t){return o(t)?new Error(v(t)):t}function c(t,e){var n,r=t.length,i=new Array(r+1);for(n=0;r>n;++n)i[n]=t[n];return i[n]=e,i}function l(t,e,n){if(!F.isES5)return{}.hasOwnProperty.call(t,e)?t[e]:void 0;var r=Object.getOwnPropertyDescriptor(t,e);return null!=r?null==r.get&&null==r.set?r.value:n:void 0}function u(t,e,n){if(o(t))return t;var r={value:n,configurable:!0,enumerable:!1,writable:!0};return F.defineProperty(t,e,r),t}function p(t){throw t}function h(t){try{if("function"==typeof t){var e=F.names(t.prototype),n=F.isES5&&e.length>1,r=e.length>0&&!(1===e.length&&"constructor"===e[0]),i=A.test(t+"")&&F.names(t).length>0;if(n||r||i)return!0}return!1}catch(o){return!1}}function f(t){function e(){}function n(){return typeof r.foo}e.prototype=t;var r=new e;return n(),n(),t}function _(t){return D.test(t)}function d(t,e,n){for(var r=new Array(t),i=0;t>i;++i)r[i]=e+i+n;return r}function v(t){try{return t+""}catch(e){return"[no string representation]"}}function y(t){return t instanceof Error||null!==t&&"object"==typeof t&&"string"==typeof t.message&&"string"==typeof t.name}function m(t){try{u(t,"isOperational",!0)}catch(e){}}function g(t){return null==t?!1:t instanceof Error.__BluebirdErrorTypes__.OperationalError||t.isOperational===!0}function b(t){return y(t)&&F.propertyIsWritable(t,"stack")}function w(t){return{}.toString.call(t)}function C(t,e,n){for(var r=F.names(t),i=0;i10||t[0]>0}(),B.isNode&&B.toFastProperties(process);try{throw new Error}catch(U){B.lastLineError=U}e.exports=B},{"./es5":13}]},{},[4])(4)}),"undefined"!=typeof window&&null!==window?window.P=window.Promise:"undefined"!=typeof self&&null!==self&&(self.P=self.Promise); \ No newline at end of file diff --git a/node_modules/bluebird/js/release/debuggability.js b/node_modules/bluebird/js/release/debuggability.js index 213d4ac6d27e4..9a64c99c90b34 100644 --- a/node_modules/bluebird/js/release/debuggability.js +++ b/node_modules/bluebird/js/release/debuggability.js @@ -689,8 +689,8 @@ function parseLineInfo(line) { function setBounds(firstLineError, lastLineError) { if (!longStackTracesIsSupported()) return; - var firstStackLines = firstLineError.stack.split("\n"); - var lastStackLines = lastLineError.stack.split("\n"); + var firstStackLines = (firstLineError.stack || "").split("\n"); + var lastStackLines = (lastLineError.stack || "").split("\n"); var firstIndex = -1; var lastIndex = -1; var firstFileName; diff --git a/node_modules/bluebird/js/release/promise.js b/node_modules/bluebird/js/release/promise.js index f28d2f01f3023..d80b44da43813 100644 --- a/node_modules/bluebird/js/release/promise.js +++ b/node_modules/bluebird/js/release/promise.js @@ -101,6 +101,11 @@ Promise.prototype.caught = Promise.prototype["catch"] = function (fn) { } catchInstances.length = j; fn = arguments[i]; + + if (typeof fn !== "function") { + throw new TypeError("The last argument to .catch() " + + "must be a 
function, got " + util.toString(fn)); + } return this.then(undefined, catchFilter(catchInstances, fn, this)); } return this.then(undefined, fn); @@ -708,6 +713,14 @@ Promise.prototype._settledValue = function() { } }; +if (typeof Symbol !== "undefined" && Symbol.toStringTag) { + es5.defineProperty(Promise.prototype, Symbol.toStringTag, { + get: function () { + return "Object"; + } + }); +} + function deferResolve(v) {this.promise._resolveCallback(v);} function deferReject(v) {this.promise._rejectCallback(v, false);} @@ -734,12 +747,10 @@ require("./synchronous_inspection")(Promise); require("./join")( Promise, PromiseArray, tryConvertToPromise, INTERNAL, async, getDomain); Promise.Promise = Promise; -Promise.version = "3.5.3"; -require('./map.js')(Promise, PromiseArray, apiRejection, tryConvertToPromise, INTERNAL, debug); +Promise.version = "3.5.5"; require('./call_get.js')(Promise); -require('./using.js')(Promise, apiRejection, tryConvertToPromise, createContext, INTERNAL, debug); -require('./timers.js')(Promise, INTERNAL, debug); require('./generators.js')(Promise, apiRejection, INTERNAL, tryConvertToPromise, Proxyable, debug); +require('./map.js')(Promise, PromiseArray, apiRejection, tryConvertToPromise, INTERNAL, debug); require('./nodeify.js')(Promise); require('./promisify.js')(Promise, INTERNAL); require('./props.js')(Promise, PromiseArray, tryConvertToPromise, apiRejection); @@ -747,9 +758,11 @@ require('./race.js')(Promise, INTERNAL, tryConvertToPromise, apiRejection); require('./reduce.js')(Promise, PromiseArray, apiRejection, tryConvertToPromise, INTERNAL, debug); require('./settle.js')(Promise, PromiseArray, debug); require('./some.js')(Promise, PromiseArray, apiRejection); -require('./filter.js')(Promise, INTERNAL); -require('./each.js')(Promise, INTERNAL); +require('./timers.js')(Promise, INTERNAL, debug); +require('./using.js')(Promise, apiRejection, tryConvertToPromise, createContext, INTERNAL, debug); require('./any.js')(Promise); +require('./each.js')(Promise, INTERNAL); +require('./filter.js')(Promise, INTERNAL); util.toFastProperties(Promise); util.toFastProperties(Promise.prototype); diff --git a/node_modules/bluebird/js/release/schedule.js b/node_modules/bluebird/js/release/schedule.js index f70df9fc123b8..15197d1433cc8 100644 --- a/node_modules/bluebird/js/release/schedule.js +++ b/node_modules/bluebird/js/release/schedule.js @@ -20,7 +20,8 @@ if (util.isNode && typeof MutationObserver === "undefined") { } else if ((typeof MutationObserver !== "undefined") && !(typeof window !== "undefined" && window.navigator && - (window.navigator.standalone || window.cordova))) { + (window.navigator.standalone || window.cordova)) && + ("classList" in document.documentElement)) { schedule = (function() { var div = document.createElement("div"); var opts = {attributes: true}; diff --git a/node_modules/bluebird/js/release/util.js b/node_modules/bluebird/js/release/util.js index c5617ee870d50..74a24fa6c393d 100644 --- a/node_modules/bluebird/js/release/util.js +++ b/node_modules/bluebird/js/release/util.js @@ -374,7 +374,12 @@ var ret = { domainBind: domainBind }; ret.isRecentNode = ret.isNode && (function() { - var version = process.versions.node.split(".").map(Number); + var version; + if (process.versions && process.versions.node) { + version = process.versions.node.split(".").map(Number); + } else if (process.version) { + version = process.version.split(".").map(Number); + } return (version[0] === 0 && version[1] > 10) || (version[0] > 0); })(); diff --git 
a/node_modules/bluebird/package.json b/node_modules/bluebird/package.json index 305d330978aad..fb1c7c821c873 100644 --- a/node_modules/bluebird/package.json +++ b/node_modules/bluebird/package.json @@ -1,19 +1,19 @@ { - "_from": "bluebird@3.5.3", - "_id": "bluebird@3.5.3", + "_from": "bluebird@^3.5.3", + "_id": "bluebird@3.5.5", "_inBundle": false, - "_integrity": "sha512-/qKPUQlaW1OyR51WeCPBvRnAlnZFUJkCSG5HzGnuIqhgyJtF+T94lFnn33eiazjRm2LAHVy2guNnaq48X9SJuw==", + "_integrity": "sha512-5am6HnnfN+urzt4yfg7IgTbotDjIT/u8AJpEt0sIU9FtXfVeezXAPKswrG+xKUCOYAINpSdgZVDU6QFh+cuH3w==", "_location": "/bluebird", "_phantomChildren": {}, "_requested": { - "type": "version", + "type": "range", "registry": true, - "raw": "bluebird@3.5.3", + "raw": "bluebird@^3.5.3", "name": "bluebird", "escapedName": "bluebird", - "rawSpec": "3.5.3", + "rawSpec": "^3.5.3", "saveSpec": null, - "fetchSpec": "3.5.3" + "fetchSpec": "^3.5.3" }, "_requiredBy": [ "#USER", @@ -21,16 +21,14 @@ "/bin-links", "/cacache", "/libcipm", - "/libnpmhook/npm-registry-fetch", + "/libnpm", "/npm-registry-fetch", - "/npm-registry-fetch/cacache", - "/pacote", - "/tap" + "/pacote" ], - "_resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.5.3.tgz", - "_shasum": "7d01c6f9616c9a51ab0f8c549a79dfe6ec33efa7", - "_spec": "bluebird@3.5.3", - "_where": "/Users/zkat/Documents/code/work/npm", + "_resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.5.5.tgz", + "_shasum": "a8d0afd73251effbbd5fe384a77d73003c17a71f", + "_spec": "bluebird@^3.5.3", + "_where": "/Users/isaacs/dev/npm/cli", "author": { "name": "Petka Antonov", "email": "petka_antonov@hotmail.com", @@ -106,6 +104,6 @@ "prepublish": "npm run generate-browser-core && npm run generate-browser-full", "test": "node --expose-gc tools/test.js" }, - "version": "3.5.3", + "version": "3.5.5", "webpack": "./js/release/bluebird.js" } diff --git a/node_modules/byte-size/README.hbs b/node_modules/byte-size/README.hbs index ebf5101b37002..8a57e4a9c784a 100644 --- a/node_modules/byte-size/README.hbs +++ b/node_modules/byte-size/README.hbs @@ -7,6 +7,34 @@ {{>main}} +### Load anywhere + +This library is compatible with Node.js, the Web and any style of module loader. It can be loaded anywhere, natively without transpilation. + +Node.js: + +```js +const byteSize = require('byte-size') +``` + +Within Node.js with ECMAScript Module support enabled: + +```js +import byteSize from 'byte-size' +``` + +Within a modern browser ECMAScript Module: + +```js +import byteSize from './node_modules/byte-size/index.mjs' +``` + +Old browser (adds `window.byteSize`): + +```html + +``` + * * * © 2014-18 Lloyd Brookes \<75pound@gmail.com\>. Documented by [jsdoc-to-markdown](https://github.com/jsdoc2md/jsdoc-to-markdown). diff --git a/node_modules/byte-size/README.md b/node_modules/byte-size/README.md index fdd605d580867..9a36daaf52f48 100644 --- a/node_modules/byte-size/README.md +++ b/node_modules/byte-size/README.md @@ -8,7 +8,7 @@ ## byte-size -Convert a bytes value to a more human-readable format. Choose between [metric or IEC units](https://en.wikipedia.org/wiki/Gigabyte), summarised below. +An isomorphic, load-anywhere function to convert a bytes value into a more human-readable format. Choose between [metric or IEC units](https://en.wikipedia.org/wiki/Gigabyte), summarised below. Value | Metric ----- | ------------- @@ -100,6 +100,34 @@ const byteSize = require('byte-size') '1.5 Kio' ``` +### Load anywhere + +This library is compatible with Node.js, the Web and any style of module loader. 
It can be loaded anywhere, natively without transpilation. + +Node.js: + +```js +const byteSize = require('byte-size') +``` + +Within Node.js with ECMAScript Module support enabled: + +```js +import byteSize from 'byte-size' +``` + +Within a modern browser ECMAScript Module: + +```js +import byteSize from './node_modules/byte-size/index.mjs' +``` + +Old browser (adds `window.byteSize`): + +```html + +``` + * * * © 2014-18 Lloyd Brookes \<75pound@gmail.com\>. Documented by [jsdoc-to-markdown](https://github.com/jsdoc2md/jsdoc-to-markdown). diff --git a/node_modules/byte-size/dist/index.js b/node_modules/byte-size/dist/index.js new file mode 100644 index 0000000000000..8253a63545b3a --- /dev/null +++ b/node_modules/byte-size/dist/index.js @@ -0,0 +1,152 @@ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : + typeof define === 'function' && define.amd ? define(factory) : + (global = global || self, global.byteSize = factory()); +}(this, function () { 'use strict'; + + /** + * An isomorphic, load-anywhere function to convert a bytes value into a more human-readable format. Choose between [metric or IEC units](https://en.wikipedia.org/wiki/Gigabyte), summarised below. + * + * Value | Metric + * ----- | ------------- + * 1000 | kB kilobyte + * 1000^2 | MB megabyte + * 1000^3 | GB gigabyte + * 1000^4 | TB terabyte + * 1000^5 | PB petabyte + * 1000^6 | EB exabyte + * 1000^7 | ZB zettabyte + * 1000^8 | YB yottabyte + * + * Value | IEC + * ----- | ------------ + * 1024 | KiB kibibyte + * 1024^2 | MiB mebibyte + * 1024^3 | GiB gibibyte + * 1024^4 | TiB tebibyte + * 1024^5 | PiB pebibyte + * 1024^6 | EiB exbibyte + * 1024^7 | ZiB zebibyte + * 1024^8 | YiB yobibyte + * + * Value | Metric (octet) + * ----- | ------------- + * 1000 | ko kilooctet + * 1000^2 | Mo megaoctet + * 1000^3 | Go gigaoctet + * 1000^4 | To teraoctet + * 1000^5 | Po petaoctet + * 1000^6 | Eo exaoctet + * 1000^7 | Zo zettaoctet + * 1000^8 | Yo yottaoctet + * + * Value | IEC (octet) + * ----- | ------------ + * 1024 | Kio kilooctet + * 1024^2 | Mio mebioctet + * 1024^3 | Gio gibioctet + * 1024^4 | Tio tebioctet + * 1024^5 | Pio pebioctet + * 1024^6 | Eio exbioctet + * 1024^7 | Zio zebioctet + * 1024^8 | Yio yobioctet + * + * @module byte-size + * @example + * ```js + * const byteSize = require('byte-size') + * ``` + */ + + class ByteSize { + constructor (bytes, options) { + options = options || {}; + options.units = options.units || 'metric'; + options.precision = typeof options.precision === 'undefined' ? 1 : options.precision; + + const table = [ + { expFrom: 0, expTo: 1, metric: 'B', iec: 'B', metric_octet: 'o', iec_octet: 'o' }, + { expFrom: 1, expTo: 2, metric: 'kB', iec: 'KiB', metric_octet: 'ko', iec_octet: 'Kio' }, + { expFrom: 2, expTo: 3, metric: 'MB', iec: 'MiB', metric_octet: 'Mo', iec_octet: 'Mio' }, + { expFrom: 3, expTo: 4, metric: 'GB', iec: 'GiB', metric_octet: 'Go', iec_octet: 'Gio' }, + { expFrom: 4, expTo: 5, metric: 'TB', iec: 'TiB', metric_octet: 'To', iec_octet: 'Tio' }, + { expFrom: 5, expTo: 6, metric: 'PB', iec: 'PiB', metric_octet: 'Po', iec_octet: 'Pio' }, + { expFrom: 6, expTo: 7, metric: 'EB', iec: 'EiB', metric_octet: 'Eo', iec_octet: 'Eio' }, + { expFrom: 7, expTo: 8, metric: 'ZB', iec: 'ZiB', metric_octet: 'Zo', iec_octet: 'Zio' }, + { expFrom: 8, expTo: 9, metric: 'YB', iec: 'YiB', metric_octet: 'Yo', iec_octet: 'Yio' } + ]; + + const base = options.units === 'metric' || options.units === 'metric_octet' ? 
1000 : 1024; + const prefix = bytes < 0 ? '-' : ''; + bytes = Math.abs(bytes); + + for (let i = 0; i < table.length; i++) { + const lower = Math.pow(base, table[i].expFrom); + const upper = Math.pow(base, table[i].expTo); + if (bytes >= lower && bytes < upper) { + const units = table[i][options.units]; + if (i === 0) { + this.value = prefix + bytes; + this.unit = units; + return + } else { + this.value = prefix + (bytes / lower).toFixed(options.precision); + this.unit = units; + return + } + } + } + + this.value = prefix + bytes; + this.unit = ''; + } + + toString () { + return `${this.value} ${this.unit}`.trim() + } + } + + /** + * @param {number} - the bytes value to convert. + * @param [options] {object} - optional config. + * @param [options.precision=1] {number} - number of decimal places. + * @param [options.units=metric] {string} - select `'metric'`, `'iec'`, `'metric_octet'` or `'iec_octet'` units. + * @returns {{ value: string, unit: string}} + * @alias module:byte-size + * @example + * ```js + * > const byteSize = require('byte-size') + * + * > byteSize(1580) + * { value: '1.6', unit: 'kB' } + * + * > byteSize(1580, { units: 'iec' }) + * { value: '1.5', unit: 'KiB' } + * + * > byteSize(1580, { units: 'iec', precision: 3 }) + * { value: '1.543', unit: 'KiB' } + * + * > byteSize(1580, { units: 'iec', precision: 0 }) + * { value: '2', unit: 'KiB' } + * + * > byteSize(1580, { units: 'metric_octet' }) + * { value: '1.6', unit: 'ko' } + * + * > byteSize(1580, { units: 'iec_octet' }) + * { value: '1.5', unit: 'Kio' } + * + * > byteSize(1580, { units: 'iec_octet' }).toString() + * '1.5 Kio' + * + * > const { value, unit } = byteSize(1580, { units: 'iec_octet' }) + * > `${value} ${unit}` + * '1.5 Kio' + * ``` + */ + function byteSize (bytes, options) { + return new ByteSize(bytes, options) + } + + return byteSize; + +})); diff --git a/node_modules/byte-size/index.js b/node_modules/byte-size/index.js deleted file mode 100644 index ec1a0293892e6..0000000000000 --- a/node_modules/byte-size/index.js +++ /dev/null @@ -1,143 +0,0 @@ -'use strict' - -/** - * Convert a bytes value to a more human-readable format. Choose between [metric or IEC units](https://en.wikipedia.org/wiki/Gigabyte), summarised below. 
- * - * Value | Metric - * ----- | ------------- - * 1000 | kB kilobyte - * 1000^2 | MB megabyte - * 1000^3 | GB gigabyte - * 1000^4 | TB terabyte - * 1000^5 | PB petabyte - * 1000^6 | EB exabyte - * 1000^7 | ZB zettabyte - * 1000^8 | YB yottabyte - * - * Value | IEC - * ----- | ------------ - * 1024 | KiB kibibyte - * 1024^2 | MiB mebibyte - * 1024^3 | GiB gibibyte - * 1024^4 | TiB tebibyte - * 1024^5 | PiB pebibyte - * 1024^6 | EiB exbibyte - * 1024^7 | ZiB zebibyte - * 1024^8 | YiB yobibyte - * - * Value | Metric (octet) - * ----- | ------------- - * 1000 | ko kilooctet - * 1000^2 | Mo megaoctet - * 1000^3 | Go gigaoctet - * 1000^4 | To teraoctet - * 1000^5 | Po petaoctet - * 1000^6 | Eo exaoctet - * 1000^7 | Zo zettaoctet - * 1000^8 | Yo yottaoctet - * - * Value | IEC (octet) - * ----- | ------------ - * 1024 | Kio kilooctet - * 1024^2 | Mio mebioctet - * 1024^3 | Gio gibioctet - * 1024^4 | Tio tebioctet - * 1024^5 | Pio pebioctet - * 1024^6 | Eio exbioctet - * 1024^7 | Zio zebioctet - * 1024^8 | Yio yobioctet - * - * @module byte-size - * @example - * ```js - * const byteSize = require('byte-size') - * ``` - */ -module.exports = byteSize - -class ByteSize { - constructor (bytes, options) { - options = options || {} - options.units = options.units || 'metric' - options.precision = typeof options.precision === 'undefined' ? 1 : options.precision - - const table = [ - { expFrom: 0, expTo: 1, metric: 'B', iec: 'B', metric_octet: 'o', iec_octet: 'o' }, - { expFrom: 1, expTo: 2, metric: 'kB', iec: 'KiB', metric_octet: 'ko', iec_octet: 'Kio' }, - { expFrom: 2, expTo: 3, metric: 'MB', iec: 'MiB', metric_octet: 'Mo', iec_octet: 'Mio' }, - { expFrom: 3, expTo: 4, metric: 'GB', iec: 'GiB', metric_octet: 'Go', iec_octet: 'Gio' }, - { expFrom: 4, expTo: 5, metric: 'TB', iec: 'TiB', metric_octet: 'To', iec_octet: 'Tio' }, - { expFrom: 5, expTo: 6, metric: 'PB', iec: 'PiB', metric_octet: 'Po', iec_octet: 'Pio' }, - { expFrom: 6, expTo: 7, metric: 'EB', iec: 'EiB', metric_octet: 'Eo', iec_octet: 'Eio' }, - { expFrom: 7, expTo: 8, metric: 'ZB', iec: 'ZiB', metric_octet: 'Zo', iec_octet: 'Zio' }, - { expFrom: 8, expTo: 9, metric: 'YB', iec: 'YiB', metric_octet: 'Yo', iec_octet: 'Yio' } - ] - - const base = options.units === 'metric' || options.units === 'metric_octet' ? 1000 : 1024 - - for (let i = 0; i < table.length; i++) { - const lower = Math.pow(base, table[i].expFrom) - const upper = Math.pow(base, table[i].expTo) - if (bytes >= lower && bytes < upper) { - const units = table[i][options.units] - if (i === 0) { - this.value = String(bytes) - this.unit = units - return - } else { - this.value = (bytes / lower).toFixed(options.precision) - this.unit = units - return - } - } - } - - this.value = String(bytes) - this.unit = '' - } - - toString () { - return `${this.value} ${this.unit}`.trim() - } -} - -/** - * @param {number} - the bytes value to convert. - * @param [options] {object} - optional config. - * @param [options.precision=1] {number} - number of decimal places. - * @param [options.units=metric] {string} - select `'metric'`, `'iec'`, `'metric_octet'` or `'iec_octet'` units. 
- * @returns {{ value: string, unit: string}} - * @alias module:byte-size - * @example - * ```js - * > const byteSize = require('byte-size') - * - * > byteSize(1580) - * { value: '1.6', unit: 'kB' } - * - * > byteSize(1580, { units: 'iec' }) - * { value: '1.5', unit: 'KiB' } - * - * > byteSize(1580, { units: 'iec', precision: 3 }) - * { value: '1.543', unit: 'KiB' } - * - * > byteSize(1580, { units: 'iec', precision: 0 }) - * { value: '2', unit: 'KiB' } - * - * > byteSize(1580, { units: 'metric_octet' }) - * { value: '1.6', unit: 'ko' } - * - * > byteSize(1580, { units: 'iec_octet' }) - * { value: '1.5', unit: 'Kio' } - * - * > byteSize(1580, { units: 'iec_octet' }).toString() - * '1.5 Kio' - * - * > const { value, unit } = byteSize(1580, { units: 'iec_octet' }) - * > `${value} ${unit}` - * '1.5 Kio' - * ``` - */ -function byteSize (bytes, options) { - return new ByteSize(bytes, options) -} diff --git a/node_modules/byte-size/index.mjs b/node_modules/byte-size/index.mjs new file mode 100644 index 0000000000000..2de3e205b087c --- /dev/null +++ b/node_modules/byte-size/index.mjs @@ -0,0 +1,144 @@ +/** + * An isomorphic, load-anywhere function to convert a bytes value into a more human-readable format. Choose between [metric or IEC units](https://en.wikipedia.org/wiki/Gigabyte), summarised below. + * + * Value | Metric + * ----- | ------------- + * 1000 | kB kilobyte + * 1000^2 | MB megabyte + * 1000^3 | GB gigabyte + * 1000^4 | TB terabyte + * 1000^5 | PB petabyte + * 1000^6 | EB exabyte + * 1000^7 | ZB zettabyte + * 1000^8 | YB yottabyte + * + * Value | IEC + * ----- | ------------ + * 1024 | KiB kibibyte + * 1024^2 | MiB mebibyte + * 1024^3 | GiB gibibyte + * 1024^4 | TiB tebibyte + * 1024^5 | PiB pebibyte + * 1024^6 | EiB exbibyte + * 1024^7 | ZiB zebibyte + * 1024^8 | YiB yobibyte + * + * Value | Metric (octet) + * ----- | ------------- + * 1000 | ko kilooctet + * 1000^2 | Mo megaoctet + * 1000^3 | Go gigaoctet + * 1000^4 | To teraoctet + * 1000^5 | Po petaoctet + * 1000^6 | Eo exaoctet + * 1000^7 | Zo zettaoctet + * 1000^8 | Yo yottaoctet + * + * Value | IEC (octet) + * ----- | ------------ + * 1024 | Kio kilooctet + * 1024^2 | Mio mebioctet + * 1024^3 | Gio gibioctet + * 1024^4 | Tio tebioctet + * 1024^5 | Pio pebioctet + * 1024^6 | Eio exbioctet + * 1024^7 | Zio zebioctet + * 1024^8 | Yio yobioctet + * + * @module byte-size + * @example + * ```js + * const byteSize = require('byte-size') + * ``` + */ + +class ByteSize { + constructor (bytes, options) { + options = options || {} + options.units = options.units || 'metric' + options.precision = typeof options.precision === 'undefined' ? 
1 : options.precision + + const table = [ + { expFrom: 0, expTo: 1, metric: 'B', iec: 'B', metric_octet: 'o', iec_octet: 'o' }, + { expFrom: 1, expTo: 2, metric: 'kB', iec: 'KiB', metric_octet: 'ko', iec_octet: 'Kio' }, + { expFrom: 2, expTo: 3, metric: 'MB', iec: 'MiB', metric_octet: 'Mo', iec_octet: 'Mio' }, + { expFrom: 3, expTo: 4, metric: 'GB', iec: 'GiB', metric_octet: 'Go', iec_octet: 'Gio' }, + { expFrom: 4, expTo: 5, metric: 'TB', iec: 'TiB', metric_octet: 'To', iec_octet: 'Tio' }, + { expFrom: 5, expTo: 6, metric: 'PB', iec: 'PiB', metric_octet: 'Po', iec_octet: 'Pio' }, + { expFrom: 6, expTo: 7, metric: 'EB', iec: 'EiB', metric_octet: 'Eo', iec_octet: 'Eio' }, + { expFrom: 7, expTo: 8, metric: 'ZB', iec: 'ZiB', metric_octet: 'Zo', iec_octet: 'Zio' }, + { expFrom: 8, expTo: 9, metric: 'YB', iec: 'YiB', metric_octet: 'Yo', iec_octet: 'Yio' } + ] + + const base = options.units === 'metric' || options.units === 'metric_octet' ? 1000 : 1024 + const prefix = bytes < 0 ? '-' : ''; + bytes = Math.abs(bytes); + + for (let i = 0; i < table.length; i++) { + const lower = Math.pow(base, table[i].expFrom) + const upper = Math.pow(base, table[i].expTo) + if (bytes >= lower && bytes < upper) { + const units = table[i][options.units] + if (i === 0) { + this.value = prefix + bytes + this.unit = units + return + } else { + this.value = prefix + (bytes / lower).toFixed(options.precision) + this.unit = units + return + } + } + } + + this.value = prefix + bytes + this.unit = '' + } + + toString () { + return `${this.value} ${this.unit}`.trim() + } +} + +/** + * @param {number} - the bytes value to convert. + * @param [options] {object} - optional config. + * @param [options.precision=1] {number} - number of decimal places. + * @param [options.units=metric] {string} - select `'metric'`, `'iec'`, `'metric_octet'` or `'iec_octet'` units. 
+ * @returns {{ value: string, unit: string}} + * @alias module:byte-size + * @example + * ```js + * > const byteSize = require('byte-size') + * + * > byteSize(1580) + * { value: '1.6', unit: 'kB' } + * + * > byteSize(1580, { units: 'iec' }) + * { value: '1.5', unit: 'KiB' } + * + * > byteSize(1580, { units: 'iec', precision: 3 }) + * { value: '1.543', unit: 'KiB' } + * + * > byteSize(1580, { units: 'iec', precision: 0 }) + * { value: '2', unit: 'KiB' } + * + * > byteSize(1580, { units: 'metric_octet' }) + * { value: '1.6', unit: 'ko' } + * + * > byteSize(1580, { units: 'iec_octet' }) + * { value: '1.5', unit: 'Kio' } + * + * > byteSize(1580, { units: 'iec_octet' }).toString() + * '1.5 Kio' + * + * > const { value, unit } = byteSize(1580, { units: 'iec_octet' }) + * > `${value} ${unit}` + * '1.5 Kio' + * ``` + */ +function byteSize (bytes, options) { + return new ByteSize(bytes, options) +} + +export default byteSize diff --git a/node_modules/byte-size/package.json b/node_modules/byte-size/package.json index f69fc683f38cc..57e46ba988d9b 100644 --- a/node_modules/byte-size/package.json +++ b/node_modules/byte-size/package.json @@ -1,32 +1,28 @@ { - "_args": [ - [ - "byte-size@4.0.3", - "/Users/rebecca/code/npm" - ] - ], - "_from": "byte-size@4.0.3", - "_id": "byte-size@4.0.3", + "_from": "byte-size@5.0.1", + "_id": "byte-size@5.0.1", "_inBundle": false, - "_integrity": "sha512-JGC3EV2bCzJH/ENSh3afyJrH4vwxbHTuO5ljLoI5+2iJOcEpMgP8T782jH9b5qGxf2mSUIp1lfGnfKNrRHpvVg==", + "_integrity": "sha512-/XuKeqWocKsYa/cBY1YbSJSWWqTi4cFgr9S6OyM7PBaPbr9zvNGwWP33vt0uqGhwDdN+y3yhbXVILEUpnwEWGw==", "_location": "/byte-size", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "byte-size@4.0.3", + "raw": "byte-size@5.0.1", "name": "byte-size", "escapedName": "byte-size", - "rawSpec": "4.0.3", + "rawSpec": "5.0.1", "saveSpec": null, - "fetchSpec": "4.0.3" + "fetchSpec": "5.0.1" }, "_requiredBy": [ + "#USER", "/" ], - "_resolved": "https://registry.npmjs.org/byte-size/-/byte-size-4.0.3.tgz", - "_spec": "4.0.3", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/byte-size/-/byte-size-5.0.1.tgz", + "_shasum": "4b651039a5ecd96767e71a3d7ed380e48bed4191", + "_spec": "byte-size@5.0.1", + "_where": "/Users/aeschright/code/cli", "author": { "name": "Lloyd Brookes", "email": "75pound@gmail.com" @@ -34,6 +30,7 @@ "bugs": { "url": "https://github.com/75lb/byte-size/issues" }, + "bundleDependencies": false, "contributors": [ { "name": "Raul Perez", @@ -41,14 +38,20 @@ "url": "http://repejota.com" } ], + "deprecated": false, "description": "Convert a bytes (and octets) value to a more human-readable format. 
Choose between metric or IEC units.", "devDependencies": { - "coveralls": "^3.0.1", + "coveralls": "^3.0.2", "jsdoc-to-markdown": "^4.0.1", - "test-runner": "^0.5.0" + "rollup": "^0.68.1", + "test-runner": "^0.5.1" + }, + "engines": { + "node": ">=6.0.0" }, "files": [ - "index.js" + "index.mjs", + "dist/index.js" ], "homepage": "https://github.com/75lb/byte-size#readme", "keywords": [ @@ -62,6 +65,7 @@ "IEC" ], "license": "MIT", + "main": "dist/index.js", "name": "byte-size", "repository": { "type": "git", @@ -69,8 +73,11 @@ }, "scripts": { "cover": "istanbul cover ./node_modules/.bin/test-runner test.js && cat coverage/lcov.info | ./node_modules/.bin/coveralls", - "docs": "jsdoc2md -t README.hbs index.js > README.md; echo", - "test": "test-runner test.js" + "dist": "rollup -c dist/index.config.js", + "docs": "jsdoc2md -t README.hbs dist/index.js > README.md; echo", + "test": "npm run test:js && npm run test:mjs", + "test:js": "rollup -c dist/test.config.js && node dist/test.js", + "test:mjs": "node --experimental-modules test/test.mjs" }, - "version": "4.0.3" + "version": "5.0.1" } diff --git a/node_modules/cacache/CHANGELOG.md b/node_modules/cacache/CHANGELOG.md index ec9174f80d76c..f67fbc8b4ec9e 100644 --- a/node_modules/cacache/CHANGELOG.md +++ b/node_modules/cacache/CHANGELOG.md @@ -1,66 +1,178 @@ -# Change Log +# Changelog All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. +### [12.0.3](https://github.com/npm/cacache/compare/v12.0.2...v12.0.3) (2019-08-19) + + +### Bug Fixes + +* do not chown if not running as root ([2d80af9](https://github.com/npm/cacache/commit/2d80af9)) + + + +### [12.0.2](https://github.com/npm/cacache/compare/v12.0.1...v12.0.2) (2019-07-19) + + + +### [12.0.1](https://github.com/npm/cacache/compare/v12.0.0...v12.0.1) (2019-07-19) + +* **deps** Abstracted out `lib/util/infer-owner.js` to + [@npmcli/infer-owner](https://www.npmjs.com/package/@npmcli/infer-owner) + so that it could be more easily used in other parts of the npm CLI. + + +## [12.0.0](https://github.com/npm/cacache/compare/v11.3.3...v12.0.0) (2019-07-15) + + +### Features + +* infer uid/gid instead of accepting as options ([ac84d14](https://github.com/npm/cacache/commit/ac84d14)) +* **i18n:** add another error message ([676cb32](https://github.com/npm/cacache/commit/676cb32)) + + +### BREAKING CHANGES + +* the uid gid options are no longer respected or +necessary. As of this change, cacache will always match the cache +contents to the ownership of the cache directory (or its parent +directory), regardless of what the caller passes in. + +Reasoning: + +The number one reason to use a uid or gid option was to keep root-owned +files from causing problems in the cache. In npm's case, this meant +that CLI's ./lib/command.js had to work out the appropriate uid and gid, +then pass it to the libnpmcommand module, which had to in turn pass the +uid and gid to npm-registry-fetch, which then passed it to +make-fetch-happen, which passed it to cacache. (For package fetching, +pacote would be in that mix as well.) + +Added to that, `cacache.rm()` will actually _write_ a file into the +cache index, but has no way to accept an option so that its call to +entry-index.js will write the index with the appropriate uid/gid. +Little ownership bugs were all over the place, and tricky to trace +through. (Why should make-fetch-happen even care about accepting or +passing uids and gids? 
It's an http library.)
+
+This change allows us to keep the cache from having mixed ownership in
+any situation.
+
+Of course, this _does_ mean that if you have a root-owned but
+user-writable folder (for example, `/tmp`), then the cache will try to
+chown everything to root.
+
+The solution is for the user to create a folder, make it user-owned, and
+use that, rather than relying on cacache to create the root cache folder.
+
+If we decide to restore the uid/gid opts, and use ownership inference
+only when uid/gid are unset, then take care to also make rm take an
+option object, and pass it through to entry-index.js.
+
+
+
+### [11.3.3](https://github.com/npm/cacache/compare/v11.3.2...v11.3.3) (2019-06-17)
+
+
+### Bug Fixes
+
+* **audit:** npm audit fix ([200a6d5](https://github.com/npm/cacache/commit/200a6d5))
+* **config:** Add ssri config 'error' option ([#146](https://github.com/npm/cacache/issues/146)) ([47de8f5](https://github.com/npm/cacache/commit/47de8f5))
+* **deps:** npm audit fix ([481a7dc](https://github.com/npm/cacache/commit/481a7dc))
+* **standard:** standard --fix ([7799149](https://github.com/npm/cacache/commit/7799149))
+* **write:** avoid another cb never called situation ([5156561](https://github.com/npm/cacache/commit/5156561))
+
+
+
+
+## [11.3.2](https://github.com/npm/cacache/compare/v11.3.1...v11.3.2) (2018-12-21)
+
+
+### Bug Fixes
+
+* **get:** make sure to handle errors in the .then ([b10bcd0](https://github.com/npm/cacache/commit/b10bcd0))
+
+
+
+
+## [11.3.1](https://github.com/npm/cacache/compare/v11.3.0...v11.3.1) (2018-11-05)
+
+
+### Bug Fixes
+
+* **get:** export hasContent.sync properly ([d76c920](https://github.com/npm/cacache/commit/d76c920))
+
+
+
+
+# [11.3.0](https://github.com/npm/cacache/compare/v11.2.0...v11.3.0) (2018-11-05)
+
+
+### Features
+
+* **get:** add sync API for reading ([db1e094](https://github.com/npm/cacache/commit/db1e094))
+
+
+
-# [11.2.0](https://github.com/zkat/cacache/compare/v11.1.0...v11.2.0) (2018-08-08)
+# [11.2.0](https://github.com/npm/cacache/compare/v11.1.0...v11.2.0) (2018-08-08)

### Features

-* **read:** add sync support to other internal read.js fns ([fe638b6](https://github.com/zkat/cacache/commit/fe638b6))
+* **read:** add sync support to other internal read.js fns ([fe638b6](https://github.com/npm/cacache/commit/fe638b6))

-# [11.1.0](https://github.com/zkat/cacache/compare/v11.0.3...v11.1.0) (2018-08-01)
+# [11.1.0](https://github.com/npm/cacache/compare/v11.0.3...v11.1.0) (2018-08-01)

### Features

-* **read:** add sync support for low-level content read ([b43af83](https://github.com/zkat/cacache/commit/b43af83))
+* **read:** add sync support for low-level content read ([b43af83](https://github.com/npm/cacache/commit/b43af83))

-## [11.0.3](https://github.com/zkat/cacache/compare/v11.0.2...v11.0.3) (2018-08-01)
+## [11.0.3](https://github.com/npm/cacache/compare/v11.0.2...v11.0.3) (2018-08-01)

### Bug Fixes

-* **config:** add ssri config options ([#136](https://github.com/zkat/cacache/issues/136)) ([10d5d9a](https://github.com/zkat/cacache/commit/10d5d9a))
-* **perf:** refactor content.read to avoid lstats ([c5ac10e](https://github.com/zkat/cacache/commit/c5ac10e))
-* **test:** oops when removing safe-buffer ([1950490](https://github.com/zkat/cacache/commit/1950490))
+* **config:** add ssri config options ([#136](https://github.com/npm/cacache/issues/136)) ([10d5d9a](https://github.com/npm/cacache/commit/10d5d9a))
+* **perf:** refactor content.read to avoid lstats
([c5ac10e](https://github.com/npm/cacache/commit/c5ac10e)) +* **test:** oops when removing safe-buffer ([1950490](https://github.com/npm/cacache/commit/1950490)) -## [11.0.2](https://github.com/zkat/cacache/compare/v11.0.1...v11.0.2) (2018-05-07) +## [11.0.2](https://github.com/npm/cacache/compare/v11.0.1...v11.0.2) (2018-05-07) ### Bug Fixes -* **verify:** size param no longer lost in a verify ([#131](https://github.com/zkat/cacache/issues/131)) ([c614a19](https://github.com/zkat/cacache/commit/c614a19)), closes [#130](https://github.com/zkat/cacache/issues/130) +* **verify:** size param no longer lost in a verify ([#131](https://github.com/npm/cacache/issues/131)) ([c614a19](https://github.com/npm/cacache/commit/c614a19)), closes [#130](https://github.com/npm/cacache/issues/130) -## [11.0.1](https://github.com/zkat/cacache/compare/v11.0.0...v11.0.1) (2018-04-10) +## [11.0.1](https://github.com/npm/cacache/compare/v11.0.0...v11.0.1) (2018-04-10) -# [11.0.0](https://github.com/zkat/cacache/compare/v10.0.4...v11.0.0) (2018-04-09) +# [11.0.0](https://github.com/npm/cacache/compare/v10.0.4...v11.0.0) (2018-04-09) ### Features -* **opts:** use figgy-pudding for opts ([#128](https://github.com/zkat/cacache/issues/128)) ([33d4eed](https://github.com/zkat/cacache/commit/33d4eed)) +* **opts:** use figgy-pudding for opts ([#128](https://github.com/npm/cacache/issues/128)) ([33d4eed](https://github.com/npm/cacache/commit/33d4eed)) ### meta -* drop support for node@4 ([529f347](https://github.com/zkat/cacache/commit/529f347)) +* drop support for node@4 ([529f347](https://github.com/npm/cacache/commit/529f347)) ### BREAKING CHANGES @@ -70,52 +182,52 @@ All notable changes to this project will be documented in this file. See [standa -## [10.0.4](https://github.com/zkat/cacache/compare/v10.0.3...v10.0.4) (2018-02-16) +## [10.0.4](https://github.com/npm/cacache/compare/v10.0.3...v10.0.4) (2018-02-16) -## [10.0.3](https://github.com/zkat/cacache/compare/v10.0.2...v10.0.3) (2018-02-16) +## [10.0.3](https://github.com/npm/cacache/compare/v10.0.2...v10.0.3) (2018-02-16) ### Bug Fixes -* **content:** rethrow aggregate errors as ENOENT ([fa918f5](https://github.com/zkat/cacache/commit/fa918f5)) +* **content:** rethrow aggregate errors as ENOENT ([fa918f5](https://github.com/npm/cacache/commit/fa918f5)) -## [10.0.2](https://github.com/zkat/cacache/compare/v10.0.1...v10.0.2) (2018-01-07) +## [10.0.2](https://github.com/npm/cacache/compare/v10.0.1...v10.0.2) (2018-01-07) ### Bug Fixes -* **ls:** deleted entries could cause a premature stream EOF ([347dc36](https://github.com/zkat/cacache/commit/347dc36)) +* **ls:** deleted entries could cause a premature stream EOF ([347dc36](https://github.com/npm/cacache/commit/347dc36)) -## [10.0.1](https://github.com/zkat/cacache/compare/v10.0.0...v10.0.1) (2017-11-15) +## [10.0.1](https://github.com/npm/cacache/compare/v10.0.0...v10.0.1) (2017-11-15) ### Bug Fixes -* **move-file:** actually use the fallback to `move-concurrently` (#110) ([073fbe1](https://github.com/zkat/cacache/commit/073fbe1)) +* **move-file:** actually use the fallback to `move-concurrently` (#110) ([073fbe1](https://github.com/npm/cacache/commit/073fbe1)) -# [10.0.0](https://github.com/zkat/cacache/compare/v9.3.0...v10.0.0) (2017-10-23) +# [10.0.0](https://github.com/npm/cacache/compare/v9.3.0...v10.0.0) (2017-10-23) ### Features -* **license:** relicense to ISC (#111) ([fdbb4e5](https://github.com/zkat/cacache/commit/fdbb4e5)) +* **license:** relicense to ISC (#111) 
([fdbb4e5](https://github.com/npm/cacache/commit/fdbb4e5)) ### Performance Improvements -* more copyFile benchmarks ([63787bb](https://github.com/zkat/cacache/commit/63787bb)) +* more copyFile benchmarks ([63787bb](https://github.com/npm/cacache/commit/63787bb)) ### BREAKING CHANGES @@ -125,132 +237,132 @@ All notable changes to this project will be documented in this file. See [standa -# [9.3.0](https://github.com/zkat/cacache/compare/v9.2.9...v9.3.0) (2017-10-07) +# [9.3.0](https://github.com/npm/cacache/compare/v9.2.9...v9.3.0) (2017-10-07) ### Features -* **copy:** added cacache.get.copy api for fast copies (#107) ([067b5f6](https://github.com/zkat/cacache/commit/067b5f6)) +* **copy:** added cacache.get.copy api for fast copies (#107) ([067b5f6](https://github.com/npm/cacache/commit/067b5f6)) -## [9.2.9](https://github.com/zkat/cacache/compare/v9.2.8...v9.2.9) (2017-06-17) +## [9.2.9](https://github.com/npm/cacache/compare/v9.2.8...v9.2.9) (2017-06-17) -## [9.2.8](https://github.com/zkat/cacache/compare/v9.2.7...v9.2.8) (2017-06-05) +## [9.2.8](https://github.com/npm/cacache/compare/v9.2.7...v9.2.8) (2017-06-05) ### Bug Fixes -* **ssri:** bump ssri for bugfix ([c3232ea](https://github.com/zkat/cacache/commit/c3232ea)) +* **ssri:** bump ssri for bugfix ([c3232ea](https://github.com/npm/cacache/commit/c3232ea)) -## [9.2.7](https://github.com/zkat/cacache/compare/v9.2.6...v9.2.7) (2017-06-05) +## [9.2.7](https://github.com/npm/cacache/compare/v9.2.6...v9.2.7) (2017-06-05) ### Bug Fixes -* **content:** make verified content completely read-only (#96) ([4131196](https://github.com/zkat/cacache/commit/4131196)) +* **content:** make verified content completely read-only (#96) ([4131196](https://github.com/npm/cacache/commit/4131196)) -## [9.2.6](https://github.com/zkat/cacache/compare/v9.2.5...v9.2.6) (2017-05-31) +## [9.2.6](https://github.com/npm/cacache/compare/v9.2.5...v9.2.6) (2017-05-31) ### Bug Fixes -* **node:** update ssri to prevent old node 4 crash ([5209ffe](https://github.com/zkat/cacache/commit/5209ffe)) +* **node:** update ssri to prevent old node 4 crash ([5209ffe](https://github.com/npm/cacache/commit/5209ffe)) -## [9.2.5](https://github.com/zkat/cacache/compare/v9.2.4...v9.2.5) (2017-05-25) +## [9.2.5](https://github.com/npm/cacache/compare/v9.2.4...v9.2.5) (2017-05-25) ### Bug Fixes -* **deps:** fix lockfile issues and bump ssri ([84e1d7e](https://github.com/zkat/cacache/commit/84e1d7e)) +* **deps:** fix lockfile issues and bump ssri ([84e1d7e](https://github.com/npm/cacache/commit/84e1d7e)) -## [9.2.4](https://github.com/zkat/cacache/compare/v9.2.3...v9.2.4) (2017-05-24) +## [9.2.4](https://github.com/npm/cacache/compare/v9.2.3...v9.2.4) (2017-05-24) ### Bug Fixes -* **deps:** bumping deps ([bbccb12](https://github.com/zkat/cacache/commit/bbccb12)) +* **deps:** bumping deps ([bbccb12](https://github.com/npm/cacache/commit/bbccb12)) -## [9.2.3](https://github.com/zkat/cacache/compare/v9.2.2...v9.2.3) (2017-05-24) +## [9.2.3](https://github.com/npm/cacache/compare/v9.2.2...v9.2.3) (2017-05-24) ### Bug Fixes -* **rm:** stop crashing if content is missing on rm ([ac90bc0](https://github.com/zkat/cacache/commit/ac90bc0)) +* **rm:** stop crashing if content is missing on rm ([ac90bc0](https://github.com/npm/cacache/commit/ac90bc0)) -## [9.2.2](https://github.com/zkat/cacache/compare/v9.2.1...v9.2.2) (2017-05-14) +## [9.2.2](https://github.com/npm/cacache/compare/v9.2.1...v9.2.2) (2017-05-14) ### Bug Fixes -* **i18n:** lets pretend this didn't happen 
([519b4ee](https://github.com/zkat/cacache/commit/519b4ee)) +* **i18n:** lets pretend this didn't happen ([519b4ee](https://github.com/npm/cacache/commit/519b4ee)) -## [9.2.1](https://github.com/zkat/cacache/compare/v9.2.0...v9.2.1) (2017-05-14) +## [9.2.1](https://github.com/npm/cacache/compare/v9.2.0...v9.2.1) (2017-05-14) ### Bug Fixes -* **docs:** fixing translation messup ([bb9e4f9](https://github.com/zkat/cacache/commit/bb9e4f9)) +* **docs:** fixing translation messup ([bb9e4f9](https://github.com/npm/cacache/commit/bb9e4f9)) -# [9.2.0](https://github.com/zkat/cacache/compare/v9.1.0...v9.2.0) (2017-05-14) +# [9.2.0](https://github.com/npm/cacache/compare/v9.1.0...v9.2.0) (2017-05-14) ### Features -* **i18n:** add Spanish translation for API ([531f9a4](https://github.com/zkat/cacache/commit/531f9a4)) +* **i18n:** add Spanish translation for API ([531f9a4](https://github.com/npm/cacache/commit/531f9a4)) -# [9.1.0](https://github.com/zkat/cacache/compare/v9.0.0...v9.1.0) (2017-05-14) +# [9.1.0](https://github.com/npm/cacache/compare/v9.0.0...v9.1.0) (2017-05-14) ### Features -* **i18n:** Add Spanish translation and i18n setup (#91) ([323b90c](https://github.com/zkat/cacache/commit/323b90c)) +* **i18n:** Add Spanish translation and i18n setup (#91) ([323b90c](https://github.com/npm/cacache/commit/323b90c)) -# [9.0.0](https://github.com/zkat/cacache/compare/v8.0.0...v9.0.0) (2017-04-28) +# [9.0.0](https://github.com/npm/cacache/compare/v8.0.0...v9.0.0) (2017-04-28) ### Bug Fixes -* **memoization:** actually use the LRU ([0e55dc9](https://github.com/zkat/cacache/commit/0e55dc9)) +* **memoization:** actually use the LRU ([0e55dc9](https://github.com/npm/cacache/commit/0e55dc9)) ### Features -* **memoization:** memoizers can be injected through opts.memoize (#90) ([e5614c7](https://github.com/zkat/cacache/commit/e5614c7)) +* **memoization:** memoizers can be injected through opts.memoize (#90) ([e5614c7](https://github.com/npm/cacache/commit/e5614c7)) ### BREAKING CHANGES @@ -260,12 +372,12 @@ All notable changes to this project will be documented in this file. See [standa -# [8.0.0](https://github.com/zkat/cacache/compare/v7.1.0...v8.0.0) (2017-04-22) +# [8.0.0](https://github.com/npm/cacache/compare/v7.1.0...v8.0.0) (2017-04-22) ### Features -* **read:** change hasContent to return {sri, size} (#88) ([bad6c49](https://github.com/zkat/cacache/commit/bad6c49)), closes [#87](https://github.com/zkat/cacache/issues/87) +* **read:** change hasContent to return {sri, size} (#88) ([bad6c49](https://github.com/npm/cacache/commit/bad6c49)), closes [#87](https://github.com/npm/cacache/issues/87) ### BREAKING CHANGES @@ -275,78 +387,78 @@ All notable changes to this project will be documented in this file. 
See [standa -# [7.1.0](https://github.com/zkat/cacache/compare/v7.0.5...v7.1.0) (2017-04-20) +# [7.1.0](https://github.com/npm/cacache/compare/v7.0.5...v7.1.0) (2017-04-20) ### Features -* **size:** handle content size info (#49) ([91230af](https://github.com/zkat/cacache/commit/91230af)) +* **size:** handle content size info (#49) ([91230af](https://github.com/npm/cacache/commit/91230af)) -## [7.0.5](https://github.com/zkat/cacache/compare/v7.0.4...v7.0.5) (2017-04-18) +## [7.0.5](https://github.com/npm/cacache/compare/v7.0.4...v7.0.5) (2017-04-18) ### Bug Fixes -* **integrity:** new ssri with fixed integrity stream ([6d13e8e](https://github.com/zkat/cacache/commit/6d13e8e)) -* **write:** wrap stuff in promises to improve errors ([3624fc5](https://github.com/zkat/cacache/commit/3624fc5)) +* **integrity:** new ssri with fixed integrity stream ([6d13e8e](https://github.com/npm/cacache/commit/6d13e8e)) +* **write:** wrap stuff in promises to improve errors ([3624fc5](https://github.com/npm/cacache/commit/3624fc5)) -## [7.0.4](https://github.com/zkat/cacache/compare/v7.0.3...v7.0.4) (2017-04-15) +## [7.0.4](https://github.com/npm/cacache/compare/v7.0.3...v7.0.4) (2017-04-15) ### Bug Fixes -* **fix-owner:** throw away ENOENTs on chownr ([d49bbcd](https://github.com/zkat/cacache/commit/d49bbcd)) +* **fix-owner:** throw away ENOENTs on chownr ([d49bbcd](https://github.com/npm/cacache/commit/d49bbcd)) -## [7.0.3](https://github.com/zkat/cacache/compare/v7.0.2...v7.0.3) (2017-04-05) +## [7.0.3](https://github.com/npm/cacache/compare/v7.0.2...v7.0.3) (2017-04-05) ### Bug Fixes -* **read:** fixing error message for integrity verification failures ([9d4f0a5](https://github.com/zkat/cacache/commit/9d4f0a5)) +* **read:** fixing error message for integrity verification failures ([9d4f0a5](https://github.com/npm/cacache/commit/9d4f0a5)) -## [7.0.2](https://github.com/zkat/cacache/compare/v7.0.1...v7.0.2) (2017-04-03) +## [7.0.2](https://github.com/npm/cacache/compare/v7.0.1...v7.0.2) (2017-04-03) ### Bug Fixes -* **integrity:** use EINTEGRITY error code and update ssri ([8dc2e62](https://github.com/zkat/cacache/commit/8dc2e62)) +* **integrity:** use EINTEGRITY error code and update ssri ([8dc2e62](https://github.com/npm/cacache/commit/8dc2e62)) -## [7.0.1](https://github.com/zkat/cacache/compare/v7.0.0...v7.0.1) (2017-04-03) +## [7.0.1](https://github.com/npm/cacache/compare/v7.0.0...v7.0.1) (2017-04-03) ### Bug Fixes -* **docs:** fix header name conflict in readme ([afcd456](https://github.com/zkat/cacache/commit/afcd456)) +* **docs:** fix header name conflict in readme ([afcd456](https://github.com/npm/cacache/commit/afcd456)) -# [7.0.0](https://github.com/zkat/cacache/compare/v6.3.0...v7.0.0) (2017-04-03) +# [7.0.0](https://github.com/npm/cacache/compare/v6.3.0...v7.0.0) (2017-04-03) ### Bug Fixes -* **test:** fix content.write tests when running in docker ([d2e9b6a](https://github.com/zkat/cacache/commit/d2e9b6a)) +* **test:** fix content.write tests when running in docker ([d2e9b6a](https://github.com/npm/cacache/commit/d2e9b6a)) ### Features -* **integrity:** subresource integrity support (#78) ([b1e731f](https://github.com/zkat/cacache/commit/b1e731f)) +* **integrity:** subresource integrity support (#78) ([b1e731f](https://github.com/npm/cacache/commit/b1e731f)) ### BREAKING CHANGES @@ -382,150 +494,150 @@ All notable changes to this project will be documented in this file. 
See [standa -# [6.3.0](https://github.com/zkat/cacache/compare/v6.2.0...v6.3.0) (2017-04-01) +# [6.3.0](https://github.com/npm/cacache/compare/v6.2.0...v6.3.0) (2017-04-01) ### Bug Fixes -* **fixOwner:** ignore EEXIST race condition from mkdirp ([4670e9b](https://github.com/zkat/cacache/commit/4670e9b)) -* **index:** ignore index removal races when inserting ([b9d2fa2](https://github.com/zkat/cacache/commit/b9d2fa2)) -* **memo:** use lru-cache for better mem management (#75) ([d8ac5aa](https://github.com/zkat/cacache/commit/d8ac5aa)) +* **fixOwner:** ignore EEXIST race condition from mkdirp ([4670e9b](https://github.com/npm/cacache/commit/4670e9b)) +* **index:** ignore index removal races when inserting ([b9d2fa2](https://github.com/npm/cacache/commit/b9d2fa2)) +* **memo:** use lru-cache for better mem management (#75) ([d8ac5aa](https://github.com/npm/cacache/commit/d8ac5aa)) ### Features -* **dependencies:** Switch to move-concurrently (#77) ([dc6482d](https://github.com/zkat/cacache/commit/dc6482d)) +* **dependencies:** Switch to move-concurrently (#77) ([dc6482d](https://github.com/npm/cacache/commit/dc6482d)) -# [6.2.0](https://github.com/zkat/cacache/compare/v6.1.2...v6.2.0) (2017-03-15) +# [6.2.0](https://github.com/npm/cacache/compare/v6.1.2...v6.2.0) (2017-03-15) ### Bug Fixes -* **index:** additional bucket entry verification with checksum (#72) ([f8e0f25](https://github.com/zkat/cacache/commit/f8e0f25)) -* **verify:** return fixOwner.chownr promise ([6818521](https://github.com/zkat/cacache/commit/6818521)) +* **index:** additional bucket entry verification with checksum (#72) ([f8e0f25](https://github.com/npm/cacache/commit/f8e0f25)) +* **verify:** return fixOwner.chownr promise ([6818521](https://github.com/npm/cacache/commit/6818521)) ### Features -* **tmp:** safe tmp dir creation/management util (#73) ([c42da71](https://github.com/zkat/cacache/commit/c42da71)) +* **tmp:** safe tmp dir creation/management util (#73) ([c42da71](https://github.com/npm/cacache/commit/c42da71)) -## [6.1.2](https://github.com/zkat/cacache/compare/v6.1.1...v6.1.2) (2017-03-13) +## [6.1.2](https://github.com/npm/cacache/compare/v6.1.1...v6.1.2) (2017-03-13) ### Bug Fixes -* **index:** set default hashAlgorithm ([d6eb2f0](https://github.com/zkat/cacache/commit/d6eb2f0)) +* **index:** set default hashAlgorithm ([d6eb2f0](https://github.com/npm/cacache/commit/d6eb2f0)) -## [6.1.1](https://github.com/zkat/cacache/compare/v6.1.0...v6.1.1) (2017-03-13) +## [6.1.1](https://github.com/npm/cacache/compare/v6.1.0...v6.1.1) (2017-03-13) ### Bug Fixes -* **coverage:** bumping coverage for verify (#71) ([0b7faf6](https://github.com/zkat/cacache/commit/0b7faf6)) -* **deps:** glob should have been a regular dep :< ([0640bc4](https://github.com/zkat/cacache/commit/0640bc4)) +* **coverage:** bumping coverage for verify (#71) ([0b7faf6](https://github.com/npm/cacache/commit/0b7faf6)) +* **deps:** glob should have been a regular dep :< ([0640bc4](https://github.com/npm/cacache/commit/0640bc4)) -# [6.1.0](https://github.com/zkat/cacache/compare/v6.0.2...v6.1.0) (2017-03-12) +# [6.1.0](https://github.com/npm/cacache/compare/v6.0.2...v6.1.0) (2017-03-12) ### Bug Fixes -* **coverage:** more coverage for content reads (#70) ([ef4f70a](https://github.com/zkat/cacache/commit/ef4f70a)) -* **tests:** use safe-buffer because omfg (#69) ([6ab8132](https://github.com/zkat/cacache/commit/6ab8132)) +* **coverage:** more coverage for content reads (#70) ([ef4f70a](https://github.com/npm/cacache/commit/ef4f70a)) +* **tests:** use 
safe-buffer because omfg (#69) ([6ab8132](https://github.com/npm/cacache/commit/6ab8132)) ### Features -* **rm:** limited rm.all and fixed bugs (#66) ([d5d25ba](https://github.com/zkat/cacache/commit/d5d25ba)), closes [#66](https://github.com/zkat/cacache/issues/66) -* **verify:** tested, working cache verifier/gc (#68) ([45ad77a](https://github.com/zkat/cacache/commit/45ad77a)) +* **rm:** limited rm.all and fixed bugs (#66) ([d5d25ba](https://github.com/npm/cacache/commit/d5d25ba)), closes [#66](https://github.com/npm/cacache/issues/66) +* **verify:** tested, working cache verifier/gc (#68) ([45ad77a](https://github.com/npm/cacache/commit/45ad77a)) -## [6.0.2](https://github.com/zkat/cacache/compare/v6.0.1...v6.0.2) (2017-03-11) +## [6.0.2](https://github.com/npm/cacache/compare/v6.0.1...v6.0.2) (2017-03-11) ### Bug Fixes -* **index:** segment cache items with another subbucket (#64) ([c3644e5](https://github.com/zkat/cacache/commit/c3644e5)) +* **index:** segment cache items with another subbucket (#64) ([c3644e5](https://github.com/npm/cacache/commit/c3644e5)) -## [6.0.1](https://github.com/zkat/cacache/compare/v6.0.0...v6.0.1) (2017-03-05) +## [6.0.1](https://github.com/npm/cacache/compare/v6.0.0...v6.0.1) (2017-03-05) ### Bug Fixes -* **docs:** Missed spots in README ([8ffb7fa](https://github.com/zkat/cacache/commit/8ffb7fa)) +* **docs:** Missed spots in README ([8ffb7fa](https://github.com/npm/cacache/commit/8ffb7fa)) -# [6.0.0](https://github.com/zkat/cacache/compare/v5.0.3...v6.0.0) (2017-03-05) - - -### Bug Fixes - -* **api:** keep memo cache mostly-internal ([2f72d0a](https://github.com/zkat/cacache/commit/2f72d0a)) -* **content:** use the rest of the string, not the whole string ([fa8f3c3](https://github.com/zkat/cacache/commit/fa8f3c3)) -* **deps:** removed `format-number[@2](https://github.com/2).0.2` ([1187791](https://github.com/zkat/cacache/commit/1187791)) -* **deps:** removed inflight[@1](https://github.com/1).0.6 ([0d1819c](https://github.com/zkat/cacache/commit/0d1819c)) -* **deps:** rimraf[@2](https://github.com/2).6.1 ([9efab6b](https://github.com/zkat/cacache/commit/9efab6b)) -* **deps:** standard[@9](https://github.com/9).0.0 ([4202cba](https://github.com/zkat/cacache/commit/4202cba)) -* **deps:** tap[@10](https://github.com/10).3.0 ([aa03088](https://github.com/zkat/cacache/commit/aa03088)) -* **deps:** weallcontribute[@1](https://github.com/1).0.8 ([ad4f4dc](https://github.com/zkat/cacache/commit/ad4f4dc)) -* **docs:** add security note to hashKey ([03f81ba](https://github.com/zkat/cacache/commit/03f81ba)) -* **hashes:** change default hashAlgorithm to sha512 ([ea00ba6](https://github.com/zkat/cacache/commit/ea00ba6)) -* **hashes:** missed a spot for hashAlgorithm defaults ([45997d8](https://github.com/zkat/cacache/commit/45997d8)) -* **index:** add length header before JSON for verification ([fb8cb4d](https://github.com/zkat/cacache/commit/fb8cb4d)) -* **index:** change index filenames to sha1s of keys ([bbc5fca](https://github.com/zkat/cacache/commit/bbc5fca)) -* **index:** who cares about race conditions anyway ([b1d3888](https://github.com/zkat/cacache/commit/b1d3888)) -* **perf:** bulk-read get+read for massive speed ([d26cdf9](https://github.com/zkat/cacache/commit/d26cdf9)) -* **perf:** use bulk file reads for index reads ([79a8891](https://github.com/zkat/cacache/commit/79a8891)) -* **put-stream:** remove tmp file on stream insert error ([65f6632](https://github.com/zkat/cacache/commit/65f6632)) -* **put-stream:** robustified and predictibilized 
([daf9e08](https://github.com/zkat/cacache/commit/daf9e08)) -* **put-stream:** use new promise API for moves ([1d36013](https://github.com/zkat/cacache/commit/1d36013)) -* **readme:** updated to reflect new default hashAlgo ([c60a2fa](https://github.com/zkat/cacache/commit/c60a2fa)) -* **verify:** tiny typo fix ([db22d05](https://github.com/zkat/cacache/commit/db22d05)) +# [6.0.0](https://github.com/npm/cacache/compare/v5.0.3...v6.0.0) (2017-03-05) + + +### Bug Fixes + +* **api:** keep memo cache mostly-internal ([2f72d0a](https://github.com/npm/cacache/commit/2f72d0a)) +* **content:** use the rest of the string, not the whole string ([fa8f3c3](https://github.com/npm/cacache/commit/fa8f3c3)) +* **deps:** removed `format-number@2.0.2` ([1187791](https://github.com/npm/cacache/commit/1187791)) +* **deps:** removed inflight@1.0.6 ([0d1819c](https://github.com/npm/cacache/commit/0d1819c)) +* **deps:** rimraf@2.6.1 ([9efab6b](https://github.com/npm/cacache/commit/9efab6b)) +* **deps:** standard@9.0.0 ([4202cba](https://github.com/npm/cacache/commit/4202cba)) +* **deps:** tap@10.3.0 ([aa03088](https://github.com/npm/cacache/commit/aa03088)) +* **deps:** weallcontribute@1.0.8 ([ad4f4dc](https://github.com/npm/cacache/commit/ad4f4dc)) +* **docs:** add security note to hashKey ([03f81ba](https://github.com/npm/cacache/commit/03f81ba)) +* **hashes:** change default hashAlgorithm to sha512 ([ea00ba6](https://github.com/npm/cacache/commit/ea00ba6)) +* **hashes:** missed a spot for hashAlgorithm defaults ([45997d8](https://github.com/npm/cacache/commit/45997d8)) +* **index:** add length header before JSON for verification ([fb8cb4d](https://github.com/npm/cacache/commit/fb8cb4d)) +* **index:** change index filenames to sha1s of keys ([bbc5fca](https://github.com/npm/cacache/commit/bbc5fca)) +* **index:** who cares about race conditions anyway ([b1d3888](https://github.com/npm/cacache/commit/b1d3888)) +* **perf:** bulk-read get+read for massive speed ([d26cdf9](https://github.com/npm/cacache/commit/d26cdf9)) +* **perf:** use bulk file reads for index reads ([79a8891](https://github.com/npm/cacache/commit/79a8891)) +* **put-stream:** remove tmp file on stream insert error ([65f6632](https://github.com/npm/cacache/commit/65f6632)) +* **put-stream:** robustified and predictibilized ([daf9e08](https://github.com/npm/cacache/commit/daf9e08)) +* **put-stream:** use new promise API for moves ([1d36013](https://github.com/npm/cacache/commit/1d36013)) +* **readme:** updated to reflect new default hashAlgo ([c60a2fa](https://github.com/npm/cacache/commit/c60a2fa)) +* **verify:** tiny typo fix ([db22d05](https://github.com/npm/cacache/commit/db22d05)) ### Features -* **api:** converted external api ([7bf032f](https://github.com/zkat/cacache/commit/7bf032f)) -* **cacache:** exported clearMemoized() utility ([8d2c5b6](https://github.com/zkat/cacache/commit/8d2c5b6)) -* **cache:** add versioning to content and index ([31bc549](https://github.com/zkat/cacache/commit/31bc549)) -* **content:** collate content files into subdirs ([c094d9f](https://github.com/zkat/cacache/commit/c094d9f)) -* **deps:** [@npmcorp](https://github.com/npmcorp)/move[@1](https://github.com/1).0.0 ([bdd00bf](https://github.com/zkat/cacache/commit/bdd00bf)) -* **deps:** bluebird[@3](https://github.com/3).4.7 ([3a17aff](https://github.com/zkat/cacache/commit/3a17aff)) -* **deps:** promise-inflight[@1](https://github.com/1).0.1 ([a004fe6](https://github.com/zkat/cacache/commit/a004fe6)) -* **get:** added memoization support for get 
([c77d794](https://github.com/zkat/cacache/commit/c77d794)) -* **get:** export hasContent ([2956ec3](https://github.com/zkat/cacache/commit/2956ec3)) -* **index:** add hashAlgorithm and format insert ret val ([b639746](https://github.com/zkat/cacache/commit/b639746)) -* **index:** collate index files into subdirs ([e8402a5](https://github.com/zkat/cacache/commit/e8402a5)) -* **index:** promisify entry index ([cda3335](https://github.com/zkat/cacache/commit/cda3335)) -* **memo:** added memoization lib ([da07b92](https://github.com/zkat/cacache/commit/da07b92)) -* **memo:** export memoization api ([954b1b3](https://github.com/zkat/cacache/commit/954b1b3)) -* **move-file:** add move fallback for weird errors ([5cf4616](https://github.com/zkat/cacache/commit/5cf4616)) -* **perf:** bulk content write api ([51b536e](https://github.com/zkat/cacache/commit/51b536e)) -* **put:** added memoization support to put ([b613a70](https://github.com/zkat/cacache/commit/b613a70)) -* **read:** switched to promises ([a869362](https://github.com/zkat/cacache/commit/a869362)) -* **rm:** added memoization support to rm ([4205cf0](https://github.com/zkat/cacache/commit/4205cf0)) -* **rm:** switched to promises ([a000d24](https://github.com/zkat/cacache/commit/a000d24)) -* **util:** promise-inflight ownership fix requests ([9517cd7](https://github.com/zkat/cacache/commit/9517cd7)) -* **util:** use promises for api ([ae204bb](https://github.com/zkat/cacache/commit/ae204bb)) -* **verify:** converted to Promises ([f0b3974](https://github.com/zkat/cacache/commit/f0b3974)) +* **api:** converted external api ([7bf032f](https://github.com/npm/cacache/commit/7bf032f)) +* **cacache:** exported clearMemoized() utility ([8d2c5b6](https://github.com/npm/cacache/commit/8d2c5b6)) +* **cache:** add versioning to content and index ([31bc549](https://github.com/npm/cacache/commit/31bc549)) +* **content:** collate content files into subdirs ([c094d9f](https://github.com/npm/cacache/commit/c094d9f)) +* **deps:** `@npmcorp/move@1.0.0` ([bdd00bf](https://github.com/npm/cacache/commit/bdd00bf)) +* **deps:** `bluebird@3.4.7` ([3a17aff](https://github.com/npm/cacache/commit/3a17aff)) +* **deps:** `promise-inflight@1.0.1` ([a004fe6](https://github.com/npm/cacache/commit/a004fe6)) +* **get:** added memoization support for get ([c77d794](https://github.com/npm/cacache/commit/c77d794)) +* **get:** export hasContent ([2956ec3](https://github.com/npm/cacache/commit/2956ec3)) +* **index:** add hashAlgorithm and format insert ret val ([b639746](https://github.com/npm/cacache/commit/b639746)) +* **index:** collate index files into subdirs ([e8402a5](https://github.com/npm/cacache/commit/e8402a5)) +* **index:** promisify entry index ([cda3335](https://github.com/npm/cacache/commit/cda3335)) +* **memo:** added memoization lib ([da07b92](https://github.com/npm/cacache/commit/da07b92)) +* **memo:** export memoization api ([954b1b3](https://github.com/npm/cacache/commit/954b1b3)) +* **move-file:** add move fallback for weird errors ([5cf4616](https://github.com/npm/cacache/commit/5cf4616)) +* **perf:** bulk content write api ([51b536e](https://github.com/npm/cacache/commit/51b536e)) +* **put:** added memoization support to put ([b613a70](https://github.com/npm/cacache/commit/b613a70)) +* **read:** switched to promises ([a869362](https://github.com/npm/cacache/commit/a869362)) +* **rm:** added memoization support to rm ([4205cf0](https://github.com/npm/cacache/commit/4205cf0)) +* **rm:** switched to promises 
([a000d24](https://github.com/npm/cacache/commit/a000d24)) +* **util:** promise-inflight ownership fix requests ([9517cd7](https://github.com/npm/cacache/commit/9517cd7)) +* **util:** use promises for api ([ae204bb](https://github.com/npm/cacache/commit/ae204bb)) +* **verify:** converted to Promises ([f0b3974](https://github.com/npm/cacache/commit/f0b3974)) ### BREAKING CHANGES diff --git a/node_modules/cacache/README.es.md b/node_modules/cacache/README.es.md index 783a0a19b01da..55007e20dd4f1 100644 --- a/node_modules/cacache/README.es.md +++ b/node_modules/cacache/README.es.md @@ -287,7 +287,7 @@ entrada si existe. * `key` - Clave de la entrada. Igual al argumento `clave`. * `integrity` - [hacheo de Subresource Integrity](#integrity) del contenido al que se refiere esta entrada. -* `path` - Dirección del fichero de datos almacenados, relativa al argumento `cache`. +* `path` - Dirección del fichero de datos almacenados, unida al argumento `cache`. * `time` - Hora de creación de la entrada * `metadata` - Metadatos asignados a esta entrada por el usuario diff --git a/node_modules/cacache/README.md b/node_modules/cacache/README.md index 4b284588a65aa..7f8ec5eec05ff 100644 --- a/node_modules/cacache/README.md +++ b/node_modules/cacache/README.md @@ -1,12 +1,16 @@ -# cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/zkat/cacache.svg)](https://travis-ci.org/zkat/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/cacache?svg=true)](https://ci.appveyor.com/project/zkat/cacache) [![Coverage Status](https://coveralls.io/repos/github/zkat/cacache/badge.svg?branch=latest)](https://coveralls.io/github/zkat/cacache?branch=latest) +# cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/npm/cacache.svg)](https://travis-ci.org/npm/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/cacache?svg=true)](https://ci.appveyor.com/project/npm/cacache) [![Coverage Status](https://coveralls.io/repos/github/npm/cacache/badge.svg?branch=latest)](https://coveralls.io/github/npm/cacache?branch=latest) -[`cacache`](https://github.com/zkat/cacache) is a Node.js library for managing +[`cacache`](https://github.com/npm/cacache) is a Node.js library for managing local key and content address caches. It's really fast, really good at concurrency, and it will never give you corrupted data, even if cache files get corrupted or manipulated. -It was originally written to be used as [npm](https://npm.im)'s local cache, but -can just as easily be used on its own. +On systems that support user and group settings on files, cacache will +match the `uid` and `gid` values to the folder where the cache lives, even +when running as `root`. + +It was written to be used as [npm](https://npm.im)'s local cache, but can +just as easily be used on its own. _Translations: [español](README.es.md)_ @@ -290,7 +294,7 @@ one exists. * `key` - Key the entry was looked up under. Matches the `key` argument. * `integrity` - [Subresource Integrity hash](#integrity) for the content this entry refers to. -* `path` - Filesystem path relative to `cache` argument where content is stored. +* `path` - Filesystem path where content is stored, joined with `cache` argument. 
* `time` - Timestamp the entry was first added on. * `metadata` - User-assigned metadata associated with the entry/content. @@ -414,13 +418,6 @@ may also use any anagram of `'modnar'` to use this feature. Currently only supports one algorithm at a time (i.e., an array length of exactly `1`). Has no effect if `opts.integrity` is present. -##### `opts.uid`/`opts.gid` - -If provided, cacache will do its best to make sure any new files added to the -cache use this particular `uid`/`gid` combination. This can be used, -for example, to drop permissions when someone uses `sudo`, but cacache makes -no assumptions about your needs here. - ##### `opts.memoize` Default: null @@ -498,10 +495,11 @@ Completely resets the in-memory entry cache. Returns a unique temporary directory inside the cache's `tmp` dir. This directory will use the same safe user assignment that all the other stuff use. -Once the directory is made, it's the user's responsibility that all files within -are made according to the same `opts.gid`/`opts.uid` settings that would be -passed in. If not, you can ask cacache to do it for you by calling -[`tmp.fix()`](#tmp-fix), which will fix all tmp directory permissions. +Once the directory is made, it's the user's responsibility that all files +within are given the appropriate `gid`/`uid` ownership settings to match +the rest of the cache. If not, you can ask cacache to do it for you by +calling [`tmp.fix()`](#tmp-fix), which will fix all tmp directory +permissions. If you want automatic cleanup of this directory, use [`tmp.withTmp()`](#with-tpm) @@ -514,6 +512,27 @@ cacache.tmp.mkdir(cache).then(dir => { }) ``` +#### `> tmp.fix(cache) -> Promise` + +Sets the `uid` and `gid` properties on all files and folders within the tmp +folder to match the rest of the cache. + +Use this after manually writing files into [`tmp.mkdir`](#tmp-mkdir) or +[`tmp.withTmp`](#with-tmp). + +##### Example + +```javascript +cacache.tmp.mkdir(cache).then(dir => { + writeFile(path.join(dir, 'file'), someData).then(() => { + // make sure we didn't just put a root-owned file in the cache + cacache.tmp.fix().then(() => { + // all uids and gids match now + }) + }) +}) +``` + #### `> tmp.withTmp(cache, opts, cb) -> Promise` Creates a temporary directory with [`tmp.mkdir()`](#tmp-mkdir) and calls `cb` @@ -591,8 +610,6 @@ of entries removed, etc. ##### Options -* `opts.uid` - uid to assign to cache and its contents -* `opts.gid` - gid to assign to cache and its contents * `opts.filter` - receives a formatted entry. Return false to remove it. Note: might be called more than once on the same entry. diff --git a/node_modules/cacache/get.js b/node_modules/cacache/get.js index 7bafe128e4bf3..008cb83a9ed87 100644 --- a/node_modules/cacache/get.js +++ b/node_modules/cacache/get.js @@ -63,6 +63,55 @@ function getData (byDigest, cache, key, opts) { }) } +module.exports.sync = function get (cache, key, opts) { + return getDataSync(false, cache, key, opts) +} +module.exports.sync.byDigest = function getByDigest (cache, digest, opts) { + return getDataSync(true, cache, digest, opts) +} +function getDataSync (byDigest, cache, key, opts) { + opts = GetOpts(opts) + const memoized = ( + byDigest + ? memo.get.byDigest(cache, key, opts) + : memo.get(cache, key, opts) + ) + if (memoized && opts.memoize !== false) { + return byDigest ? 
memoized : { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size + } + } + const entry = !byDigest && index.find.sync(cache, key, opts) + if (!entry && !byDigest) { + throw new index.NotFoundError(cache, key) + } + const data = read.sync( + cache, + byDigest ? key : entry.integrity, + { + integrity: opts.integrity, + size: opts.size + } + ) + const res = byDigest + ? data + : { + metadata: entry.metadata, + data: data, + size: entry.size, + integrity: entry.integrity + } + if (opts.memoize && byDigest) { + memo.put.byDigest(cache, key, res, opts) + } else if (opts.memoize) { + memo.put(cache, entry, res.data, opts) + } + return res +} + module.exports.stream = getStream function getStream (cache, key, opts) { opts = GetOpts(opts) @@ -113,7 +162,7 @@ function getStream (cache, key, opts) { memoStream, stream ) - }, err => stream.emit('error', err)) + }).catch(err => stream.emit('error', err)) return stream } diff --git a/node_modules/cacache/lib/content/path.js b/node_modules/cacache/lib/content/path.js index fa6491ba6f895..c67c28061259f 100644 --- a/node_modules/cacache/lib/content/path.js +++ b/node_modules/cacache/lib/content/path.js @@ -12,7 +12,7 @@ const ssri = require('ssri') // module.exports = contentPath function contentPath (cache, integrity) { - const sri = ssri.parse(integrity, {single: true}) + const sri = ssri.parse(integrity, { single: true }) // contentPath is the *strongest* algo given return path.join.apply(path, [ contentDir(cache), diff --git a/node_modules/cacache/lib/content/read.js b/node_modules/cacache/lib/content/read.js index 8f1acc06d8d9d..7929524f82c3c 100644 --- a/node_modules/cacache/lib/content/read.js +++ b/node_modules/cacache/lib/content/read.js @@ -54,8 +54,8 @@ function readStream (cache, integrity, opts) { opts = ReadOpts(opts) const stream = new PassThrough() withContentSri(cache, integrity, (cpath, sri) => { - return lstatAsync(cpath).then(stat => ({cpath, sri, stat})) - }).then(({cpath, sri, stat}) => { + return lstatAsync(cpath).then(stat => ({ cpath, sri, stat })) + }).then(({ cpath, sri, stat }) => { return pipe( fs.createReadStream(cpath), ssri.integrityStream({ @@ -95,7 +95,7 @@ module.exports.hasContent = hasContent function hasContent (cache, integrity) { if (!integrity) { return BB.resolve(false) } return withContentSri(cache, integrity, (cpath, sri) => { - return lstatAsync(cpath).then(stat => ({size: stat.size, sri, stat})) + return lstatAsync(cpath).then(stat => ({ size: stat.size, sri, stat })) }).catch(err => { if (err.code === 'ENOENT') { return false } if (err.code === 'EPERM') { @@ -114,7 +114,7 @@ function hasContentSync (cache, integrity) { return withContentSriSync(cache, integrity, (cpath, sri) => { try { const stat = fs.lstatSync(cpath) - return {size: stat.size, sri, stat} + return { size: stat.size, sri, stat } } catch (err) { if (err.code === 'ENOENT') { return false } if (err.code === 'EPERM') { @@ -141,12 +141,12 @@ function withContentSri (cache, integrity, fn) { } else { return BB.any(sri[sri.pickAlgorithm()].map(meta => { return withContentSri(cache, meta, fn) - }, {concurrency: 1})) + }, { concurrency: 1 })) .catch(err => { if ([].some.call(err, e => e.code === 'ENOENT')) { throw Object.assign( new Error('No matching content found for ' + sri.toString()), - {code: 'ENOENT'} + { code: 'ENOENT' } ) } else { throw err[0] diff --git a/node_modules/cacache/lib/content/write.js b/node_modules/cacache/lib/content/write.js index 
c71363413ca4a..4d96a3cffe1f1 100644 --- a/node_modules/cacache/lib/content/write.js +++ b/node_modules/cacache/lib/content/write.js @@ -36,11 +36,11 @@ function write (cache, data, opts) { } return BB.using(makeTmp(cache, opts), tmp => ( writeFileAsync( - tmp.target, data, {flag: 'wx'} + tmp.target, data, { flag: 'wx' } ).then(() => ( moveToDestination(tmp, cache, sri, opts) )) - )).then(() => ({integrity: sri, size: data.length})) + )).then(() => ({ integrity: sri, size: data.length })) } module.exports.stream = writeStream @@ -111,8 +111,8 @@ function pipeToTmp (inputStream, cache, tmpTarget, opts, errCheck) { }) errCheck() return pipe(inputStream, hashStream, outStream).then(() => { - return {integrity, size} - }, err => { + return { integrity, size } + }).catch(err => { return rimraf(tmpTarget).then(() => { throw err }) }) }) @@ -121,7 +121,7 @@ function pipeToTmp (inputStream, cache, tmpTarget, opts, errCheck) { function makeTmp (cache, opts) { const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) return fixOwner.mkdirfix( - path.dirname(tmpTarget), opts.uid, opts.gid + cache, path.dirname(tmpTarget) ).then(() => ({ target: tmpTarget, moved: false @@ -134,14 +134,14 @@ function moveToDestination (tmp, cache, sri, opts, errCheck) { const destDir = path.dirname(destination) return fixOwner.mkdirfix( - destDir, opts.uid, opts.gid + cache, destDir ).then(() => { errCheck && errCheck() return moveFile(tmp.target, destination) }).then(() => { errCheck && errCheck() tmp.moved = true - return fixOwner.chownr(destination, opts.uid, opts.gid) + return fixOwner.chownr(cache, destination) }) } diff --git a/node_modules/cacache/lib/entry-index.js b/node_modules/cacache/lib/entry-index.js index 43fa7b95b1d0f..dee1824b1d091 100644 --- a/node_modules/cacache/lib/entry-index.js +++ b/node_modules/cacache/lib/entry-index.js @@ -32,9 +32,7 @@ module.exports.NotFoundError = class NotFoundError extends Error { const IndexOpts = figgyPudding({ metadata: {}, - size: {}, - uid: {}, - gid: {} + size: {} }) module.exports.insert = insert @@ -49,7 +47,7 @@ function insert (cache, key, integrity, opts) { metadata: opts.metadata } return fixOwner.mkdirfix( - path.dirname(bucket), opts.uid, opts.gid + cache, path.dirname(bucket) ).then(() => { const stringified = JSON.stringify(entry) // NOTE - Cleverness ahoy! @@ -63,8 +61,8 @@ function insert (cache, key, integrity, opts) { bucket, `\n${hashEntry(stringified)}\t${stringified}` ) }).then( - () => fixOwner.chownr(bucket, opts.uid, opts.gid) - ).catch({code: 'ENOENT'}, () => { + () => fixOwner.chownr(cache, bucket) + ).catch({ code: 'ENOENT' }, () => { // There's a class of race conditions that happen when things get deleted // during fixOwner, or between the two mkdirfix/chownr calls. 
// @@ -75,10 +73,36 @@ function insert (cache, key, integrity, opts) { }) } +module.exports.insert.sync = insertSync +function insertSync (cache, key, integrity, opts) { + opts = IndexOpts(opts) + const bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: Date.now(), + size: opts.size, + metadata: opts.metadata + } + fixOwner.mkdirfix.sync(cache, path.dirname(bucket)) + const stringified = JSON.stringify(entry) + fs.appendFileSync( + bucket, `\n${hashEntry(stringified)}\t${stringified}` + ) + try { + fixOwner.chownr.sync(cache, bucket) + } catch (err) { + if (err.code !== 'ENOENT') { + throw err + } + } + return formatEntry(cache, entry) +} + module.exports.find = find function find (cache, key) { const bucket = bucketPath(cache, key) - return bucketEntries(cache, bucket).then(entries => { + return bucketEntries(bucket).then(entries => { return entries.reduce((latest, next) => { if (next && next.key === key) { return formatEntry(cache, next) @@ -95,11 +119,36 @@ function find (cache, key) { }) } +module.exports.find.sync = findSync +function findSync (cache, key) { + const bucket = bucketPath(cache, key) + try { + return bucketEntriesSync(bucket).reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + module.exports.delete = del function del (cache, key, opts) { return insert(cache, key, null, opts) } +module.exports.delete.sync = delSync +function delSync (cache, key, opts) { + return insertSync(cache, key, null, opts) +} + module.exports.lsStream = lsStream function lsStream (cache) { const indexDir = bucketDir(cache) @@ -116,7 +165,6 @@ function lsStream (cache) { // "/cachename///*" return readdirOrEmpty(subbucketPath).map(entry => { const getKeyToEntry = bucketEntries( - cache, path.join(subbucketPath, entry) ).reduce((acc, entry) => { acc.set(entry.key, entry) @@ -128,7 +176,7 @@ function lsStream (cache) { const formatted = formatEntry(cache, entry) formatted && stream.push(formatted) } - }).catch({code: 'ENOENT'}, nop) + }).catch({ code: 'ENOENT' }, nop) }) }) }).then(() => { @@ -152,32 +200,39 @@ function ls (cache) { }) } -function bucketEntries (cache, bucket, filter) { +function bucketEntries (bucket, filter) { return readFileAsync( bucket, 'utf8' - ).then(data => { - let entries = [] - data.split('\n').forEach(entry => { - if (!entry) { return } - const pieces = entry.split('\t') - if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { - // Hash is no good! Corruption or malice? Doesn't matter! - // EJECT EJECT - return - } - let obj - try { - obj = JSON.parse(pieces[1]) - } catch (e) { - // Entry is corrupted! - return - } - if (obj) { - entries.push(obj) - } - }) - return entries + ).then(data => _bucketEntries(data, filter)) +} + +function bucketEntriesSync (bucket, filter) { + const data = fs.readFileSync(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data, filter) { + let entries = [] + data.split('\n').forEach(entry => { + if (!entry) { return } + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter! + // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (e) { + // Entry is corrupted! 
+ return + } + if (obj) { + entries.push(obj) + } }) + return entries } module.exports._bucketDir = bucketDir @@ -225,8 +280,8 @@ function formatEntry (cache, entry) { function readdirOrEmpty (dir) { return readdirAsync(dir) - .catch({code: 'ENOENT'}, () => []) - .catch({code: 'ENOTDIR'}, () => []) + .catch({ code: 'ENOENT' }, () => []) + .catch({ code: 'ENOTDIR' }, () => []) } function nop () { diff --git a/node_modules/cacache/lib/util/fix-owner.js b/node_modules/cacache/lib/util/fix-owner.js index 7000bff04807a..f5c33db5f0adc 100644 --- a/node_modules/cacache/lib/util/fix-owner.js +++ b/node_modules/cacache/lib/util/fix-owner.js @@ -5,40 +5,124 @@ const BB = require('bluebird') const chownr = BB.promisify(require('chownr')) const mkdirp = BB.promisify(require('mkdirp')) const inflight = require('promise-inflight') +const inferOwner = require('infer-owner') + +// Memoize getuid()/getgid() calls. +// patch process.setuid/setgid to invalidate cached value on change +const self = { uid: null, gid: null } +const getSelf = () => { + if (typeof self.uid !== 'number') { + self.uid = process.getuid() + const setuid = process.setuid + process.setuid = (uid) => { + self.uid = null + process.setuid = setuid + return process.setuid(uid) + } + } + if (typeof self.gid !== 'number') { + self.gid = process.getgid() + const setgid = process.setgid + process.setgid = (gid) => { + self.gid = null + process.setgid = setgid + return process.setgid(gid) + } + } +} module.exports.chownr = fixOwner -function fixOwner (filepath, uid, gid) { +function fixOwner (cache, filepath) { if (!process.getuid) { // This platform doesn't need ownership fixing return BB.resolve() } - if (typeof uid !== 'number' && typeof gid !== 'number') { - // There's no permissions override. Nothing to do here. + + getSelf() + if (self.uid !== 0) { + // almost certainly can't chown anyway return BB.resolve() } - if ((typeof uid === 'number' && process.getuid() === uid) && - (typeof gid === 'number' && process.getgid() === gid)) { + + return BB.resolve(inferOwner(cache)).then(owner => { + const { uid, gid } = owner + // No need to override if it's already what we used. - return BB.resolve() + if (self.uid === uid && self.gid === gid) { + return + } + + return inflight( + 'fixOwner: fixing ownership on ' + filepath, + () => chownr( + filepath, + typeof uid === 'number' ? uid : self.uid, + typeof gid === 'number' ? gid : self.gid + ).catch({ code: 'ENOENT' }, () => null) + ) + }) +} + +module.exports.chownr.sync = fixOwnerSync +function fixOwnerSync (cache, filepath) { + if (!process.getuid) { + // This platform doesn't need ownership fixing + return } - return inflight( - 'fixOwner: fixing ownership on ' + filepath, - () => chownr( + const { uid, gid } = inferOwner.sync(cache) + getSelf() + if (self.uid === uid && self.gid === gid) { + // No need to override if it's already what we used. + return + } + try { + chownr.sync( filepath, - typeof uid === 'number' ? uid : process.getuid(), - typeof gid === 'number' ? gid : process.getgid() - ).catch({code: 'ENOENT'}, () => null) - ) + typeof uid === 'number' ? uid : self.uid, + typeof gid === 'number' ? gid : self.gid + ) + } catch (err) { + // only catch ENOENT, any other error is a problem. 
+ if (err.code === 'ENOENT') { + return null + } + throw err + } } module.exports.mkdirfix = mkdirfix -function mkdirfix (p, uid, gid, cb) { - return mkdirp(p).then(made => { +function mkdirfix (cache, p, cb) { + // we have to infer the owner _before_ making the directory, even though + // we aren't going to use the results, since the cache itself might not + // exist yet. If we mkdirp it, then our current uid/gid will be assumed + // to be correct if it creates the cache folder in the process. + return BB.resolve(inferOwner(cache)).then(() => { + return mkdirp(p).then(made => { + if (made) { + return fixOwner(cache, made).then(() => made) + } + }).catch({ code: 'EEXIST' }, () => { + // There's a race in mkdirp! + return fixOwner(cache, p).then(() => null) + }) + }) +} + +module.exports.mkdirfix.sync = mkdirfixSync +function mkdirfixSync (cache, p) { + try { + inferOwner.sync(cache) + const made = mkdirp.sync(p) if (made) { - return fixOwner(made, uid, gid).then(() => made) + fixOwnerSync(cache, made) + return made } - }).catch({code: 'EEXIST'}, () => { - // There's a race in mkdirp! - return fixOwner(p, uid, gid).then(() => null) - }) + } catch (err) { + if (err.code === 'EEXIST') { + fixOwnerSync(cache, p) + return null + } else { + throw err + } + } } diff --git a/node_modules/cacache/lib/util/tmp.js b/node_modules/cacache/lib/util/tmp.js index 65fc4b297eb42..78494b8eae712 100644 --- a/node_modules/cacache/lib/util/tmp.js +++ b/node_modules/cacache/lib/util/tmp.js @@ -9,16 +9,14 @@ const rimraf = BB.promisify(require('rimraf')) const uniqueFilename = require('unique-filename') const TmpOpts = figgyPudding({ - tmpPrefix: {}, - uid: {}, - gid: {} + tmpPrefix: {} }) module.exports.mkdir = mktmpdir function mktmpdir (cache, opts) { opts = TmpOpts(opts) const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - return fixOwner.mkdirfix(tmpTarget, opts.uid, opts.gid).then(() => { + return fixOwner.mkdirfix(cache, tmpTarget).then(() => { return tmpTarget }) } @@ -34,7 +32,6 @@ function withTmp (cache, opts, cb) { } module.exports.fix = fixtmpdir -function fixtmpdir (cache, opts) { - opts = TmpOpts(opts) - return fixOwner(path.join(cache, 'tmp'), opts.uid, opts.gid) +function fixtmpdir (cache) { + return fixOwner(cache, path.join(cache, 'tmp')) } diff --git a/node_modules/cacache/lib/verify.js b/node_modules/cacache/lib/verify.js index 3468bc6b8ec6f..617d38db12c33 100644 --- a/node_modules/cacache/lib/verify.js +++ b/node_modules/cacache/lib/verify.js @@ -22,9 +22,7 @@ const VerifyOpts = figgyPudding({ filter: {}, log: { default: { silly () {} } - }, - uid: {}, - gid: {} + } }) module.exports = verify @@ -67,9 +65,9 @@ function markEndTime (cache, opts) { function fixPerms (cache, opts) { opts.log.silly('verify', 'fixing cache permissions') - return fixOwner.mkdirfix(cache, opts.uid, opts.gid).then(() => { + return fixOwner.mkdirfix(cache, cache).then(() => { // TODO - fix file permissions too - return fixOwner.chownr(cache, opts.uid, opts.gid) + return fixOwner.chownr(cache, cache) }).then(() => null) } @@ -130,7 +128,7 @@ function garbageCollect (cache, opts) { }) }) } - }, {concurrency: opts.concurrency})) + }, { concurrency: opts.concurrency })) }) }) } @@ -150,7 +148,7 @@ function verifyContent (filepath, sri) { contentInfo.valid = false }) }).then(() => contentInfo) - }).catch({code: 'ENOENT'}, () => ({size: 0, valid: false})) + }).catch({ code: 'ENOENT' }, () => ({ size: 0, valid: false })) } function rebuildIndex (cache, opts) { @@ -183,7 +181,7 @@ function rebuildIndex 
(cache, opts) { } return BB.map(Object.keys(buckets), key => { return rebuildBucket(cache, buckets[key], stats, opts) - }, {concurrency: opts.concurrency}).then(() => stats) + }, { concurrency: opts.concurrency }).then(() => stats) }) } @@ -195,12 +193,10 @@ function rebuildBucket (cache, bucket, stats, opts) { const content = contentPath(cache, entry.integrity) return fs.statAsync(content).then(() => { return index.insert(cache, entry.key, entry.integrity, { - uid: opts.uid, - gid: opts.gid, metadata: entry.metadata, size: entry.size }).then(() => { stats.totalEntries++ }) - }).catch({code: 'ENOENT'}, () => { + }).catch({ code: 'ENOENT' }, () => { stats.rejectedEntries++ stats.missingContent++ }) @@ -216,7 +212,11 @@ function cleanTmp (cache, opts) { function writeVerifile (cache, opts) { const verifile = path.join(cache, '_lastverified') opts.log.silly('verify', 'writing verifile to ' + verifile) - return fs.writeFileAsync(verifile, '' + (+(new Date()))) + try { + return fs.writeFileAsync(verifile, '' + (+(new Date()))) + } finally { + fixOwner.chownr.sync(cache, verifile) + } } module.exports.lastRun = lastRun diff --git a/node_modules/cacache/locales/en.js b/node_modules/cacache/locales/en.js index 22025cf0e895e..1715fdb53cd3f 100644 --- a/node_modules/cacache/locales/en.js +++ b/node_modules/cacache/locales/en.js @@ -18,12 +18,15 @@ x.ls.stream = cache => ls.stream(cache) x.get = (cache, key, opts) => get(cache, key, opts) x.get.byDigest = (cache, hash, opts) => get.byDigest(cache, hash, opts) +x.get.sync = (cache, key, opts) => get.sync(cache, key, opts) +x.get.sync.byDigest = (cache, key, opts) => get.sync.byDigest(cache, key, opts) x.get.stream = (cache, key, opts) => get.stream(cache, key, opts) x.get.stream.byDigest = (cache, hash, opts) => get.stream.byDigest(cache, hash, opts) x.get.copy = (cache, key, dest, opts) => get.copy(cache, key, dest, opts) x.get.copy.byDigest = (cache, hash, dest, opts) => get.copy.byDigest(cache, hash, dest, opts) x.get.info = (cache, key) => get.info(cache, key) x.get.hasContent = (cache, hash) => get.hasContent(cache, hash) +x.get.hasContent.sync = (cache, hash) => get.hasContent.sync(cache, hash) x.put = (cache, key, data, opts) => put(cache, key, data, opts) x.put.stream = (cache, key, opts) => put.stream(cache, key, opts) diff --git a/node_modules/cacache/locales/en.json b/node_modules/cacache/locales/en.json index 82ecb0832490d..4f145288408ec 100644 --- a/node_modules/cacache/locales/en.json +++ b/node_modules/cacache/locales/en.json @@ -2,5 +2,6 @@ "No cache entry for `%s` found in `%s`": "No cache entry for %s found in %s", "Integrity verification failed for %s (%s)": "Integrity verification failed for %s (%s)", "Bad data size: expected inserted data to be %s bytes, but got %s instead": "Bad data size: expected inserted data to be %s bytes, but got %s instead", - "Cache input stream was empty": "Cache input stream was empty" -} + "Cache input stream was empty": "Cache input stream was empty", + "Integrity check failed:\n Wanted: %s\n Found: %s": "Integrity check failed:\n Wanted: %s\n Found: %s" +} \ No newline at end of file diff --git a/node_modules/cacache/locales/es.js b/node_modules/cacache/locales/es.js index 9a27de6585a23..ac4e4cfe7d2f4 100644 --- a/node_modules/cacache/locales/es.js +++ b/node_modules/cacache/locales/es.js @@ -18,12 +18,15 @@ x.ls.flujo = cache => ls.stream(cache) x.saca = (cache, clave, ops) => get(cache, clave, ops) x.saca.porHacheo = (cache, hacheo, ops) => get.byDigest(cache, hacheo, ops) +x.saca.sinc = (cache, 
clave, ops) => get.sync(cache, clave, ops) +x.saca.sinc.porHacheo = (cache, hacheo, ops) => get.sync.byDigest(cache, hacheo, ops) x.saca.flujo = (cache, clave, ops) => get.stream(cache, clave, ops) x.saca.flujo.porHacheo = (cache, hacheo, ops) => get.stream.byDigest(cache, hacheo, ops) x.sava.copia = (cache, clave, destino, opts) => get.copy(cache, clave, destino, opts) x.sava.copia.porHacheo = (cache, hacheo, destino, opts) => get.copy.byDigest(cache, hacheo, destino, opts) x.saca.info = (cache, clave) => get.info(cache, clave) x.saca.tieneDatos = (cache, hacheo) => get.hasContent(cache, hacheo) +x.saca.tieneDatos.sinc = (cache, hacheo) => get.hasContent.sync(cache, hacheo) x.mete = (cache, clave, datos, ops) => put(cache, clave, datos, ops) x.mete.flujo = (cache, clave, ops) => put.stream(cache, clave, ops) diff --git a/node_modules/cacache/package.json b/node_modules/cacache/package.json index bf13242f80eb5..aa20092ccc9a1 100644 --- a/node_modules/cacache/package.json +++ b/node_modules/cacache/package.json @@ -1,19 +1,19 @@ { - "_from": "cacache@11.2.0", - "_id": "cacache@11.2.0", + "_from": "cacache@12.0.3", + "_id": "cacache@12.0.3", "_inBundle": false, - "_integrity": "sha512-IFWl6lfK6wSeYCHUXh+N1lY72UDrpyrYQJNIVQf48paDuWbv5RbAtJYf/4gUQFObTCHZwdZ5sI8Iw7nqwP6nlQ==", + "_integrity": "sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw==", "_location": "/cacache", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "cacache@11.2.0", + "raw": "cacache@12.0.3", "name": "cacache", "escapedName": "cacache", - "rawSpec": "11.2.0", + "rawSpec": "12.0.3", "saveSpec": null, - "fetchSpec": "11.2.0" + "fetchSpec": "12.0.3" }, "_requiredBy": [ "#USER", @@ -21,16 +21,16 @@ "/make-fetch-happen", "/pacote" ], - "_resolved": "https://registry.npmjs.org/cacache/-/cacache-11.2.0.tgz", - "_shasum": "617bdc0b02844af56310e411c0878941d5739965", - "_spec": "cacache@11.2.0", - "_where": "/Users/zkat/Documents/code/work/npm", + "_resolved": "https://registry.npmjs.org/cacache/-/cacache-12.0.3.tgz", + "_shasum": "be99abba4e1bf5df461cd5a2c1071fc432573390", + "_spec": "cacache@12.0.3", + "_where": "/Users/isaacs/dev/npm/cli", "author": { "name": "Kat Marchán", "email": "kzm@sykosomatic.org" }, "bugs": { - "url": "https://github.com/zkat/cacache/issues" + "url": "https://github.com/npm/cacache/issues" }, "bundleDependencies": false, "cache-version": { @@ -56,41 +56,42 @@ } ], "dependencies": { - "bluebird": "^3.5.1", - "chownr": "^1.0.1", - "figgy-pudding": "^3.1.0", - "glob": "^7.1.2", - "graceful-fs": "^4.1.11", - "lru-cache": "^4.1.3", + "bluebird": "^3.5.5", + "chownr": "^1.1.1", + "figgy-pudding": "^3.5.1", + "glob": "^7.1.4", + "graceful-fs": "^4.1.15", + "infer-owner": "^1.0.3", + "lru-cache": "^5.1.1", "mississippi": "^3.0.0", "mkdirp": "^0.5.1", "move-concurrently": "^1.0.1", "promise-inflight": "^1.0.1", - "rimraf": "^2.6.2", - "ssri": "^6.0.0", - "unique-filename": "^1.1.0", + "rimraf": "^2.6.3", + "ssri": "^6.0.1", + "unique-filename": "^1.1.1", "y18n": "^4.0.0" }, "deprecated": false, "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", "devDependencies": { "benchmark": "^2.1.4", - "chalk": "^2.3.2", + "chalk": "^2.4.2", "cross-env": "^5.1.4", - "require-inject": "^1.4.2", - "standard": "^11.0.1", - "standard-version": "^4.4.0", - "tacks": "^1.2.7", - "tap": "^12.0.1", + "require-inject": "^1.4.4", + "standard": "^12.0.1", + "standard-version": "^6.0.1", + "tacks": 
"^1.3.0", + "tap": "^12.7.0", "weallbehave": "^1.2.0", - "weallcontribute": "^1.0.8" + "weallcontribute": "^1.0.9" }, "files": [ "*.js", "lib", "locales" ], - "homepage": "https://github.com/zkat/cacache#readme", + "homepage": "https://github.com/npm/cacache#readme", "keywords": [ "cache", "caching", @@ -111,7 +112,7 @@ "name": "cacache", "repository": { "type": "git", - "url": "git+https://github.com/zkat/cacache.git" + "url": "git+https://github.com/npm/cacache.git" }, "scripts": { "benchmarks": "node test/benchmarks", @@ -124,5 +125,5 @@ "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" }, - "version": "11.2.0" + "version": "12.0.3" } diff --git a/node_modules/cacache/put.js b/node_modules/cacache/put.js index 01b0dd84fc5ad..a400639303a44 100644 --- a/node_modules/cacache/put.js +++ b/node_modules/cacache/put.js @@ -16,10 +16,9 @@ const PutOpts = figgyPudding({ pickAlgorithm: {}, size: {}, tmpPrefix: {}, - uid: {}, - gid: {}, single: {}, sep: {}, + error: {}, strict: {} }) @@ -28,7 +27,7 @@ function putData (cache, key, data, opts) { opts = PutOpts(opts) return write(cache, data, opts).then(res => { return index.insert( - cache, key, res.integrity, opts.concat({size: res.size}) + cache, key, res.integrity, opts.concat({ size: res.size }) ).then(entry => { if (opts.memoize) { memo.put(cache, entry, data, opts) @@ -63,7 +62,7 @@ function putStream (cache, key, opts) { }) }, cb => { contentStream.end(() => { - index.insert(cache, key, integrity, opts.concat({size})).then(entry => { + index.insert(cache, key, integrity, opts.concat({ size })).then(entry => { if (opts.memoize) { memo.put(cache, entry, Buffer.concat(memoData, memoTotal), opts) } diff --git a/node_modules/call-limit/CHANGELOG.md b/node_modules/call-limit/CHANGELOG.md new file mode 100644 index 0000000000000..a6b1df978e11d --- /dev/null +++ b/node_modules/call-limit/CHANGELOG.md @@ -0,0 +1,16 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +### [1.1.1](https://github.com/iarna/call-limit/compare/v1.1.0...v1.1.1) (2019-06-03) + + +### Bug Fixes + +* **call-limit:** Limiter rewrite ([bbd0ea6](https://github.com/iarna/call-limit/commit/bbd0ea6)) +* **test:** Update tests to let/const ([f0b7f98](https://github.com/iarna/call-limit/commit/f0b7f98)) + + +### Tests + +* Test coverage for limited promise based methods ([d5069f4](https://github.com/iarna/call-limit/commit/d5069f4)) diff --git a/node_modules/call-limit/LICENSE b/node_modules/call-limit/LICENSE new file mode 100644 index 0000000000000..216e843abcbac --- /dev/null +++ b/node_modules/call-limit/LICENSE @@ -0,0 +1,13 @@ +Copyright Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/call-limit/README.md b/node_modules/call-limit/README.md index 590ca419ee6cf..62086b68496f3 100644 --- a/node_modules/call-limit/README.md +++ b/node_modules/call-limit/README.md @@ -4,17 +4,17 @@ call-limit Limit the number of simultaneous executions of a async function. ```javascript -var fs = require('fs') -var limit = require('call-limit') -var limitedStat = limit(fs.stat, 5) +const fs = require('fs') +const limit = require('call-limit') +const limitedStat = limit(fs.stat, 5) ``` Or with promise returning functions: ```javascript -var fs = Bluebird.promisifyAll(require('fs')) -var limit = require('call-limit') -var limitedStat = limit.promise(fs.statAsync, 5) +const fs = Bluebird.promisifyAll(require('fs')) +const limit = require('call-limit') +const limitedStat = limit.promise(fs.statAsync, 5) ``` ### USAGE: @@ -22,7 +22,7 @@ var limitedStat = limit.promise(fs.statAsync, 5) Given that: ```javascript -var limit = require('call-limit') +const limit = require('call-limit') ``` ### limit(func, maxRunning) → limitedFunc diff --git a/node_modules/call-limit/call-limit.js b/node_modules/call-limit/call-limit.js index d6e4534b8a2e8..2dc7d279a3a52 100644 --- a/node_modules/call-limit/call-limit.js +++ b/node_modules/call-limit/call-limit.js @@ -1,86 +1,99 @@ -"use strict" +'use strict' -var defaultMaxRunning = 50 +const defaultMaxRunning = 50 -var limit = module.exports = function (func, maxRunning) { - var running = 0 - var queue = [] +const limit = module.exports = function (func, maxRunning) { + const state = {running: 0, queue: []} if (!maxRunning) maxRunning = defaultMaxRunning return function limited () { - var self = this - var args = Array.prototype.slice.call(arguments) - if (running >= maxRunning) { - queue.push({self: this, args: args}) - return + const args = Array.prototype.slice.call(arguments) + if (state.running >= maxRunning) { + state.queue.push({obj: this, args}) + } else { + callFunc(this, args) + } + } + function callNext () { + if (state.queue.length === 0) return + const next = state.queue.shift() + callFunc(next.obj, next.args) + } + function callFunc (obj, args) { + const cb = typeof args[args.length - 1] === 'function' && args.pop() + try { + ++state.running + func.apply(obj, args.concat(function () { + --state.running + process.nextTick(callNext) + if (cb) process.nextTick(() => cb.apply(obj, arguments)) + })) + } catch (err) { + --state.running + if (cb) process.nextTick(() => cb.call(obj, err)) + process.nextTick(callNext) } - var cb = typeof args[args.length-1] === 'function' && args.pop() - ++ running - args.push(function () { - var cbargs = arguments - -- running - cb && process.nextTick(function () { - cb.apply(self, cbargs) - }) - if (queue.length) { - var next = queue.shift() - limited.apply(next.self, next.args) - } - }) - func.apply(self, args) } } module.exports.method = function (classOrObj, method, maxRunning) { if (typeof classOrObj === 'function') { - var func = classOrObj.prototype[method] + const func = classOrObj.prototype[method] classOrObj.prototype[method] = limit(func, maxRunning) } else { - var func = classOrObj[method] + const func = classOrObj[method] classOrObj[method] = limit(func, maxRunning) } } 
module.exports.promise = function (func, maxRunning) { - var running = 0 - var queue = [] + const state = {running: 0, queue: []} if (!maxRunning) maxRunning = defaultMaxRunning - return function () { - var self = this - var args = Array.prototype.slice.call(arguments) - return new Promise(function (resolve) { - if (running >= maxRunning) { - queue.push({self: self, args: args, resolve: resolve}) - return - } else { - runNext(self, args, resolve) - } - function runNext (self, args, resolve) { - ++ running - resolve( - func.apply(self, args) - .then(finish, function (err) { - finish(err) - throw err - })) - } - - function finish () { - -- running - if (queue.length) { - var next = queue.shift() - process.nextTick(runNext, next.self, next.args, next.resolve) - } - } + return function limited () { + const args = Array.prototype.slice.call(arguments) + if (state.running >= maxRunning) { + return new Promise(resolve => { + state.queue.push({resolve, obj: this, args}) + }) + } else { + return callFunc(this, args) + } + } + function callNext () { + if (state.queue.length === 0) return + const next = state.queue.shift() + next.resolve(callFunc(next.obj, next.args)) + } + function callFunc (obj, args) { + return callFinally(() => { + ++state.running + return func.apply(obj, args) + }, () => { + --state.running + process.nextTick(callNext) }) } + function callFinally (action, fin) { + try { + return Promise.resolve(action()).then(value => { + fin() + return value + }, err => { + fin() + return Promise.reject(err) + }) + } catch (err) { + fin() + return Promise.reject(err) + } + } } module.exports.promise.method = function (classOrObj, method, maxRunning) { if (typeof classOrObj === 'function') { - var func = classOrObj.prototype[method] + const func = classOrObj.prototype[method] classOrObj.prototype[method] = limit.promise(func, maxRunning) } else { - var func = classOrObj[method] + const func = classOrObj[method] classOrObj[method] = limit.promise(func, maxRunning) } } diff --git a/node_modules/call-limit/package.json b/node_modules/call-limit/package.json index e074b244eb367..2e3b264de115b 100644 --- a/node_modules/call-limit/package.json +++ b/node_modules/call-limit/package.json @@ -1,32 +1,28 @@ { - "_args": [ - [ - "call-limit@1.1.0", - "/Users/rebecca/code/npm" - ] - ], - "_from": "call-limit@1.1.0", - "_id": "call-limit@1.1.0", + "_from": "call-limit@1.1.1", + "_id": "call-limit@1.1.1", "_inBundle": false, - "_integrity": "sha1-b9YbA/PaQqLNDsK2DwK9DnGZH+o=", + "_integrity": "sha512-5twvci5b9eRBw2wCfPtN0GmlR2/gadZqyFpPhOK6CvMFoFgA+USnZ6Jpu1lhG9h85pQ3Ouil3PfXWRD4EUaRiQ==", "_location": "/call-limit", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "call-limit@1.1.0", + "raw": "call-limit@1.1.1", "name": "call-limit", "escapedName": "call-limit", - "rawSpec": "1.1.0", + "rawSpec": "1.1.1", "saveSpec": null, - "fetchSpec": "1.1.0" + "fetchSpec": "1.1.1" }, "_requiredBy": [ + "#USER", "/" ], - "_resolved": "https://registry.npmjs.org/call-limit/-/call-limit-1.1.0.tgz", - "_spec": "1.1.0", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/call-limit/-/call-limit-1.1.1.tgz", + "_shasum": "ef15f2670db3f1992557e2d965abc459e6e358d4", + "_spec": "call-limit@1.1.1", + "_where": "/Users/isaacs/dev/npm/cli", "author": { "name": "Rebecca Turner", "email": "me@re-becca.org" @@ -34,10 +30,16 @@ "bugs": { "url": "https://github.com/iarna/call-limit/issues" }, + "bundleDependencies": false, "dependencies": {}, + "deprecated": false, 
"description": "Limit the number of simultaneous calls to an async function", "devDependencies": { - "tap": "^1.0.0" + "@iarna/standard": "*", + "standard-version": "*", + "tap": "^14.2.0", + "weallbehave": "*", + "weallcontribute": "*" }, "files": [ "call-limit.js" @@ -51,7 +53,13 @@ "url": "git+https://github.com/iarna/call-limit.git" }, "scripts": { - "test": "tap test/*.js" + "postrelease": "npm publish && git push --follow-tags", + "prerelease": "npm t", + "pretest": "iarna-standard", + "release": "standard-version -s", + "test": "tap test/*.js", + "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", + "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" }, - "version": "1.1.0" + "version": "1.1.1" } diff --git a/node_modules/chownr/chownr.js b/node_modules/chownr/chownr.js index ecd7b452df57d..0d40932169654 100644 --- a/node_modules/chownr/chownr.js +++ b/node_modules/chownr/chownr.js @@ -1,52 +1,167 @@ -module.exports = chownr -chownr.sync = chownrSync +'use strict' +const fs = require('fs') +const path = require('path') + +/* istanbul ignore next */ +const LCHOWN = fs.lchown ? 'lchown' : 'chown' +/* istanbul ignore next */ +const LCHOWNSYNC = fs.lchownSync ? 'lchownSync' : 'chownSync' + +/* istanbul ignore next */ +const needEISDIRHandled = fs.lchown && + !process.version.match(/v1[1-9]+\./) && + !process.version.match(/v10\.[6-9]/) + +const lchownSync = (path, uid, gid) => { + try { + return fs[LCHOWNSYNC](path, uid, gid) + } catch (er) { + if (er.code !== 'ENOENT') + throw er + } +} + +/* istanbul ignore next */ +const chownSync = (path, uid, gid) => { + try { + return fs.chownSync(path, uid, gid) + } catch (er) { + if (er.code !== 'ENOENT') + throw er + } +} + +/* istanbul ignore next */ +const handleEISDIR = + needEISDIRHandled ? (path, uid, gid, cb) => er => { + // Node prior to v10 had a very questionable implementation of + // fs.lchown, which would always try to call fs.open on a directory + // Fall back to fs.chown in those cases. + if (!er || er.code !== 'EISDIR') + cb(er) + else + fs.chown(path, uid, gid, cb) + } + : (_, __, ___, cb) => cb + +/* istanbul ignore next */ +const handleEISDirSync = + needEISDIRHandled ? (path, uid, gid) => { + try { + return lchownSync(path, uid, gid) + } catch (er) { + if (er.code !== 'EISDIR') + throw er + chownSync(path, uid, gid) + } + } + : (path, uid, gid) => lchownSync(path, uid, gid) + +// fs.readdir could only accept an options object as of node v6 +const nodeVersion = process.version +let readdir = (path, options, cb) => fs.readdir(path, options, cb) +let readdirSync = (path, options) => fs.readdirSync(path, options) +/* istanbul ignore next */ +if (/^v4\./.test(nodeVersion)) + readdir = (path, options, cb) => fs.readdir(path, cb) + +const chown = (cpath, uid, gid, cb) => { + fs[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, er => { + // Skip ENOENT error + cb(er && er.code !== 'ENOENT' ? er : null) + })) +} + +const chownrKid = (p, child, uid, gid, cb) => { + if (typeof child === 'string') + return fs.lstat(path.resolve(p, child), (er, stats) => { + // Skip ENOENT error + if (er) + return cb(er.code !== 'ENOENT' ? 
er : null) + stats.name = child + chownrKid(p, stats, uid, gid, cb) + }) -var fs = require("fs") -, path = require("path") - -function chownr (p, uid, gid, cb) { - fs.readdir(p, function (er, children) { - // any error other than ENOTDIR means it's not readable, or - // doesn't exist. give up. - if (er && er.code !== "ENOTDIR") return cb(er) - if (er || !children.length) return fs.chown(p, uid, gid, cb) - - var len = children.length - , errState = null - children.forEach(function (child) { - var pathChild = path.resolve(p, child); - fs.lstat(pathChild, function(er, stats) { - if (er) - return cb(er) - if (!stats.isSymbolicLink()) - chownr(pathChild, uid, gid, then) - else - then() - }) + if (child.isDirectory()) { + chownr(path.resolve(p, child.name), uid, gid, er => { + if (er) + return cb(er) + const cpath = path.resolve(p, child.name) + chown(cpath, uid, gid, cb) }) - function then (er) { - if (errState) return - if (er) return cb(errState = er) - if (-- len === 0) return fs.chown(p, uid, gid, cb) + } else { + const cpath = path.resolve(p, child.name) + chown(cpath, uid, gid, cb) + } +} + + +const chownr = (p, uid, gid, cb) => { + readdir(p, { withFileTypes: true }, (er, children) => { + // any error other than ENOTDIR or ENOTSUP means it's not readable, + // or doesn't exist. give up. + if (er) { + if (er.code === 'ENOENT') + return cb() + else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP') + return cb(er) + } + if (er || !children.length) + return chown(p, uid, gid, cb) + + let len = children.length + let errState = null + const then = er => { + if (errState) + return + if (er) + return cb(errState = er) + if (-- len === 0) + return chown(p, uid, gid, cb) } + + children.forEach(child => chownrKid(p, child, uid, gid, then)) }) } -function chownrSync (p, uid, gid) { - var children +const chownrKidSync = (p, child, uid, gid) => { + if (typeof child === 'string') { + try { + const stats = fs.lstatSync(path.resolve(p, child)) + stats.name = child + child = stats + } catch (er) { + if (er.code === 'ENOENT') + return + else + throw er + } + } + + if (child.isDirectory()) + chownrSync(path.resolve(p, child.name), uid, gid) + + handleEISDirSync(path.resolve(p, child.name), uid, gid) +} + +const chownrSync = (p, uid, gid) => { + let children try { - children = fs.readdirSync(p) + children = readdirSync(p, { withFileTypes: true }) } catch (er) { - if (er && er.code === "ENOTDIR") return fs.chownSync(p, uid, gid) - throw er + if (er.code === 'ENOENT') + return + else if (er.code === 'ENOTDIR' || er.code === 'ENOTSUP') + return handleEISDirSync(p, uid, gid) + else + throw er } - if (!children.length) return fs.chownSync(p, uid, gid) - children.forEach(function (child) { - var pathChild = path.resolve(p, child) - var stats = fs.lstatSync(pathChild) - if (!stats.isSymbolicLink()) - chownrSync(pathChild, uid, gid) - }) - return fs.chownSync(p, uid, gid) + if (children && children.length) + children.forEach(child => chownrKidSync(p, child, uid, gid)) + + return handleEISDirSync(p, uid, gid) } + +module.exports = chownr +chownr.sync = chownrSync diff --git a/node_modules/chownr/package.json b/node_modules/chownr/package.json index f9a67c82435f9..5c125f8447b54 100644 --- a/node_modules/chownr/package.json +++ b/node_modules/chownr/package.json @@ -1,36 +1,32 @@ { - "_args": [ - [ - "chownr@1.0.1", - "/Users/rebecca/code/npm" - ] - ], - "_from": "chownr@1.0.1", - "_id": "chownr@1.0.1", + "_from": "chownr@1.1.4", + "_id": "chownr@1.1.4", "_inBundle": false, - "_integrity": 
"sha1-4qdQQqlVGQi+vSW4Uj1fl2nXkYE=", + "_integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", "_location": "/chownr", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "chownr@1.0.1", + "raw": "chownr@1.1.4", "name": "chownr", "escapedName": "chownr", - "rawSpec": "1.0.1", + "rawSpec": "1.1.4", "saveSpec": null, - "fetchSpec": "1.0.1" + "fetchSpec": "1.1.4" }, "_requiredBy": [ + "#USER", "/", "/cacache", - "/npm-profile/cacache", - "/npm-registry-fetch/cacache", + "/gentle-fs", + "/pacote", "/tar" ], - "_resolved": "https://registry.npmjs.org/chownr/-/chownr-1.0.1.tgz", - "_spec": "1.0.1", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", + "_shasum": "6fc9d7b42d32a583596337666e7d08084da2cc6b", + "_spec": "chownr@1.1.4", + "_where": "/Users/darcyclarke/Documents/Repos/npm/cli", "author": { "name": "Isaac Z. Schlueter", "email": "i@izs.me", @@ -39,11 +35,13 @@ "bugs": { "url": "https://github.com/isaacs/chownr/issues" }, + "bundleDependencies": false, + "deprecated": false, "description": "like `chown -R`", "devDependencies": { "mkdirp": "0.3", - "rimraf": "", - "tap": "^1.2.0" + "rimraf": "^2.7.1", + "tap": "^14.10.6" }, "files": [ "chownr.js" @@ -57,7 +55,13 @@ "url": "git://github.com/isaacs/chownr.git" }, "scripts": { - "test": "tap test/*.js" + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preversion": "npm test", + "test": "tap" + }, + "tap": { + "check-coverage": true }, - "version": "1.0.1" + "version": "1.1.4" } diff --git a/node_modules/ci-info/CHANGELOG.md b/node_modules/ci-info/CHANGELOG.md index 859a0ad12a53b..66b9cf0b28e48 100644 --- a/node_modules/ci-info/CHANGELOG.md +++ b/node_modules/ci-info/CHANGELOG.md @@ -1,5 +1,21 @@ # Changelog +## v2.0.0 + +Breaking changes: + +* Drop support for Node.js end-of-life versions: 0.10, 0.12, 4, 5, 7, + and 9 +* Team Foundation Server will now be detected as Azure Pipelines. 
The + constant `ci.TFS` no longer exists - use `ci.AZURE_PIPELINES` instead +* Remove deprecated `ci.TDDIUM` constant - use `ci.SOLANDO` instead + +New features: + +* feat: support Azure Pipelines ([#23](https://github.com/watson/ci-info/pull/23)) +* feat: support Netlify CI ([#26](https://github.com/watson/ci-info/pull/26)) +* feat: support Bitbucket pipelines PR detection ([#27](https://github.com/watson/ci-info/pull/27)) + ## v1.6.0 * feat: add Sail CI support diff --git a/node_modules/ci-info/README.md b/node_modules/ci-info/README.md index c88be8f82d5df..12c4f6217502c 100644 --- a/node_modules/ci-info/README.md +++ b/node_modules/ci-info/README.md @@ -32,41 +32,42 @@ if (ci.isCI) { Officially supported CI servers: -| Name | Constant | -|------|----------| -| [AWS CodeBuild](https://aws.amazon.com/codebuild/) | `ci.CODEBUILD` | -| [AppVeyor](http://www.appveyor.com) | `ci.APPVEYOR` | -| [Bamboo](https://www.atlassian.com/software/bamboo) by Atlassian | `ci.BAMBOO` | -| [Bitbucket Pipelines](https://bitbucket.org/product/features/pipelines) | `ci.BITBUCKET` | -| [Bitrise](https://www.bitrise.io/) | `ci.BITRISE` | -| [Buddy](https://buddy.works/) | `ci.BUDDY` | -| [Buildkite](https://buildkite.com) | `ci.BUILDKITE` | -| [CircleCI](http://circleci.com) | `ci.CIRCLE` | -| [Cirrus CI](https://cirrus-ci.org) | `ci.CIRRUS` | -| [Codeship](https://codeship.com) | `ci.CODESHIP` | -| [Drone](https://drone.io) | `ci.DRONE` | -| [dsari](https://github.com/rfinnie/dsari) | `ci.DSARI` | -| [GitLab CI](https://about.gitlab.com/gitlab-ci/) | `ci.GITLAB` | -| [GoCD](https://www.go.cd/) | `ci.GOCD` | -| [Hudson](http://hudson-ci.org) | `ci.HUDSON` | -| [Jenkins CI](https://jenkins-ci.org) | `ci.JENKINS` | -| [Magnum CI](https://magnum-ci.com) | `ci.MAGNUM` | -| [Sail CI](https://sail.ci/) | `ci.SAIL` | -| [Semaphore](https://semaphoreci.com) | `ci.SEMAPHORE` | -| [Shippable](https://www.shippable.com/) | `ci.SHIPPABLE` | -| [Solano CI](https://www.solanolabs.com/) | `ci.SOLANO` | -| [Strider CD](https://strider-cd.github.io/) | `ci.STRIDER` | -| [TaskCluster](http://docs.taskcluster.net) | `ci.TASKCLUSTER` | -| [Team Foundation Server](https://www.visualstudio.com/en-us/products/tfs-overview-vs.aspx) by Microsoft | `ci.TFS` | -| [TeamCity](https://www.jetbrains.com/teamcity/) by JetBrains | `ci.TEAMCITY` | -| [Travis CI](http://travis-ci.org) | `ci.TRAVIS` | +| Name | Constant | isPR | +|------|----------|------| +| [AWS CodeBuild](https://aws.amazon.com/codebuild/) | `ci.CODEBUILD` | 🚫 | +| [AppVeyor](http://www.appveyor.com) | `ci.APPVEYOR` | ✅ | +| [Azure Pipelines](https://azure.microsoft.com/en-us/services/devops/pipelines/) | `ci.AZURE_PIPELINES` | ✅ | +| [Bamboo](https://www.atlassian.com/software/bamboo) by Atlassian | `ci.BAMBOO` | 🚫 | +| [Bitbucket Pipelines](https://bitbucket.org/product/features/pipelines) | `ci.BITBUCKET` | ✅ | +| [Bitrise](https://www.bitrise.io/) | `ci.BITRISE` | ✅ | +| [Buddy](https://buddy.works/) | `ci.BUDDY` | ✅ | +| [Buildkite](https://buildkite.com) | `ci.BUILDKITE` | ✅ | +| [CircleCI](http://circleci.com) | `ci.CIRCLE` | ✅ | +| [Cirrus CI](https://cirrus-ci.org) | `ci.CIRRUS` | ✅ | +| [Codeship](https://codeship.com) | `ci.CODESHIP` | 🚫 | +| [Drone](https://drone.io) | `ci.DRONE` | ✅ | +| [dsari](https://github.com/rfinnie/dsari) | `ci.DSARI` | 🚫 | +| [GitLab CI](https://about.gitlab.com/gitlab-ci/) | `ci.GITLAB` | 🚫 | +| [GoCD](https://www.go.cd/) | `ci.GOCD` | 🚫 | +| [Hudson](http://hudson-ci.org) | `ci.HUDSON` | 🚫 | +| [Jenkins CI](https://jenkins-ci.org) | 
`ci.JENKINS` | ✅ | +| [Magnum CI](https://magnum-ci.com) | `ci.MAGNUM` | 🚫 | +| [Netlify CI](https://www.netlify.com/) | `ci.NETLIFY` | ✅ | +| [Sail CI](https://sail.ci/) | `ci.SAIL` | ✅ | +| [Semaphore](https://semaphoreci.com) | `ci.SEMAPHORE` | ✅ | +| [Shippable](https://www.shippable.com/) | `ci.SHIPPABLE` | ✅ | +| [Solano CI](https://www.solanolabs.com/) | `ci.SOLANO` | ✅ | +| [Strider CD](https://strider-cd.github.io/) | `ci.STRIDER` | 🚫 | +| [TaskCluster](http://docs.taskcluster.net) | `ci.TASKCLUSTER` | 🚫 | +| [TeamCity](https://www.jetbrains.com/teamcity/) by JetBrains | `ci.TEAMCITY` | 🚫 | +| [Travis CI](http://travis-ci.org) | `ci.TRAVIS` | ✅ | ## API ### `ci.name` -A string. Will contain the name of the CI server the code is running on. -If not CI server is detected, it will be `null`. +Returns a string containing name of the CI server the code is running on. +If CI server is not detected, it returns `null`. Don't depend on the value of this string not to change for a specific vendor. If you find your self writing `ci.name === 'Travis CI'`, you @@ -74,8 +75,8 @@ most likely want to use `ci.TRAVIS` instead. ### `ci.isCI` -A boolean. Will be `true` if the code is running on a CI server. -Otherwise `false`. +Returns a boolean. Will be `true` if the code is running on a CI server, +otherwise `false`. Some CI servers not listed here might still trigger the `ci.isCI` boolean to be set to `true` if they use certain vendor neutral @@ -84,15 +85,15 @@ vendor specific boolean will be set to `true`. ### `ci.isPR` -A boolean if PR detection is supported for the current CI server. Will -be `true` if a PR is being tested. Otherwise `false`. If PR detection is +Returns a boolean if PR detection is supported for the current CI server. Will +be `true` if a PR is being tested, otherwise `false`. If PR detection is not supported for the current CI server, the value will be `null`. ### `ci.` -A vendor specific boolean constants is exposed for each support CI +A vendor specific boolean constant is exposed for each support CI vendor. A constant will be `true` if the code is determined to run on -the given CI server. Otherwise `false`. +the given CI server, otherwise `false`. Examples of vendor constants are `ci.TRAVIS` or `ci.APPVEYOR`. For a complete list, see the support table above. 
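As a usage sketch only (not part of the patched files): the ci-info v2 API described in the README diff above can be consumed roughly as follows, assuming `ci-info@2.0.0` is installed and resolvable via `require`. The property names (`isCI`, `isPR`, `name`, and the vendor constants) are taken from that README section; the actual values depend on which vendor environment variables from `vendors.json` are present at runtime.

```javascript
// Hedged sketch of consuming ci-info v2, based on the README text above.
const ci = require('ci-info')

if (ci.isCI) {
  // ci.name is a display string such as 'Travis CI'; the README recommends
  // comparing against vendor constants (e.g. ci.TRAVIS) rather than the name.
  console.log('CI detected: %s', ci.name)

  // ci.isPR is true/false where PR detection is supported (see the table
  // above) and null for vendors without PR detection.
  console.log('PR build: %s', ci.isPR)

  // Vendor constants are plain booleans; in v2, Team Foundation Server
  // builds are reported via ci.AZURE_PIPELINES, replacing the old ci.TFS.
  if (ci.AZURE_PIPELINES) {
    console.log('running on Azure Pipelines')
  }
} else {
  console.log('not running on a known CI server')
}
```

The renamed `AZURE_PIPELINES` constant and the `isPR` values in this sketch correspond to the support table and breaking-change notes documented in the diff above.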
diff --git a/node_modules/ci-info/index.js b/node_modules/ci-info/index.js index 27794d49b3f21..9928fee9d34c1 100644 --- a/node_modules/ci-info/index.js +++ b/node_modules/ci-info/index.js @@ -4,7 +4,7 @@ var vendors = require('./vendors.json') var env = process.env -// Used for testinging only +// Used for testing only Object.defineProperty(exports, '_vendors', { value: vendors.map(function (v) { return v.constant }) }) diff --git a/node_modules/ci-info/package.json b/node_modules/ci-info/package.json index ac37a5fcc8555..16e90c59a8603 100644 --- a/node_modules/ci-info/package.json +++ b/node_modules/ci-info/package.json @@ -1,29 +1,28 @@ { - "_from": "ci-info@1.6.0", - "_id": "ci-info@1.6.0", + "_from": "ci-info@2.0.0", + "_id": "ci-info@2.0.0", "_inBundle": false, - "_integrity": "sha512-vsGdkwSCDpWmP80ncATX7iea5DWQemg1UgCW5J8tqjU3lYw4FBYuj89J0CTVomA7BEfvSZd84GmHko+MxFQU2A==", + "_integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==", "_location": "/ci-info", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "ci-info@1.6.0", + "raw": "ci-info@2.0.0", "name": "ci-info", "escapedName": "ci-info", - "rawSpec": "1.6.0", + "rawSpec": "2.0.0", "saveSpec": null, - "fetchSpec": "1.6.0" + "fetchSpec": "2.0.0" }, "_requiredBy": [ "#USER", - "/", - "/is-ci" + "/" ], - "_resolved": "https://registry.npmjs.org/ci-info/-/ci-info-1.6.0.tgz", - "_shasum": "2ca20dbb9ceb32d4524a683303313f0304b1e497", - "_spec": "ci-info@1.6.0", - "_where": "/Users/zkat/Documents/code/work/npm", + "_resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", + "_shasum": "67a9e964be31a51e15e5010d58e6f12834002f46", + "_spec": "ci-info@2.0.0", + "_where": "/Users/aeschright/code/cli", "author": { "name": "Thomas Watson Steen", "email": "w@tson.dk", @@ -34,8 +33,8 @@ }, "bundleDependencies": false, "coordinates": [ - 55.778271, - 12.593091 + 55.778231, + 12.593179 ], "dependencies": {}, "deprecated": false, @@ -63,5 +62,5 @@ "scripts": { "test": "standard && node test.js" }, - "version": "1.6.0" + "version": "2.0.0" } diff --git a/node_modules/ci-info/vendors.json b/node_modules/ci-info/vendors.json index a157b78cea4af..266a724ab2464 100644 --- a/node_modules/ci-info/vendors.json +++ b/node_modules/ci-info/vendors.json @@ -5,6 +5,12 @@ "env": "APPVEYOR", "pr": "APPVEYOR_PULL_REQUEST_NUMBER" }, + { + "name": "Azure Pipelines", + "constant": "AZURE_PIPELINES", + "env": "SYSTEM_TEAMFOUNDATIONCOLLECTIONURI", + "pr": "SYSTEM_PULLREQUEST_PULLREQUESTID" + }, { "name": "Bamboo", "constant": "BAMBOO", @@ -13,7 +19,8 @@ { "name": "Bitbucket Pipelines", "constant": "BITBUCKET", - "env": "BITBUCKET_COMMIT" + "env": "BITBUCKET_COMMIT", + "pr": "BITBUCKET_PR_ID" }, { "name": "Bitrise", @@ -92,6 +99,12 @@ "constant": "MAGNUM", "env": "MAGNUM" }, + { + "name": "Netlify CI", + "constant": "NETLIFY", + "env": "NETLIFY_BUILD_BASE", + "pr": { "env": "PULL_REQUEST", "ne": "false" } + }, { "name": "Sail CI", "constant": "SAIL", @@ -126,23 +139,11 @@ "constant": "TASKCLUSTER", "env": ["TASK_ID", "RUN_ID"] }, - { - "name": "Solano CI", - "constant": "TDDIUM", - "env": "TDDIUM", - "pr": "TDDIUM_PR_ID", - "deprecated": true - }, { "name": "TeamCity", "constant": "TEAMCITY", "env": "TEAMCITY_VERSION" }, - { - "name": "Team Foundation Server", - "constant": "TFS", - "env": "TF_BUILD" - }, { "name": "Travis CI", "constant": "TRAVIS", diff --git a/node_modules/cidr-regex/index.js b/node_modules/cidr-regex/index.js index 190cefa7bf7ca..f141ce14ad0dd 100644 
--- a/node_modules/cidr-regex/index.js +++ b/node_modules/cidr-regex/index.js @@ -5,9 +5,9 @@ const ipRegex = require("ip-regex"); const v4 = ipRegex.v4().source + "\\/(3[0-2]|[12]?[0-9])"; const v6 = ipRegex.v6().source + "\\/(12[0-8]|1[01][0-9]|[1-9]?[0-9])"; -const ip = module.exports = opts => opts && opts.exact ? +const cidr = module.exports = opts => opts && opts.exact ? new RegExp(`(?:^${v4}$)|(?:^${v6}$)`) : new RegExp(`(?:${v4})|(?:${v6})`, "g"); -ip.v4 = opts => opts && opts.exact ? new RegExp(`^${v4}$`) : new RegExp(v4, "g"); -ip.v6 = opts => opts && opts.exact ? new RegExp(`^${v6}$`) : new RegExp(v6, "g"); +cidr.v4 = opts => opts && opts.exact ? new RegExp(`^${v4}$`) : new RegExp(v4, "g"); +cidr.v6 = opts => opts && opts.exact ? new RegExp(`^${v6}$`) : new RegExp(v6, "g"); diff --git a/node_modules/cidr-regex/package.json b/node_modules/cidr-regex/package.json index 3aa8b4032c534..4dd1196c17dc0 100644 --- a/node_modules/cidr-regex/package.json +++ b/node_modules/cidr-regex/package.json @@ -1,32 +1,27 @@ { - "_args": [ - [ - "cidr-regex@2.0.9", - "/Users/rebecca/code/npm" - ] - ], - "_from": "cidr-regex@2.0.9", - "_id": "cidr-regex@2.0.9", + "_from": "cidr-regex@^2.0.10", + "_id": "cidr-regex@2.0.10", "_inBundle": false, - "_integrity": "sha512-F7/fBRUU45FnvSPjXdpIrc++WRSBdCiSTlyq4ZNhLKOlHFNWgtzZ0Fd+zrqI/J1j0wmlx/f5ZQDmD2GcbrNcmw==", + "_integrity": "sha512-sB3ogMQXWvreNPbJUZMRApxuRYd+KoIo4RGQ81VatjmMW6WJPo+IJZ2846FGItr9VzKo5w7DXzijPLGtSd0N3Q==", "_location": "/cidr-regex", "_phantomChildren": {}, "_requested": { - "type": "version", + "type": "range", "registry": true, - "raw": "cidr-regex@2.0.9", + "raw": "cidr-regex@^2.0.10", "name": "cidr-regex", "escapedName": "cidr-regex", - "rawSpec": "2.0.9", + "rawSpec": "^2.0.10", "saveSpec": null, - "fetchSpec": "2.0.9" + "fetchSpec": "^2.0.10" }, "_requiredBy": [ "/is-cidr" ], - "_resolved": "https://registry.npmjs.org/cidr-regex/-/cidr-regex-2.0.9.tgz", - "_spec": "2.0.9", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/cidr-regex/-/cidr-regex-2.0.10.tgz", + "_shasum": "af13878bd4ad704de77d6dc800799358b3afa70d", + "_spec": "cidr-regex@^2.0.10", + "_where": "/Users/aeschright/code/cli/node_modules/is-cidr", "author": { "name": "silverwind", "email": "me@silverwind.io" @@ -34,6 +29,7 @@ "bugs": { "url": "https://github.com/silverwind/cidr-regex/issues" }, + "bundleDependencies": false, "contributors": [ { "name": "Felipe Apostol", @@ -44,12 +40,13 @@ "dependencies": { "ip-regex": "^2.1.0" }, + "deprecated": false, "description": "Regular expression for matching IP addresses in CIDR notation", "devDependencies": { - "ava": "^0.25.0", - "eslint": "^4.19.1", - "eslint-config-silverwind": "^1.0.42", - "updates": "^3.0.0" + "eslint": "^5.6.0", + "eslint-config-silverwind": "^2.0.9", + "updates": "^4.3.0", + "ver": "^2.0.1" }, "engines": { "node": ">=4" @@ -77,5 +74,5 @@ "scripts": { "test": "make test" }, - "version": "2.0.9" + "version": "2.0.10" } diff --git a/node_modules/cli-table3/CHANGELOG.md b/node_modules/cli-table3/CHANGELOG.md index 62eb485bdf93d..3f6ba35f8393e 100644 --- a/node_modules/cli-table3/CHANGELOG.md +++ b/node_modules/cli-table3/CHANGELOG.md @@ -1,5 +1,14 @@ # Changelog +## v0.5.1 (2018-07-19) + +#### :rocket: Enhancement +* [#21](https://github.com/cli-table/cli-table3/pull/21) Import type definition from `@types/cli-table2` ([@Turbo87](https://github.com/Turbo87)) + +#### Committers: 1 +- Tobias Bieniek ([Turbo87](https://github.com/Turbo87)) + + ## v0.5.0 (2018-06-11) #### :boom: 
Breaking Change @@ -18,10 +27,9 @@ * [#3](https://github.com/cli-table/cli-table3/pull/3) Add `yarn.lock` file. ([@Turbo87](https://github.com/Turbo87)) * [#1](https://github.com/cli-table/cli-table3/pull/1) Skip broken test. ([@Turbo87](https://github.com/Turbo87)) -#### Committers: 3 +#### Committers: 2 - Daniel Ruf ([DanielRuf](https://github.com/DanielRuf)) - Tobias Bieniek ([Turbo87](https://github.com/Turbo87)) -- [dependabot[bot]](https://github.com/apps/dependabot) ## v0.4.0 (2018-06-10) diff --git a/node_modules/cli-table3/index.d.ts b/node_modules/cli-table3/index.d.ts new file mode 100644 index 0000000000000..bdf17e270ea84 --- /dev/null +++ b/node_modules/cli-table3/index.d.ts @@ -0,0 +1,95 @@ +declare namespace CliTable3 { + type CharName = + "top" | + "top-mid" | + "top-left" | + "top-right" | + "bottom" | + "bottom-mid" | + "bottom-left" | + "bottom-right" | + "left" | + "left-mid" | + "mid" | + "mid-mid" | + "right" | + "right-mid" | + "middle"; + + type HorizontalAlignment = "left" | "center" | "right"; + type VerticalAlignment = "top" | "center" | "bottom"; + + interface TableOptions { + truncate: string; + colWidths: Array; + rowHeights: Array; + colAligns: HorizontalAlignment[]; + rowAligns: VerticalAlignment[]; + head: string[]; + wordWrap: boolean; + } + + interface TableInstanceOptions extends TableOptions { + chars: Record; + style: { + "padding-left": number; + "padding-right": number; + head: string[]; + border: string[]; + compact: boolean; + }; + } + + interface TableConstructorOptions extends Partial { + chars?: Partial>; + style?: Partial; + } + + type CellValue = boolean | number | string | null | undefined; + + interface CellOptions { + content: CellValue; + chars?: Partial>; + truncate?: string; + colSpan?: number; + rowSpan?: number; + hAlign?: HorizontalAlignment; + vAlign?: VerticalAlignment; + style?: { + "padding-left"?: number; + "padding-right"?: number; + head?: string[]; + border?: string[]; + }; + } + + interface GenericTable extends Array { + options: TableInstanceOptions; + readonly width: number; + } + + type Table = HorizontalTable | VerticalTable | CrossTable; + type Cell = CellValue | CellOptions; + + type HorizontalTable = GenericTable; + type HorizontalTableRow = Cell[]; + + type VerticalTable = GenericTable; + interface VerticalTableRow { + [name: string]: Cell; + } + + type CrossTable = GenericTable; + interface CrossTableRow { + [name: string]: Cell[]; + } +} + +interface CliTable3 { + new (options?: CliTable3.TableConstructorOptions): CliTable3.Table; + readonly prototype: CliTable3.Table; +} + +declare const CliTable3: CliTable3; + +export = CliTable3; diff --git a/node_modules/cli-table3/package.json b/node_modules/cli-table3/package.json index d0545c93f7df6..6ee81dc3f1b78 100644 --- a/node_modules/cli-table3/package.json +++ b/node_modules/cli-table3/package.json @@ -1,28 +1,29 @@ { - "_from": "cli-table3", - "_id": "cli-table3@0.5.0", + "_from": "cli-table3@0.5.1", + "_id": "cli-table3@0.5.1", "_inBundle": false, - "_integrity": "sha512-c7YHpUyO1SaKaO7kYtxd5NZ8FjAmSK3LpKkuzdwn+2CwpFxBpdoQLm+OAnnCfoEl7onKhN9PKQi1lsHuAIUqGQ==", + "_integrity": "sha512-7Qg2Jrep1S/+Q3EceiZtQcDPWxhAvBw+ERf1162v4sikJrvojMHFqXt8QIVha8UlH9rgU0BeWPytZ9/TzYqlUw==", "_location": "/cli-table3", "_phantomChildren": {}, "_requested": { - "type": "tag", + "type": "version", "registry": true, - "raw": "cli-table3", + "raw": "cli-table3@0.5.1", "name": "cli-table3", "escapedName": "cli-table3", - "rawSpec": "", + "rawSpec": "0.5.1", "saveSpec": null, - "fetchSpec": 
"latest" + "fetchSpec": "0.5.1" }, "_requiredBy": [ "#USER", - "/" + "/", + "/npm-audit-report" ], - "_resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.5.0.tgz", - "_shasum": "adb2f025715f4466e67629783c8d73e9030eb4bd", - "_spec": "cli-table3", - "_where": "/Users/zkat/Documents/code/work/npm", + "_resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.5.1.tgz", + "_shasum": "0252372d94dfc40dbd8df06005f48f31f656f202", + "_spec": "cli-table3@0.5.1", + "_where": "/Users/aeschright/code/cli", "author": { "name": "James Talmage" }, @@ -54,8 +55,8 @@ "eslint-plugin-prettier": "^2.6.0", "jest": "^23.1.0", "jest-runner-eslint": "^0.6.0", - "lerna-changelog": "^0.7.0", - "prettier": "1.13.5" + "lerna-changelog": "^0.8.0", + "prettier": "1.13.7" }, "directories": { "test": "test" @@ -65,6 +66,7 @@ }, "files": [ "src/", + "index.d.ts", "index.js" ], "homepage": "https://github.com/cli-table/cli-table3", @@ -124,5 +126,6 @@ "test": "jest --color", "test:watch": "jest --color --watchAll --notify" }, - "version": "0.5.0" + "types": "index.d.ts", + "version": "0.5.1" } diff --git a/node_modules/cmd-shim/.npmignore b/node_modules/cmd-shim/.npmignore deleted file mode 100644 index 699b5d4f13693..0000000000000 --- a/node_modules/cmd-shim/.npmignore +++ /dev/null @@ -1,16 +0,0 @@ -lib-cov -*.seed -*.log -*.csv -*.dat -*.out -*.pid -*.gz - -pids -logs -results - -npm-debug.log - -node_modules diff --git a/node_modules/cmd-shim/.travis.yml b/node_modules/cmd-shim/.travis.yml deleted file mode 100644 index 2ca91f28954d7..0000000000000 --- a/node_modules/cmd-shim/.travis.yml +++ /dev/null @@ -1,4 +0,0 @@ -language: node_js -node_js: - - "0.10" - - "0.8" \ No newline at end of file diff --git a/node_modules/cmd-shim/LICENSE b/node_modules/cmd-shim/LICENSE index 0c44ae716db8f..20a4762540923 100644 --- a/node_modules/cmd-shim/LICENSE +++ b/node_modules/cmd-shim/LICENSE @@ -1,27 +1,15 @@ -Copyright (c) Isaac Z. Schlueter ("Author") -All rights reserved. +The ISC License -The BSD License +Copyright (c) npm, Inc. and Contributors -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS -BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR -BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, -WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE -OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN -IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/cmd-shim/README.md b/node_modules/cmd-shim/README.md index ff6745f8e5970..2b8b2f468f2c3 100644 --- a/node_modules/cmd-shim/README.md +++ b/node_modules/cmd-shim/README.md @@ -5,8 +5,8 @@ since symlinks are not suitable for this purpose there. On Unix systems, you should use a symbolic link instead. -[![Build Status](https://img.shields.io/travis/ForbesLindesay/cmd-shim/master.svg)](https://travis-ci.org/ForbesLindesay/cmd-shim) -[![Dependency Status](https://img.shields.io/david/ForbesLindesay/cmd-shim.svg)](https://david-dm.org/ForbesLindesay/cmd-shim) +[![Build Status](https://img.shields.io/travis/npm/cmd-shim/master.svg)](https://travis-ci.org/npm/cmd-shim) +[![Dependency Status](https://img.shields.io/david/npm/cmd-shim.svg)](https://david-dm.org/npm/cmd-shim) [![NPM version](https://img.shields.io/npm/v/cmd-shim.svg)](https://www.npmjs.com/package/cmd-shim) ## Installation diff --git a/node_modules/cmd-shim/index.js b/node_modules/cmd-shim/index.js index 9f22e103a5fe8..010bd38d398d2 100644 --- a/node_modules/cmd-shim/index.js +++ b/node_modules/cmd-shim/index.js @@ -6,7 +6,7 @@ // "#! " // // Write a binroot/pkg.bin + ".cmd" file that has this line in it: -// @ %~dp0 %* +// @ %dp0% %* module.exports = cmdShim cmdShim.ifExists = cmdShimIfExists @@ -15,7 +15,8 @@ var fs = require("graceful-fs") var mkdir = require("mkdirp") , path = require("path") - , shebangExpr = /^#\!\s*(?:\/usr\/bin\/env)?\s*([^ \t]+)(.*)$/ + , toBatchSyntax = require("./lib/to-batch-syntax") + , shebangExpr = /^#\!\s*(?:\/usr\/bin\/env)?\s*([^ \t]+=[^ \t]+\s+)*\s*([^ \t]+)(.*)$/ function cmdShimIfExists (from, to, cb) { fs.stat(from, function (er) { @@ -42,9 +43,10 @@ function cmdShim (from, to, cb) { } function cmdShim_ (from, to, cb) { - var then = times(2, next, cb) + var then = times(3, next, cb) rm(to, then) rm(to + ".cmd", then) + rm(to + ".ps1", then) function next(er) { writeShim(from, to, cb) @@ -60,63 +62,99 @@ function writeShim (from, to, cb) { if (er) return cb(er) fs.readFile(from, "utf8", function (er, data) { - if (er) return writeShim_(from, to, null, null, cb) + if (er) return writeShim_(from, to, null, null, null, cb) var firstLine = data.trim().split(/\r*\n/)[0] , shebang = firstLine.match(shebangExpr) - if (!shebang) return writeShim_(from, to, null, null, cb) - var prog = shebang[1] - , args = shebang[2] || "" - return writeShim_(from, to, prog, args, cb) + if (!shebang) return writeShim_(from, to, null, null, null, cb) + var vars = shebang[1] || "" + , prog = shebang[2] + , args = shebang[3] || "" + return writeShim_(from, to, prog, args, vars, cb) }) }) } -function writeShim_ (from, to, prog, args, cb) { + +function writeShim_ (from, to, prog, args, variables, cb) { var shTarget = path.relative(path.dirname(to), from) , target = shTarget.split("/").join("\\") , longProg , shProg = prog && prog.split("\\").join("/") , shLongProg + , pwshProg = shProg && "\"" + shProg + "$exe\"" + , pwshLongProg shTarget = shTarget.split("\\").join("/") args = args || "" + variables = variables || 
"" if (!prog) { - prog = "\"%~dp0\\" + target + "\"" + prog = "\"%dp0%\\" + target + "\"" shProg = "\"$basedir/" + shTarget + "\"" + pwshProg = shProg args = "" target = "" shTarget = "" } else { - longProg = "\"%~dp0\\" + prog + ".exe\"" + longProg = "\"%dp0%\\" + prog + ".exe\"" shLongProg = "\"$basedir/" + prog + "\"" - target = "\"%~dp0\\" + target + "\"" + pwshLongProg = "\"$basedir/" + prog + "$exe\"" + target = "\"%dp0%\\" + target + "\"" shTarget = "\"$basedir/" + shTarget + "\"" } - // @IF EXIST "%~dp0\node.exe" ( - // "%~dp0\node.exe" "%~dp0\.\node_modules\npm\bin\npm-cli.js" %* + // @SETLOCAL + // @CALL :find_dp0 + // + // @IF EXIST "%dp0%\node.exe" ( + // @SET "_prog=%dp0%\node.exe" // ) ELSE ( - // SETLOCAL - // SET PATHEXT=%PATHEXT:;.JS;=;% - // node "%~dp0\.\node_modules\npm\bin\npm-cli.js" %* + // @SET "_prog=node" + // @SET PATHEXT=%PATHEXT:;.JS;=;% // ) + // + // "%_prog%" "%dp0%\.\node_modules\npm\bin\npm-cli.js" %* + // @ENDLOCAL + // @EXIT /b %errorlevel% + // + // :find_dp0 + // SET dp0=%~dp0 + // EXIT /b + // + // Subroutine trick to fix https://github.com/npm/cmd-shim/issues/10 + var head = '@ECHO off\r\n' + + 'SETLOCAL\r\n' + + 'CALL :find_dp0\r\n' + var foot = 'ENDLOCAL\r\n' + + 'EXIT /b %errorlevel%\r\n' + + ':find_dp0\r\n' + + 'SET dp0=%~dp0\r\n' + + 'EXIT /b\r\n' + var cmd if (longProg) { - cmd = "@IF EXIST " + longProg + " (\r\n" - + " " + longProg + " " + args + " " + target + " %*\r\n" + shLongProg = shLongProg.trim(); + args = args.trim(); + var variableDeclarationsAsBatch = toBatchSyntax.convertToSetCommands(variables) + cmd = head + + variableDeclarationsAsBatch + + "\r\n" + + "IF EXIST " + longProg + " (\r\n" + + " SET \"_prog=" + longProg.replace(/(^")|("$)/g, '') + "\"\r\n" + ") ELSE (\r\n" - + " @SETLOCAL\r\n" - + " @SET PATHEXT=%PATHEXT:;.JS;=;%\r\n" - + " " + prog + " " + args + " " + target + " %*\r\n" - + ")" + + " SET \"_prog=" + prog.replace(/(^")|("$)/g, '') + "\"\r\n" + + " SET PATHEXT=%PATHEXT:;.JS;=;%\r\n" + + ")\r\n" + + "\r\n" + + "\"%_prog%\" " + args + " " + target + " %*\r\n" + + foot } else { - cmd = "@" + prog + " " + args + " " + target + " %*\r\n" + cmd = head + prog + " " + args + " " + target + " %*\r\n" + foot } // #!/bin/sh // basedir=`dirname "$0"` // // case `uname` in - // *CYGWIN*) basedir=`cygpath -w "$basedir"`;; + // *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; // esac // // if [ -x "$basedir/node.exe" ]; then @@ -130,30 +168,76 @@ function writeShim_ (from, to, prog, args, cb) { var sh = "#!/bin/sh\n" - if (shLongProg) { - sh = sh - + "basedir=$(dirname \"$(echo \"$0\" | sed -e 's,\\\\,/,g')\")\n" - + "\n" - + "case `uname` in\n" - + " *CYGWIN*) basedir=`cygpath -w \"$basedir\"`;;\n" - + "esac\n" - + "\n" + sh = sh + + "basedir=$(dirname \"$(echo \"$0\" | sed -e 's,\\\\,/,g')\")\n" + + "\n" + + "case `uname` in\n" + + " *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w \"$basedir\"`;;\n" + + "esac\n" + + "\n" + if (shLongProg) { sh = sh + "if [ -x "+shLongProg+" ]; then\n" - + " " + shLongProg + " " + args + " " + shTarget + " \"$@\"\n" + + " " + variables + shLongProg + " " + args + " " + shTarget + " \"$@\"\n" + " ret=$?\n" + "else \n" - + " " + shProg + " " + args + " " + shTarget + " \"$@\"\n" + + " " + variables + shProg + " " + args + " " + shTarget + " \"$@\"\n" + " ret=$?\n" + "fi\n" + "exit $ret\n" } else { - sh = shProg + " " + args + " " + shTarget + " \"$@\"\n" + sh = sh + + shProg + " " + args + " " + shTarget + " \"$@\"\n" + "exit $?\n" } - var then = times(2, next, cb) + // #!/usr/bin/env pwsh + 
// $basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + // + // $ret=0 + // $exe = "" + // if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + // # Fix case when both the Windows and Linux builds of Node + // # are installed in the same directory + // $exe = ".exe" + // } + // if (Test-Path "$basedir/node") { + // & "$basedir/node$exe" "$basedir/node_modules/npm/bin/npm-cli.js" $args + // $ret=$LASTEXITCODE + // } else { + // & "node$exe" "$basedir/node_modules/npm/bin/npm-cli.js" $args + // $ret=$LASTEXITCODE + // } + // exit $ret + var pwsh = "#!/usr/bin/env pwsh\n" + + "$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent\n" + + "\n" + + "$exe=\"\"\n" + + "if ($PSVersionTable.PSVersion -lt \"6.0\" -or $IsWindows) {\n" + + " # Fix case when both the Windows and Linux builds of Node\n" + + " # are installed in the same directory\n" + + " $exe=\".exe\"\n" + + "}\n" + if (shLongProg) { + pwsh = pwsh + + "$ret=0\n" + + "if (Test-Path " + pwshLongProg + ") {\n" + + " & " + pwshLongProg + " " + args + " " + shTarget + " $args\n" + + " $ret=$LASTEXITCODE\n" + + "} else {\n" + + " & " + pwshProg + " " + args + " " + shTarget + " $args\n" + + " $ret=$LASTEXITCODE\n" + + "}\n" + + "exit $ret\n" + } else { + pwsh = pwsh + + "& " + pwshProg + " " + args + " " + shTarget + " $args\n" + + "exit $LASTEXITCODE\n" + } + + var then = times(3, next, cb) + fs.writeFile(to + ".ps1", pwsh, "utf8", then) fs.writeFile(to + ".cmd", cmd, "utf8", then) fs.writeFile(to, sh, "utf8", then) function next () { @@ -162,9 +246,10 @@ function writeShim_ (from, to, prog, args, cb) { } function chmodShim (to, cb) { - var then = times(2, cb, cb) - fs.chmod(to, 0755, then) - fs.chmod(to + ".cmd", 0755, then) + var then = times(3, cb, cb) + fs.chmod(to, "0755", then) + fs.chmod(to + ".cmd", "0755", then) + fs.chmod(to + ".ps1", "0755", then) } function times(n, ok, cb) { diff --git a/node_modules/cmd-shim/lib/to-batch-syntax.js b/node_modules/cmd-shim/lib/to-batch-syntax.js new file mode 100644 index 0000000000000..734be551d2568 --- /dev/null +++ b/node_modules/cmd-shim/lib/to-batch-syntax.js @@ -0,0 +1,51 @@ +exports.replaceDollarWithPercentPair = replaceDollarWithPercentPair +exports.convertToSetCommand = convertToSetCommand +exports.convertToSetCommands = convertToSetCommands + +function convertToSetCommand(key, value) { + var line = "" + key = key || "" + key = key.trim() + value = value || "" + value = value.trim() + if(key && value && value.length > 0) { + line = "@SET " + key + "=" + replaceDollarWithPercentPair(value) + "\r\n" + } + return line +} + +function extractVariableValuePairs(declarations) { + var pairs = {} + declarations.map(function(declaration) { + var split = declaration.split("=") + pairs[split[0]]=split[1] + }) + return pairs +} + +function convertToSetCommands(variableString) { + var variableValuePairs = extractVariableValuePairs(variableString.split(" ")) + var variableDeclarationsAsBatch = "" + Object.keys(variableValuePairs).forEach(function (key) { + variableDeclarationsAsBatch += convertToSetCommand(key, variableValuePairs[key]) + }) + return variableDeclarationsAsBatch +} + +function replaceDollarWithPercentPair(value) { + var dollarExpressions = /\$\{?([^\$@#\?\- \t{}:]+)\}?/g + var result = "" + var startIndex = 0 + do { + var match = dollarExpressions.exec(value) + if(match) { + var betweenMatches = value.substring(startIndex, match.index) || "" + result += betweenMatches + "%" + match[1] + "%" + startIndex = dollarExpressions.lastIndex + } + } while 
(dollarExpressions.lastIndex > 0) + result += value.substr(startIndex) + return result +} + + diff --git a/node_modules/cmd-shim/package.json b/node_modules/cmd-shim/package.json index 97e73e46fe133..43a7b36a8fdd0 100644 --- a/node_modules/cmd-shim/package.json +++ b/node_modules/cmd-shim/package.json @@ -1,54 +1,60 @@ { - "_args": [ - [ - "cmd-shim@2.0.2", - "/Users/rebecca/code/npm" - ] - ], - "_from": "cmd-shim@2.0.2", - "_id": "cmd-shim@2.0.2", + "_from": "cmd-shim@3.0.3", + "_id": "cmd-shim@3.0.3", "_inBundle": false, - "_integrity": "sha1-b8vamUg6j9FdfTChlspp1oii79s=", + "_integrity": "sha512-DtGg+0xiFhQIntSBRzL2fRQBnmtAVwXIDo4Qq46HPpObYquxMaZS4sb82U9nH91qJrlosC1wa9gwr0QyL/HypA==", "_location": "/cmd-shim", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "cmd-shim@2.0.2", + "raw": "cmd-shim@3.0.3", "name": "cmd-shim", "escapedName": "cmd-shim", - "rawSpec": "2.0.2", + "rawSpec": "3.0.3", "saveSpec": null, - "fetchSpec": "2.0.2" + "fetchSpec": "3.0.3" }, "_requiredBy": [ + "#USER", "/", "/bin-links" ], - "_resolved": "https://registry.npmjs.org/cmd-shim/-/cmd-shim-2.0.2.tgz", - "_spec": "2.0.2", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/cmd-shim/-/cmd-shim-3.0.3.tgz", + "_shasum": "2c35238d3df37d98ecdd7d5f6b8dc6b21cadc7cb", + "_spec": "cmd-shim@3.0.3", + "_where": "/Users/isaacs/dev/npm/cli", "bugs": { - "url": "https://github.com/ForbesLindesay/cmd-shim/issues" + "url": "https://github.com/npm/cmd-shim/issues" }, + "bundleDependencies": false, "dependencies": { "graceful-fs": "^4.1.2", "mkdirp": "~0.5.0" }, + "deprecated": false, "description": "Used in npm for command line application support", "devDependencies": { "rimraf": "~2.2.8", - "tap": "~0.4.11" + "tap": "^12.7.0" }, - "homepage": "https://github.com/ForbesLindesay/cmd-shim#readme", - "license": "BSD-2-Clause", + "files": [ + "index.js", + "lib" + ], + "homepage": "https://github.com/npm/cmd-shim#readme", + "license": "ISC", "name": "cmd-shim", "repository": { "type": "git", - "url": "git+https://github.com/ForbesLindesay/cmd-shim.git" + "url": "git+https://github.com/npm/cmd-shim.git" }, "scripts": { - "test": "tap test/*.js" + "postpublish": "git push origin --follow-tags", + "postversion": "npm publish", + "preversion": "npm test", + "snap": "TAP_SNAPSHOT=1 tap test/*.js --100", + "test": "tap test/*.js --100" }, - "version": "2.0.2" + "version": "3.0.3" } diff --git a/node_modules/cmd-shim/test/00-setup.js b/node_modules/cmd-shim/test/00-setup.js deleted file mode 100644 index 04ec2b256b135..0000000000000 --- a/node_modules/cmd-shim/test/00-setup.js +++ /dev/null @@ -1,34 +0,0 @@ -var test = require('tap').test -var mkdirp = require('mkdirp') -var fs = require('fs') -var path = require('path') -var fixtures = path.resolve(__dirname, 'fixtures') - -var froms = { - 'from.exe': 'exe', - 'from.env': '#!/usr/bin/env node\nconsole.log(/hi/)\n', - 'from.env.args': '#!/usr/bin/env node --expose_gc\ngc()\n', - 'from.sh': '#!/usr/bin/sh\necho hi\n', - 'from.sh.args': '#!/usr/bin/sh -x\necho hi\n' -} - -var cmdShim = require('../') - -test('create fixture', function (t) { - mkdirp(fixtures, function (er) { - if (er) - throw er - t.pass('made dir') - Object.keys(froms).forEach(function (f) { - t.test('write ' + f, function (t) { - fs.writeFile(path.resolve(fixtures, f), froms[f], function (er) { - if (er) - throw er - t.pass('wrote ' + f) - t.end() - }) - }) - }) - t.end() - }) -}) diff --git a/node_modules/cmd-shim/test/basic.js 
b/node_modules/cmd-shim/test/basic.js deleted file mode 100755 index 09823158b865a..0000000000000 --- a/node_modules/cmd-shim/test/basic.js +++ /dev/null @@ -1,175 +0,0 @@ -var test = require('tap').test -var mkdirp = require('mkdirp') -var fs = require('fs') -var path = require('path') -var fixtures = path.resolve(__dirname, 'fixtures') - -var cmdShim = require('../') - -test('no shebang', function (t) { - var from = path.resolve(fixtures, 'from.exe') - var to = path.resolve(fixtures, 'exe.shim') - cmdShim(from, to, function(er) { - if (er) - throw er - t.equal(fs.readFileSync(to, 'utf8'), - "\"$basedir/from.exe\" \"$@\"\nexit $?\n") - t.equal(fs.readFileSync(to + '.cmd', 'utf8'), - "@\"%~dp0\\from.exe\" %*\r\n") - t.end() - }) -}) - -test('env shebang', function (t) { - var from = path.resolve(fixtures, 'from.env') - var to = path.resolve(fixtures, 'env.shim') - cmdShim(from, to, function(er) { - if (er) - throw er - console.error('%j', fs.readFileSync(to, 'utf8')) - console.error('%j', fs.readFileSync(to + '.cmd', 'utf8')) - - t.equal(fs.readFileSync(to, 'utf8'), - "#!/bin/sh"+ - "\nbasedir=$(dirname \"$(echo \"$0\" | sed -e 's,\\\\,/,g')\")"+ - "\n"+ - "\ncase `uname` in"+ - "\n *CYGWIN*) basedir=`cygpath -w \"$basedir\"`;;"+ - "\nesac"+ - "\n"+ - "\nif [ -x \"$basedir/node\" ]; then"+ - "\n \"$basedir/node\" \"$basedir/from.env\" \"$@\""+ - "\n ret=$?"+ - "\nelse "+ - "\n node \"$basedir/from.env\" \"$@\""+ - "\n ret=$?"+ - "\nfi"+ - "\nexit $ret"+ - "\n") - t.equal(fs.readFileSync(to + '.cmd', 'utf8'), - "@IF EXIST \"%~dp0\\node.exe\" (\r"+ - "\n \"%~dp0\\node.exe\" \"%~dp0\\from.env\" %*\r"+ - "\n) ELSE (\r"+ - "\n @SETLOCAL\r"+ - "\n @SET PATHEXT=%PATHEXT:;.JS;=;%\r"+ - "\n node \"%~dp0\\from.env\" %*\r"+ - "\n)") - t.end() - }) -}) - -test('env shebang with args', function (t) { - var from = path.resolve(fixtures, 'from.env.args') - var to = path.resolve(fixtures, 'env.args.shim') - cmdShim(from, to, function(er) { - if (er) - throw er - console.error('%j', fs.readFileSync(to, 'utf8')) - console.error('%j', fs.readFileSync(to + '.cmd', 'utf8')) - - t.equal(fs.readFileSync(to, 'utf8'), - "#!/bin/sh"+ - "\nbasedir=$(dirname \"$(echo \"$0\" | sed -e 's,\\\\,/,g')\")"+ - "\n"+ - "\ncase `uname` in"+ - "\n *CYGWIN*) basedir=`cygpath -w \"$basedir\"`;;"+ - "\nesac"+ - "\n"+ - "\nif [ -x \"$basedir/node\" ]; then"+ - "\n \"$basedir/node\" --expose_gc \"$basedir/from.env.args\" \"$@\""+ - "\n ret=$?"+ - "\nelse "+ - "\n node --expose_gc \"$basedir/from.env.args\" \"$@\""+ - "\n ret=$?"+ - "\nfi"+ - "\nexit $ret"+ - "\n") - t.equal(fs.readFileSync(to + '.cmd', 'utf8'), - "@IF EXIST \"%~dp0\\node.exe\" (\r"+ - "\n \"%~dp0\\node.exe\" --expose_gc \"%~dp0\\from.env.args\" %*\r"+ - "\n) ELSE (\r"+ - "\n @SETLOCAL\r"+ - "\n @SET PATHEXT=%PATHEXT:;.JS;=;%\r"+ - "\n node --expose_gc \"%~dp0\\from.env.args\" %*\r"+ - "\n)") - t.end() - }) -}) - -test('explicit shebang', function (t) { - var from = path.resolve(fixtures, 'from.sh') - var to = path.resolve(fixtures, 'sh.shim') - cmdShim(from, to, function(er) { - if (er) - throw er - console.error('%j', fs.readFileSync(to, 'utf8')) - console.error('%j', fs.readFileSync(to + '.cmd', 'utf8')) - - t.equal(fs.readFileSync(to, 'utf8'), - "#!/bin/sh" + - "\nbasedir=$(dirname \"$(echo \"$0\" | sed -e 's,\\\\,/,g')\")" + - "\n" + - "\ncase `uname` in" + - "\n *CYGWIN*) basedir=`cygpath -w \"$basedir\"`;;" + - "\nesac" + - "\n" + - "\nif [ -x \"$basedir//usr/bin/sh\" ]; then" + - "\n \"$basedir//usr/bin/sh\" \"$basedir/from.sh\" \"$@\"" + - "\n ret=$?" 
+ - "\nelse " + - "\n /usr/bin/sh \"$basedir/from.sh\" \"$@\"" + - "\n ret=$?" + - "\nfi" + - "\nexit $ret" + - "\n") - - t.equal(fs.readFileSync(to + '.cmd', 'utf8'), - "@IF EXIST \"%~dp0\\/usr/bin/sh.exe\" (\r" + - "\n \"%~dp0\\/usr/bin/sh.exe\" \"%~dp0\\from.sh\" %*\r" + - "\n) ELSE (\r" + - "\n @SETLOCAL\r"+ - "\n @SET PATHEXT=%PATHEXT:;.JS;=;%\r"+ - "\n /usr/bin/sh \"%~dp0\\from.sh\" %*\r" + - "\n)") - t.end() - }) -}) - -test('explicit shebang with args', function (t) { - var from = path.resolve(fixtures, 'from.sh.args') - var to = path.resolve(fixtures, 'sh.args.shim') - cmdShim(from, to, function(er) { - if (er) - throw er - console.error('%j', fs.readFileSync(to, 'utf8')) - console.error('%j', fs.readFileSync(to + '.cmd', 'utf8')) - - t.equal(fs.readFileSync(to, 'utf8'), - "#!/bin/sh" + - "\nbasedir=$(dirname \"$(echo \"$0\" | sed -e 's,\\\\,/,g')\")" + - "\n" + - "\ncase `uname` in" + - "\n *CYGWIN*) basedir=`cygpath -w \"$basedir\"`;;" + - "\nesac" + - "\n" + - "\nif [ -x \"$basedir//usr/bin/sh\" ]; then" + - "\n \"$basedir//usr/bin/sh\" -x \"$basedir/from.sh.args\" \"$@\"" + - "\n ret=$?" + - "\nelse " + - "\n /usr/bin/sh -x \"$basedir/from.sh.args\" \"$@\"" + - "\n ret=$?" + - "\nfi" + - "\nexit $ret" + - "\n") - - t.equal(fs.readFileSync(to + '.cmd', 'utf8'), - "@IF EXIST \"%~dp0\\/usr/bin/sh.exe\" (\r" + - "\n \"%~dp0\\/usr/bin/sh.exe\" -x \"%~dp0\\from.sh.args\" %*\r" + - "\n) ELSE (\r" + - "\n @SETLOCAL\r"+ - "\n @SET PATHEXT=%PATHEXT:;.JS;=;%\r"+ - "\n /usr/bin/sh -x \"%~dp0\\from.sh.args\" %*\r" + - "\n)") - t.end() - }) -}) diff --git a/node_modules/cmd-shim/test/zz-cleanup.js b/node_modules/cmd-shim/test/zz-cleanup.js deleted file mode 100644 index 9425031001cb0..0000000000000 --- a/node_modules/cmd-shim/test/zz-cleanup.js +++ /dev/null @@ -1,13 +0,0 @@ -var test = require('tap').test -var path = require('path') -var fixtures = path.resolve(__dirname, 'fixtures') -var rimraf = require('rimraf') - -test('cleanup', function(t) { - rimraf(fixtures, function(er) { - if (er) - throw er - t.pass('cleaned up') - t.end() - }) -}) diff --git a/node_modules/colors/LICENSE b/node_modules/colors/LICENSE index 3de4e33b48242..17880ff02972b 100644 --- a/node_modules/colors/LICENSE +++ b/node_modules/colors/LICENSE @@ -1,3 +1,5 @@ +MIT License + Original Library - Copyright (c) Marak Squires @@ -20,4 +22,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. \ No newline at end of file +THE SOFTWARE. 
diff --git a/node_modules/colors/README.md b/node_modules/colors/README.md index 0326aab340af4..4bebb6c92b073 100644 --- a/node_modules/colors/README.md +++ b/node_modules/colors/README.md @@ -1,4 +1,10 @@ -# colors.js [![Build Status](https://travis-ci.org/Marak/colors.js.svg?branch=master)](https://travis-ci.org/Marak/colors.js) +# colors.js +[![Build Status](https://travis-ci.org/Marak/colors.js.svg?branch=master)](https://travis-ci.org/Marak/colors.js) +[![version](https://img.shields.io/npm/v/colors.svg)](https://www.npmjs.org/package/colors) +[![dependencies](https://david-dm.org/Marak/colors.js.svg)](https://david-dm.org/Marak/colors.js) +[![devDependencies](https://david-dm.org/Marak/colors.js/dev-status.svg)](https://david-dm.org/Marak/colors.js#info=devDependencies) + +Please check out the [roadmap](ROADMAP.md) for upcoming features and releases. Please open Issues to provide feedback, and check the `develop` branch for the latest bleeding-edge updates. ## get color and style in your node.js console @@ -163,7 +169,7 @@ console.log(colors.warn("this is a warning")); ``` -You can also combine them: +### Combining Colors ```javascript var colors = require('colors'); diff --git a/node_modules/colors/examples/normal-usage.js b/node_modules/colors/examples/normal-usage.js index 2818741e1f977..cc8d05ff4f23a 100644 --- a/node_modules/colors/examples/normal-usage.js +++ b/node_modules/colors/examples/normal-usage.js @@ -1,34 +1,36 @@ var colors = require('../lib/index'); -console.log("First some yellow text".yellow); +console.log('First some yellow text'.yellow); -console.log("Underline that text".yellow.underline); +console.log('Underline that text'.yellow.underline); -console.log("Make it bold and red".red.bold); +console.log('Make it bold and red'.red.bold); -console.log(("Double Raindows All Day Long").rainbow) +console.log(('Double Raindows All Day Long').rainbow); -console.log("Drop the bass".trap) +console.log('Drop the bass'.trap); -console.log("DROP THE RAINBOW BASS".trap.rainbow) +console.log('DROP THE RAINBOW BASS'.trap.rainbow); +// styles not widely supported +console.log('Chains are also cool.'.bold.italic.underline.red); -console.log('Chains are also cool.'.bold.italic.underline.red); // styles not widely supported - -console.log('So '.green + 'are'.underline + ' ' + 'inverse'.inverse + ' styles! '.yellow.bold); // styles not widely supported -console.log("Zebras are so fun!".zebra); +// styles not widely supported +console.log('So '.green + 'are'.underline + ' ' + 'inverse'.inverse + + ' styles! 
'.yellow.bold); +console.log('Zebras are so fun!'.zebra); // // Remark: .strikethrough may not work with Mac OS Terminal App // -console.log("This is " + "not".strikethrough + " fun."); +console.log('This is ' + 'not'.strikethrough + ' fun.'); -console.log('Background color attack!'.black.bgWhite) -console.log('Use random styles on everything!'.random) -console.log('America, Heck Yeah!'.america) +console.log('Background color attack!'.black.bgWhite); +console.log('Use random styles on everything!'.random); +console.log('America, Heck Yeah!'.america); -console.log('Setting themes is useful') +console.log('Setting themes is useful'); // // Custom themes @@ -45,30 +47,35 @@ colors.setTheme({ help: 'cyan', warn: 'yellow', debug: 'blue', - error: 'red' + error: 'red', }); // outputs red text -console.log("this is an error".error); +console.log('this is an error'.error); // outputs yellow text -console.log("this is a warning".warn); +console.log('this is a warning'.warn); // outputs grey text -console.log("this is an input".input); +console.log('this is an input'.input); console.log('Generic logging theme as file'.green.bold.underline); // Load a theme from file -colors.setTheme(__dirname + '/../themes/generic-logging.js'); +try { + colors.setTheme(require(__dirname + '/../themes/generic-logging.js')); +} catch (err) { + console.log(err); +} // outputs red text -console.log("this is an error".error); +console.log('this is an error'.error); // outputs yellow text -console.log("this is a warning".warn); +console.log('this is a warning'.warn); // outputs grey text -console.log("this is an input".input); +console.log('this is an input'.input); + +// console.log("Don't summon".zalgo) -//console.log("Don't summon".zalgo) \ No newline at end of file diff --git a/node_modules/colors/examples/safe-string.js b/node_modules/colors/examples/safe-string.js index 111b363a4a7a4..98994873520ff 100644 --- a/node_modules/colors/examples/safe-string.js +++ b/node_modules/colors/examples/safe-string.js @@ -1,41 +1,43 @@ var colors = require('../safe'); -console.log(colors.yellow("First some yellow text")); +console.log(colors.yellow('First some yellow text')); -console.log(colors.yellow.underline("Underline that text")); +console.log(colors.yellow.underline('Underline that text')); -console.log(colors.red.bold("Make it bold and red")); +console.log(colors.red.bold('Make it bold and red')); -console.log(colors.rainbow("Double Raindows All Day Long")) +console.log(colors.rainbow('Double Raindows All Day Long')); -console.log(colors.trap("Drop the bass")) +console.log(colors.trap('Drop the bass')); -console.log(colors.rainbow(colors.trap("DROP THE RAINBOW BASS"))); +console.log(colors.rainbow(colors.trap('DROP THE RAINBOW BASS'))); -console.log(colors.bold.italic.underline.red('Chains are also cool.')); // styles not widely supported +// styles not widely supported +console.log(colors.bold.italic.underline.red('Chains are also cool.')); +// styles not widely supported +console.log(colors.green('So ') + colors.underline('are') + ' ' + + colors.inverse('inverse') + colors.yellow.bold(' styles! ')); -console.log(colors.green('So ') + colors.underline('are') + ' ' + colors.inverse('inverse') + colors.yellow.bold(' styles! 
')); // styles not widely supported +console.log(colors.zebra('Zebras are so fun!')); -console.log(colors.zebra("Zebras are so fun!")); - -console.log("This is " + colors.strikethrough("not") + " fun."); +console.log('This is ' + colors.strikethrough('not') + ' fun.'); console.log(colors.black.bgWhite('Background color attack!')); -console.log(colors.random('Use random styles on everything!')) +console.log(colors.random('Use random styles on everything!')); console.log(colors.america('America, Heck Yeah!')); -console.log('Setting themes is useful') +console.log('Setting themes is useful'); // // Custom themes // -//console.log('Generic logging theme as JSON'.green.bold.underline); +// console.log('Generic logging theme as JSON'.green.bold.underline); // Load theme with JSON literal colors.setTheme({ silly: 'rainbow', - input: 'grey', + input: 'blue', verbose: 'cyan', prompt: 'grey', info: 'green', @@ -43,34 +45,33 @@ colors.setTheme({ help: 'cyan', warn: 'yellow', debug: 'blue', - error: 'red' + error: 'red', }); // outputs red text -console.log(colors.error("this is an error")); +console.log(colors.error('this is an error')); // outputs yellow text -console.log(colors.warn("this is a warning")); +console.log(colors.warn('this is a warning')); -// outputs grey text -console.log(colors.input("this is an input")); +// outputs blue text +console.log(colors.input('this is an input')); // console.log('Generic logging theme as file'.green.bold.underline); // Load a theme from file -colors.setTheme(__dirname + '/../themes/generic-logging.js'); +colors.setTheme(require(__dirname + '/../themes/generic-logging.js')); // outputs red text -console.log(colors.error("this is an error")); +console.log(colors.error('this is an error')); // outputs yellow text -console.log(colors.warn("this is a warning")); +console.log(colors.warn('this is a warning')); // outputs grey text -console.log(colors.input("this is an input")); +console.log(colors.input('this is an input')); // console.log(colors.zalgo("Don't summon him")) - diff --git a/node_modules/colors/index.d.ts b/node_modules/colors/index.d.ts new file mode 100644 index 0000000000000..baa70686535a7 --- /dev/null +++ b/node_modules/colors/index.d.ts @@ -0,0 +1,136 @@ +// Type definitions for Colors.js 1.2 +// Project: https://github.com/Marak/colors.js +// Definitions by: Bart van der Schoor , Staffan Eketorp +// Definitions: https://github.com/Marak/colors.js + +export interface Color { + (text: string): string; + + strip: Color; + stripColors: Color; + + black: Color; + red: Color; + green: Color; + yellow: Color; + blue: Color; + magenta: Color; + cyan: Color; + white: Color; + gray: Color; + grey: Color; + + bgBlack: Color; + bgRed: Color; + bgGreen: Color; + bgYellow: Color; + bgBlue: Color; + bgMagenta: Color; + bgCyan: Color; + bgWhite: Color; + + reset: Color; + bold: Color; + dim: Color; + italic: Color; + underline: Color; + inverse: Color; + hidden: Color; + strikethrough: Color; + + rainbow: Color; + zebra: Color; + america: Color; + trap: Color; + random: Color; + zalgo: Color; +} + +export function enable(): void; +export function disable(): void; +export function setTheme(theme: any): void; + +export let enabled: boolean; + +export const strip: Color; +export const stripColors: Color; + +export const black: Color; +export const red: Color; +export const green: Color; +export const yellow: Color; +export const blue: Color; +export const magenta: Color; +export const cyan: Color; +export const white: Color; +export const gray: Color; +export 
const grey: Color; + +export const bgBlack: Color; +export const bgRed: Color; +export const bgGreen: Color; +export const bgYellow: Color; +export const bgBlue: Color; +export const bgMagenta: Color; +export const bgCyan: Color; +export const bgWhite: Color; + +export const reset: Color; +export const bold: Color; +export const dim: Color; +export const italic: Color; +export const underline: Color; +export const inverse: Color; +export const hidden: Color; +export const strikethrough: Color; + +export const rainbow: Color; +export const zebra: Color; +export const america: Color; +export const trap: Color; +export const random: Color; +export const zalgo: Color; + +declare global { + interface String { + strip: string; + stripColors: string; + + black: string; + red: string; + green: string; + yellow: string; + blue: string; + magenta: string; + cyan: string; + white: string; + gray: string; + grey: string; + + bgBlack: string; + bgRed: string; + bgGreen: string; + bgYellow: string; + bgBlue: string; + bgMagenta: string; + bgCyan: string; + bgWhite: string; + + reset: string; + // @ts-ignore + bold: string; + dim: string; + italic: string; + underline: string; + inverse: string; + hidden: string; + strikethrough: string; + + rainbow: string; + zebra: string; + america: string; + trap: string; + random: string; + zalgo: string; + } +} diff --git a/node_modules/colors/lib/colors.js b/node_modules/colors/lib/colors.js index 790ffd43ef515..7ca90fa903616 100644 --- a/node_modules/colors/lib/colors.js +++ b/node_modules/colors/lib/colors.js @@ -2,7 +2,7 @@ The MIT License (MIT) -Original Library +Original Library - Copyright (c) Marak Squires Additional functionality @@ -33,35 +33,45 @@ module['exports'] = colors; colors.themes = {}; +var util = require('util'); var ansiStyles = colors.styles = require('./styles'); var defineProps = Object.defineProperties; +var newLineRegex = new RegExp(/[\r\n]+/g); -colors.supportsColor = require('./system/supports-colors'); +colors.supportsColor = require('./system/supports-colors').supportsColor; -if (typeof colors.enabled === "undefined") { - colors.enabled = colors.supportsColor; +if (typeof colors.enabled === 'undefined') { + colors.enabled = colors.supportsColor() !== false; } -colors.stripColors = colors.strip = function(str){ - return ("" + str).replace(/\x1B\[\d+m/g, ''); +colors.enable = function() { + colors.enabled = true; }; +colors.disable = function() { + colors.enabled = false; +}; + +colors.stripColors = colors.strip = function(str) { + return ('' + str).replace(/\x1B\[\d+m/g, ''); +}; -var stylize = colors.stylize = function stylize (str, style) { +// eslint-disable-next-line no-unused-vars +var stylize = colors.stylize = function stylize(str, style) { if (!colors.enabled) { return str+''; } return ansiStyles[style].open + str + ansiStyles[style].close; -} +}; var matchOperatorsRe = /[|\\{}()[\]^$+*?.]/g; -var escapeStringRegexp = function (str) { +var escapeStringRegexp = function(str) { if (typeof str !== 'string') { throw new TypeError('Expected a string'); } - return str.replace(matchOperatorsRe, '\\$&'); -} + return str.replace(matchOperatorsRe, '\\$&'); +}; function build(_styles) { var builder = function builder() { @@ -74,15 +84,16 @@ function build(_styles) { return builder; } -var styles = (function () { +var styles = (function() { var ret = {}; ansiStyles.grey = ansiStyles.gray; - Object.keys(ansiStyles).forEach(function (key) { - ansiStyles[key].closeRe = new RegExp(escapeStringRegexp(ansiStyles[key].close), 'g'); + 
Object.keys(ansiStyles).forEach(function(key) { + ansiStyles[key].closeRe = + new RegExp(escapeStringRegexp(ansiStyles[key].close), 'g'); ret[key] = { - get: function () { + get: function() { return build(this._styles.concat(key)); - } + }, }; }); return ret; @@ -91,78 +102,81 @@ var styles = (function () { var proto = defineProps(function colors() {}, styles); function applyStyle() { - var args = arguments; - var argsLen = args.length; - var str = argsLen !== 0 && String(arguments[0]); - if (argsLen > 1) { - for (var a = 1; a < argsLen; a++) { - str += ' ' + args[a]; + var args = Array.prototype.slice.call(arguments); + + var str = args.map(function(arg) { + if (arg !== undefined && arg.constructor === String) { + return arg; + } else { + return util.inspect(arg); } - } + }).join(' '); if (!colors.enabled || !str) { return str; } + var newLinesPresent = str.indexOf('\n') != -1; + var nestedStyles = this._styles; var i = nestedStyles.length; while (i--) { var code = ansiStyles[nestedStyles[i]]; str = code.open + str.replace(code.closeRe, code.open) + code.close; + if (newLinesPresent) { + str = str.replace(newLineRegex, function(match) { + return code.close + match + code.open; + }); + } } return str; } -function applyTheme (theme) { +colors.setTheme = function(theme) { + if (typeof theme === 'string') { + console.log('colors.setTheme now only accepts an object, not a string. ' + + 'If you are trying to set a theme from a file, it is now your (the ' + + 'caller\'s) responsibility to require the file. The old syntax ' + + 'looked like colors.setTheme(__dirname + ' + + '\'/../themes/generic-logging.js\'); The new syntax looks like '+ + 'colors.setTheme(require(__dirname + ' + + '\'/../themes/generic-logging.js\'));'); + return; + } for (var style in theme) { - (function(style){ - colors[style] = function(str){ - if (typeof theme[style] === 'object'){ + (function(style) { + colors[style] = function(str) { + if (typeof theme[style] === 'object') { var out = str; - for (var i in theme[style]){ + for (var i in theme[style]) { out = colors[theme[style][i]](out); } return out; } return colors[theme[style]](str); }; - })(style) - } -} - -colors.setTheme = function (theme) { - if (typeof theme === 'string') { - try { - colors.themes[theme] = require(theme); - applyTheme(colors.themes[theme]); - return colors.themes[theme]; - } catch (err) { - console.log(err); - return err; - } - } else { - applyTheme(theme); + })(style); } }; function init() { var ret = {}; - Object.keys(styles).forEach(function (name) { + Object.keys(styles).forEach(function(name) { ret[name] = { - get: function () { + get: function() { return build([name]); - } + }, }; }); return ret; } -var sequencer = function sequencer (map, str) { - var exploded = str.split(""), i = 0; +var sequencer = function sequencer(map, str) { + var exploded = str.split(''); exploded = exploded.map(map); - return exploded.join(""); + return exploded.join(''); }; // custom formatter methods @@ -171,17 +185,17 @@ colors.zalgo = require('./custom/zalgo'); // maps colors.maps = {}; -colors.maps.america = require('./maps/america'); -colors.maps.zebra = require('./maps/zebra'); -colors.maps.rainbow = require('./maps/rainbow'); -colors.maps.random = require('./maps/random') +colors.maps.america = require('./maps/america')(colors); +colors.maps.zebra = require('./maps/zebra')(colors); +colors.maps.rainbow = require('./maps/rainbow')(colors); +colors.maps.random = require('./maps/random')(colors); for (var map in colors.maps) { - (function(map){ - colors[map] = 
function (str) { + (function(map) { + colors[map] = function(str) { return sequencer(colors.maps[map], str); - } - })(map) + }; + })(map); } -defineProps(colors, init()); \ No newline at end of file +defineProps(colors, init()); diff --git a/node_modules/colors/lib/custom/trap.js b/node_modules/colors/lib/custom/trap.js index 3f0914373817f..fbccf88dede0b 100644 --- a/node_modules/colors/lib/custom/trap.js +++ b/node_modules/colors/lib/custom/trap.js @@ -1,45 +1,46 @@ -module['exports'] = function runTheTrap (text, options) { - var result = ""; - text = text || "Run the trap, drop the bass"; +module['exports'] = function runTheTrap(text, options) { + var result = ''; + text = text || 'Run the trap, drop the bass'; text = text.split(''); var trap = { - a: ["\u0040", "\u0104", "\u023a", "\u0245", "\u0394", "\u039b", "\u0414"], - b: ["\u00df", "\u0181", "\u0243", "\u026e", "\u03b2", "\u0e3f"], - c: ["\u00a9", "\u023b", "\u03fe"], - d: ["\u00d0", "\u018a", "\u0500" , "\u0501" ,"\u0502", "\u0503"], - e: ["\u00cb", "\u0115", "\u018e", "\u0258", "\u03a3", "\u03be", "\u04bc", "\u0a6c"], - f: ["\u04fa"], - g: ["\u0262"], - h: ["\u0126", "\u0195", "\u04a2", "\u04ba", "\u04c7", "\u050a"], - i: ["\u0f0f"], - j: ["\u0134"], - k: ["\u0138", "\u04a0", "\u04c3", "\u051e"], - l: ["\u0139"], - m: ["\u028d", "\u04cd", "\u04ce", "\u0520", "\u0521", "\u0d69"], - n: ["\u00d1", "\u014b", "\u019d", "\u0376", "\u03a0", "\u048a"], - o: ["\u00d8", "\u00f5", "\u00f8", "\u01fe", "\u0298", "\u047a", "\u05dd", "\u06dd", "\u0e4f"], - p: ["\u01f7", "\u048e"], - q: ["\u09cd"], - r: ["\u00ae", "\u01a6", "\u0210", "\u024c", "\u0280", "\u042f"], - s: ["\u00a7", "\u03de", "\u03df", "\u03e8"], - t: ["\u0141", "\u0166", "\u0373"], - u: ["\u01b1", "\u054d"], - v: ["\u05d8"], - w: ["\u0428", "\u0460", "\u047c", "\u0d70"], - x: ["\u04b2", "\u04fe", "\u04fc", "\u04fd"], - y: ["\u00a5", "\u04b0", "\u04cb"], - z: ["\u01b5", "\u0240"] - } - text.forEach(function(c){ + a: ['\u0040', '\u0104', '\u023a', '\u0245', '\u0394', '\u039b', '\u0414'], + b: ['\u00df', '\u0181', '\u0243', '\u026e', '\u03b2', '\u0e3f'], + c: ['\u00a9', '\u023b', '\u03fe'], + d: ['\u00d0', '\u018a', '\u0500', '\u0501', '\u0502', '\u0503'], + e: ['\u00cb', '\u0115', '\u018e', '\u0258', '\u03a3', '\u03be', '\u04bc', + '\u0a6c'], + f: ['\u04fa'], + g: ['\u0262'], + h: ['\u0126', '\u0195', '\u04a2', '\u04ba', '\u04c7', '\u050a'], + i: ['\u0f0f'], + j: ['\u0134'], + k: ['\u0138', '\u04a0', '\u04c3', '\u051e'], + l: ['\u0139'], + m: ['\u028d', '\u04cd', '\u04ce', '\u0520', '\u0521', '\u0d69'], + n: ['\u00d1', '\u014b', '\u019d', '\u0376', '\u03a0', '\u048a'], + o: ['\u00d8', '\u00f5', '\u00f8', '\u01fe', '\u0298', '\u047a', '\u05dd', + '\u06dd', '\u0e4f'], + p: ['\u01f7', '\u048e'], + q: ['\u09cd'], + r: ['\u00ae', '\u01a6', '\u0210', '\u024c', '\u0280', '\u042f'], + s: ['\u00a7', '\u03de', '\u03df', '\u03e8'], + t: ['\u0141', '\u0166', '\u0373'], + u: ['\u01b1', '\u054d'], + v: ['\u05d8'], + w: ['\u0428', '\u0460', '\u047c', '\u0d70'], + x: ['\u04b2', '\u04fe', '\u04fc', '\u04fd'], + y: ['\u00a5', '\u04b0', '\u04cb'], + z: ['\u01b5', '\u0240'], + }; + text.forEach(function(c) { c = c.toLowerCase(); - var chars = trap[c] || [" "]; + var chars = trap[c] || [' ']; var rand = Math.floor(Math.random() * chars.length); - if (typeof trap[c] !== "undefined") { + if (typeof trap[c] !== 'undefined') { result += trap[c][rand]; } else { result += c; } }); return result; - -} +}; diff --git a/node_modules/colors/lib/custom/zalgo.js b/node_modules/colors/lib/custom/zalgo.js index 
1538c3b49388c..0ef2b01195635 100644 --- a/node_modules/colors/lib/custom/zalgo.js +++ b/node_modules/colors/lib/custom/zalgo.js @@ -1,8 +1,8 @@ // please no module['exports'] = function zalgo(text, options) { - text = text || " he is here "; + text = text || ' he is here '; var soul = { - "up" : [ + 'up': [ '̍', '̎', '̄', '̅', '̿', '̑', '̆', '̐', '͒', '͗', '͑', '̇', @@ -15,9 +15,9 @@ module['exports'] = function zalgo(text, options) { 'ͦ', 'ͧ', 'ͨ', 'ͩ', 'ͪ', 'ͫ', 'ͬ', 'ͭ', 'ͮ', 'ͯ', '̾', '͛', - '͆', '̚' + '͆', '̚', ], - "down" : [ + 'down': [ '̖', '̗', '̘', '̙', '̜', '̝', '̞', '̟', '̠', '̤', '̥', '̦', @@ -27,70 +27,75 @@ module['exports'] = function zalgo(text, options) { '̺', '̻', '̼', 'ͅ', '͇', '͈', '͉', '͍', '͎', '͓', '͔', '͕', - '͖', '͙', '͚', '̣' + '͖', '͙', '͚', '̣', ], - "mid" : [ + 'mid': [ '̕', '̛', '̀', '́', '͘', '̡', '̢', '̧', '̨', '̴', '̵', '̶', '͜', '͝', '͞', '͟', '͠', '͢', '̸', - '̷', '͡', ' ҉' - ] - }, - all = [].concat(soul.up, soul.down, soul.mid), - zalgo = {}; + '̷', '͡', ' ҉', + ], + }; + var all = [].concat(soul.up, soul.down, soul.mid); function randomNumber(range) { var r = Math.floor(Math.random() * range); return r; } - function is_char(character) { + function isChar(character) { var bool = false; - all.filter(function (i) { + all.filter(function(i) { bool = (i === character); }); return bool; } - + function heComes(text, options) { - var result = '', counts, l; + var result = ''; + var counts; + var l; options = options || {}; - options["up"] = typeof options["up"] !== 'undefined' ? options["up"] : true; - options["mid"] = typeof options["mid"] !== 'undefined' ? options["mid"] : true; - options["down"] = typeof options["down"] !== 'undefined' ? options["down"] : true; - options["size"] = typeof options["size"] !== 'undefined' ? options["size"] : "maxi"; + options['up'] = + typeof options['up'] !== 'undefined' ? options['up'] : true; + options['mid'] = + typeof options['mid'] !== 'undefined' ? options['mid'] : true; + options['down'] = + typeof options['down'] !== 'undefined' ? options['down'] : true; + options['size'] = + typeof options['size'] !== 'undefined' ? 
options['size'] : 'maxi'; text = text.split(''); for (l in text) { - if (is_char(l)) { + if (isChar(l)) { continue; } result = result + text[l]; - counts = {"up" : 0, "down" : 0, "mid" : 0}; + counts = {'up': 0, 'down': 0, 'mid': 0}; switch (options.size) { - case 'mini': - counts.up = randomNumber(8); - counts.mid = randomNumber(2); - counts.down = randomNumber(8); - break; - case 'maxi': - counts.up = randomNumber(16) + 3; - counts.mid = randomNumber(4) + 1; - counts.down = randomNumber(64) + 3; - break; - default: - counts.up = randomNumber(8) + 1; - counts.mid = randomNumber(6) / 2; - counts.down = randomNumber(8) + 1; - break; + case 'mini': + counts.up = randomNumber(8); + counts.mid = randomNumber(2); + counts.down = randomNumber(8); + break; + case 'maxi': + counts.up = randomNumber(16) + 3; + counts.mid = randomNumber(4) + 1; + counts.down = randomNumber(64) + 3; + break; + default: + counts.up = randomNumber(8) + 1; + counts.mid = randomNumber(6) / 2; + counts.down = randomNumber(8) + 1; + break; } - var arr = ["up", "mid", "down"]; + var arr = ['up', 'mid', 'down']; for (var d in arr) { var index = arr[d]; - for (var i = 0 ; i <= counts[index]; i++) { + for (var i = 0; i <= counts[index]; i++) { if (options[index]) { result = result + soul[index][randomNumber(soul[index].length)]; } @@ -101,4 +106,5 @@ module['exports'] = function zalgo(text, options) { } // don't summon him return heComes(text, options); -} +}; + diff --git a/node_modules/colors/lib/extendStringPrototype.js b/node_modules/colors/lib/extendStringPrototype.js index 67374a1c22d10..46fd386a915a6 100644 --- a/node_modules/colors/lib/extendStringPrototype.js +++ b/node_modules/colors/lib/extendStringPrototype.js @@ -1,51 +1,42 @@ var colors = require('./colors'); -module['exports'] = function () { - +module['exports'] = function() { // // Extends prototype of native string object to allow for "foo".red syntax // - var addProperty = function (color, func) { + var addProperty = function(color, func) { String.prototype.__defineGetter__(color, func); }; - var sequencer = function sequencer (map, str) { - return function () { - var exploded = this.split(""), i = 0; - exploded = exploded.map(map); - return exploded.join(""); - } - }; - - addProperty('strip', function () { + addProperty('strip', function() { return colors.strip(this); }); - addProperty('stripColors', function () { + addProperty('stripColors', function() { return colors.strip(this); }); - addProperty("trap", function(){ + addProperty('trap', function() { return colors.trap(this); }); - addProperty("zalgo", function(){ + addProperty('zalgo', function() { return colors.zalgo(this); }); - addProperty("zebra", function(){ + addProperty('zebra', function() { return colors.zebra(this); }); - addProperty("rainbow", function(){ + addProperty('rainbow', function() { return colors.rainbow(this); }); - addProperty("random", function(){ + addProperty('random', function() { return colors.random(this); }); - addProperty("america", function(){ + addProperty('america', function() { return colors.america(this); }); @@ -53,8 +44,8 @@ module['exports'] = function () { // Iterate through all default styles and colors // var x = Object.keys(colors.styles); - x.forEach(function (style) { - addProperty(style, function () { + x.forEach(function(style) { + addProperty(style, function() { return colors.stylize(this, style); }); }); @@ -65,49 +56,55 @@ module['exports'] = function () { // on String that you should not overwrite. 
// var stringPrototypeBlacklist = [ - '__defineGetter__', '__defineSetter__', '__lookupGetter__', '__lookupSetter__', 'charAt', 'constructor', - 'hasOwnProperty', 'isPrototypeOf', 'propertyIsEnumerable', 'toLocaleString', 'toString', 'valueOf', 'charCodeAt', - 'indexOf', 'lastIndexof', 'length', 'localeCompare', 'match', 'replace', 'search', 'slice', 'split', 'substring', - 'toLocaleLowerCase', 'toLocaleUpperCase', 'toLowerCase', 'toUpperCase', 'trim', 'trimLeft', 'trimRight' + '__defineGetter__', '__defineSetter__', '__lookupGetter__', + '__lookupSetter__', 'charAt', 'constructor', 'hasOwnProperty', + 'isPrototypeOf', 'propertyIsEnumerable', 'toLocaleString', 'toString', + 'valueOf', 'charCodeAt', 'indexOf', 'lastIndexOf', 'length', + 'localeCompare', 'match', 'repeat', 'replace', 'search', 'slice', + 'split', 'substring', 'toLocaleLowerCase', 'toLocaleUpperCase', + 'toLowerCase', 'toUpperCase', 'trim', 'trimLeft', 'trimRight', ]; - Object.keys(theme).forEach(function (prop) { + Object.keys(theme).forEach(function(prop) { if (stringPrototypeBlacklist.indexOf(prop) !== -1) { - console.log('warn: '.red + ('String.prototype' + prop).magenta + ' is probably something you don\'t want to override. Ignoring style name'); - } - else { + console.log('warn: '.red + ('String.prototype' + prop).magenta + + ' is probably something you don\'t want to override. ' + + 'Ignoring style name'); + } else { if (typeof(theme[prop]) === 'string') { colors[prop] = colors[theme[prop]]; - addProperty(prop, function () { - return colors[theme[prop]](this); + addProperty(prop, function() { + return colors[prop](this); }); - } - else { - addProperty(prop, function () { - var ret = this; + } else { + var themePropApplicator = function(str) { + var ret = str || this; for (var t = 0; t < theme[prop].length; t++) { ret = colors[theme[prop][t]](ret); } return ret; - }); + }; + addProperty(prop, themePropApplicator); + colors[prop] = function(str) { + return themePropApplicator(str); + }; } } }); } - colors.setTheme = function (theme) { + colors.setTheme = function(theme) { if (typeof theme === 'string') { - try { - colors.themes[theme] = require(theme); - applyTheme(colors.themes[theme]); - return colors.themes[theme]; - } catch (err) { - console.log(err); - return err; - } + console.log('colors.setTheme now only accepts an object, not a string. ' + + 'If you are trying to set a theme from a file, it is now your (the ' + + 'caller\'s) responsibility to require the file. The old syntax ' + + 'looked like colors.setTheme(__dirname + ' + + '\'/../themes/generic-logging.js\'); The new syntax looks like '+ + 'colors.setTheme(require(__dirname + ' + + '\'/../themes/generic-logging.js\'));'); + return; } else { applyTheme(theme); } }; - -}; \ No newline at end of file +}; diff --git a/node_modules/colors/lib/index.js b/node_modules/colors/lib/index.js index fd0956d03adb6..9df5ab7df3077 100644 --- a/node_modules/colors/lib/index.js +++ b/node_modules/colors/lib/index.js @@ -1,12 +1,13 @@ var colors = require('./colors'); module['exports'] = colors; -// Remark: By default, colors will add style properties to String.prototype +// Remark: By default, colors will add style properties to String.prototype. 
// -// If you don't wish to extend String.prototype you can do this instead and native String will not be touched +// If you don't wish to extend String.prototype, you can do this instead and +// native String will not be touched: // // var colors = require('colors/safe); // colors.red("foo") // // -require('./extendStringPrototype')(); \ No newline at end of file +require('./extendStringPrototype')(); diff --git a/node_modules/colors/lib/maps/america.js b/node_modules/colors/lib/maps/america.js index a07d8327ff2f4..dc96903328989 100644 --- a/node_modules/colors/lib/maps/america.js +++ b/node_modules/colors/lib/maps/america.js @@ -1,12 +1,10 @@ -var colors = require('../colors'); - -module['exports'] = (function() { - return function (letter, i, exploded) { - if(letter === " ") return letter; - switch(i%3) { +module['exports'] = function(colors) { + return function(letter, i, exploded) { + if (letter === ' ') return letter; + switch (i%3) { case 0: return colors.red(letter); - case 1: return colors.white(letter) - case 2: return colors.blue(letter) + case 1: return colors.white(letter); + case 2: return colors.blue(letter); } - } -})(); \ No newline at end of file + }; +}; diff --git a/node_modules/colors/lib/maps/rainbow.js b/node_modules/colors/lib/maps/rainbow.js index a7ce24e6c15ba..2b00ac0ac998e 100644 --- a/node_modules/colors/lib/maps/rainbow.js +++ b/node_modules/colors/lib/maps/rainbow.js @@ -1,13 +1,12 @@ -var colors = require('../colors'); - -module['exports'] = (function () { - var rainbowColors = ['red', 'yellow', 'green', 'blue', 'magenta']; //RoY G BiV - return function (letter, i, exploded) { - if (letter === " ") { +module['exports'] = function(colors) { + // RoY G BiV + var rainbowColors = ['red', 'yellow', 'green', 'blue', 'magenta']; + return function(letter, i, exploded) { + if (letter === ' ') { return letter; } else { return colors[rainbowColors[i++ % rainbowColors.length]](letter); } }; -})(); +}; diff --git a/node_modules/colors/lib/maps/random.js b/node_modules/colors/lib/maps/random.js index 5cd101fae2b3d..6f8f2f8e1e416 100644 --- a/node_modules/colors/lib/maps/random.js +++ b/node_modules/colors/lib/maps/random.js @@ -1,8 +1,10 @@ -var colors = require('../colors'); - -module['exports'] = (function () { - var available = ['underline', 'inverse', 'grey', 'yellow', 'red', 'green', 'blue', 'white', 'cyan', 'magenta']; +module['exports'] = function(colors) { + var available = ['underline', 'inverse', 'grey', 'yellow', 'red', 'green', + 'blue', 'white', 'cyan', 'magenta']; return function(letter, i, exploded) { - return letter === " " ? letter : colors[available[Math.round(Math.random() * (available.length - 1))]](letter); + return letter === ' ' ? letter : + colors[ + available[Math.round(Math.random() * (available.length - 2))] + ](letter); }; -})(); \ No newline at end of file +}; diff --git a/node_modules/colors/lib/maps/zebra.js b/node_modules/colors/lib/maps/zebra.js index bf7dcdead0722..fa73623544a82 100644 --- a/node_modules/colors/lib/maps/zebra.js +++ b/node_modules/colors/lib/maps/zebra.js @@ -1,5 +1,5 @@ -var colors = require('../colors'); - -module['exports'] = function (letter, i, exploded) { - return i % 2 === 0 ? letter : colors.inverse(letter); -}; \ No newline at end of file +module['exports'] = function(colors) { + return function(letter, i, exploded) { + return i % 2 === 0 ? 
letter : colors.inverse(letter); + }; +}; diff --git a/node_modules/colors/lib/styles.js b/node_modules/colors/lib/styles.js index 067d59070c2a2..02db9acf7c7db 100644 --- a/node_modules/colors/lib/styles.js +++ b/node_modules/colors/lib/styles.js @@ -65,13 +65,13 @@ var codes = { blueBG: [44, 49], magentaBG: [45, 49], cyanBG: [46, 49], - whiteBG: [47, 49] + whiteBG: [47, 49], }; -Object.keys(codes).forEach(function (key) { +Object.keys(codes).forEach(function(key) { var val = codes[key]; var style = styles[key] = []; style.open = '\u001b[' + val[0] + 'm'; style.close = '\u001b[' + val[1] + 'm'; -}); \ No newline at end of file +}); diff --git a/node_modules/colors/lib/system/has-flag.js b/node_modules/colors/lib/system/has-flag.js new file mode 100644 index 0000000000000..a347dd4d7a697 --- /dev/null +++ b/node_modules/colors/lib/system/has-flag.js @@ -0,0 +1,35 @@ +/* +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ + +'use strict'; + +module.exports = function(flag, argv) { + argv = argv || process.argv; + + var terminatorPos = argv.indexOf('--'); + var prefix = /^-{1,2}/.test(flag) ? '' : '--'; + var pos = argv.indexOf(prefix + flag); + + return pos !== -1 && (terminatorPos === -1 ? true : pos < terminatorPos); +}; diff --git a/node_modules/colors/lib/system/supports-colors.js b/node_modules/colors/lib/system/supports-colors.js index 3e008aa93a6a6..f1f9c8ff3da28 100644 --- a/node_modules/colors/lib/system/supports-colors.js +++ b/node_modules/colors/lib/system/supports-colors.js @@ -23,39 +23,129 @@ THE SOFTWARE. 
*/ -var argv = process.argv; +'use strict'; -module.exports = (function () { - if (argv.indexOf('--no-color') !== -1 || - argv.indexOf('--color=false') !== -1) { +var os = require('os'); +var hasFlag = require('./has-flag.js'); + +var env = process.env; + +var forceColor = void 0; +if (hasFlag('no-color') || hasFlag('no-colors') || hasFlag('color=false')) { + forceColor = false; +} else if (hasFlag('color') || hasFlag('colors') || hasFlag('color=true') + || hasFlag('color=always')) { + forceColor = true; +} +if ('FORCE_COLOR' in env) { + forceColor = env.FORCE_COLOR.length === 0 + || parseInt(env.FORCE_COLOR, 10) !== 0; +} + +function translateLevel(level) { + if (level === 0) { return false; } - if (argv.indexOf('--color') !== -1 || - argv.indexOf('--color=true') !== -1 || - argv.indexOf('--color=always') !== -1) { - return true; + return { + level: level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3, + }; +} + +function supportsColor(stream) { + if (forceColor === false) { + return 0; } - if (process.stdout && !process.stdout.isTTY) { - return false; + if (hasFlag('color=16m') || hasFlag('color=full') + || hasFlag('color=truecolor')) { + return 3; + } + + if (hasFlag('color=256')) { + return 2; + } + + if (stream && !stream.isTTY && forceColor !== true) { + return 0; } + var min = forceColor ? 1 : 0; + if (process.platform === 'win32') { - return true; + // Node.js 7.5.0 is the first version of Node.js to include a patch to + // libuv that enables 256 color output on Windows. Anything earlier and it + // won't work. However, here we target Node.js 8 at minimum as it is an LTS + // release, and Node.js 7 is not. Windows 10 build 10586 is the first + // Windows release that supports 256 colors. Windows 10 build 14931 is the + // first release that supports 16m/TrueColor. + var osRelease = os.release().split('.'); + if (Number(process.versions.node.split('.')[0]) >= 8 + && Number(osRelease[0]) >= 10 && Number(osRelease[2]) >= 10586) { + return Number(osRelease[2]) >= 14931 ? 3 : 2; + } + + return 1; + } + + if ('CI' in env) { + if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI'].some(function(sign) { + return sign in env; + }) || env.CI_NAME === 'codeship') { + return 1; + } + + return min; } - if ('COLORTERM' in process.env) { - return true; + if ('TEAMCITY_VERSION' in env) { + return (/^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0 + ); } - if (process.env.TERM === 'dumb') { - return false; + if ('TERM_PROGRAM' in env) { + var version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10); + + switch (env.TERM_PROGRAM) { + case 'iTerm.app': + return version >= 3 ? 
3 : 2; + case 'Hyper': + return 3; + case 'Apple_Terminal': + return 2; + // No default + } + } + + if (/-256(color)?$/i.test(env.TERM)) { + return 2; } - if (/^screen|^xterm|^vt100|color|ansi|cygwin|linux/i.test(process.env.TERM)) { - return true; + if (/^screen|^xterm|^vt100|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { + return 1; } - return false; -})(); \ No newline at end of file + if ('COLORTERM' in env) { + return 1; + } + + if (env.TERM === 'dumb') { + return min; + } + + return min; +} + +function getSupportLevel(stream) { + var level = supportsColor(stream); + return translateLevel(level); +} + +module.exports = { + supportsColor: getSupportLevel, + stdout: getSupportLevel(process.stdout), + stderr: getSupportLevel(process.stderr), +}; diff --git a/node_modules/colors/package.json b/node_modules/colors/package.json index 554cbd22e19dc..712ab466de4eb 100644 --- a/node_modules/colors/package.json +++ b/node_modules/colors/package.json @@ -1,40 +1,46 @@ { - "_args": [ - [ - "colors@1.1.2", - "/Users/rebecca/code/npm" - ] - ], - "_from": "colors@1.1.2", - "_id": "colors@1.1.2", + "_from": "colors@^1.1.2", + "_id": "colors@1.3.3", "_inBundle": false, - "_integrity": "sha1-FopHAXVran9RoSzgyXv6KMCE7WM=", + "_integrity": "sha512-mmGt/1pZqYRjMxB1axhTo16/snVZ5krrKkcmMeVKxzECMMXoCgnvTPp10QgHfcbQZw8Dq2jMNG6je4JlWU0gWg==", "_location": "/colors", - "_optional": true, "_phantomChildren": {}, "_requested": { - "type": "version", + "type": "range", "registry": true, - "raw": "colors@1.1.2", + "raw": "colors@^1.1.2", "name": "colors", "escapedName": "colors", - "rawSpec": "1.1.2", + "rawSpec": "^1.1.2", "saveSpec": null, - "fetchSpec": "1.1.2" + "fetchSpec": "^1.1.2" }, "_requiredBy": [ "/cli-table3" ], - "_resolved": "https://registry.npmjs.org/colors/-/colors-1.1.2.tgz", - "_spec": "1.1.2", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/colors/-/colors-1.3.3.tgz", + "_shasum": "39e005d546afe01e01f9c4ca8fa50f686a01205d", + "_spec": "colors@^1.1.2", + "_where": "/Users/aeschright/code/cli/node_modules/cli-table3", "author": { "name": "Marak Squires" }, "bugs": { "url": "https://github.com/Marak/colors.js/issues" }, + "bundleDependencies": false, + "contributors": [ + { + "name": "DABH", + "url": "https://github.com/DABH" + } + ], + "deprecated": false, "description": "get colors in your node.js console", + "devDependencies": { + "eslint": "^5.2.0", + "eslint-config-google": "^0.11.0" + }, "engines": { "node": ">=0.1.90" }, @@ -43,7 +49,9 @@ "lib", "LICENSE", "safe.js", - "themes" + "themes", + "index.d.ts", + "safe.d.ts" ], "homepage": "https://github.com/Marak/colors.js", "keywords": [ @@ -52,14 +60,15 @@ "colors" ], "license": "MIT", - "main": "lib", + "main": "lib/index.js", "name": "colors", "repository": { "type": "git", "url": "git+ssh://git@github.com/Marak/colors.js.git" }, "scripts": { + "lint": "eslint . 
--fix", "test": "node tests/basic-test.js && node tests/safe-test.js" }, - "version": "1.1.2" + "version": "1.3.3" } diff --git a/node_modules/colors/safe.d.ts b/node_modules/colors/safe.d.ts new file mode 100644 index 0000000000000..2bafc27984e0e --- /dev/null +++ b/node_modules/colors/safe.d.ts @@ -0,0 +1,48 @@ +// Type definitions for Colors.js 1.2 +// Project: https://github.com/Marak/colors.js +// Definitions by: Bart van der Schoor , Staffan Eketorp +// Definitions: https://github.com/Marak/colors.js + +export const enabled: boolean; +export function enable(): void; +export function disable(): void; +export function setTheme(theme: any): void; + +export function strip(str: string): string; +export function stripColors(str: string): string; + +export function black(str: string): string; +export function red(str: string): string; +export function green(str: string): string; +export function yellow(str: string): string; +export function blue(str: string): string; +export function magenta(str: string): string; +export function cyan(str: string): string; +export function white(str: string): string; +export function gray(str: string): string; +export function grey(str: string): string; + +export function bgBlack(str: string): string; +export function bgRed(str: string): string; +export function bgGreen(str: string): string; +export function bgYellow(str: string): string; +export function bgBlue(str: string): string; +export function bgMagenta(str: string): string; +export function bgCyan(str: string): string; +export function bgWhite(str: string): string; + +export function reset(str: string): string; +export function bold(str: string): string; +export function dim(str: string): string; +export function italic(str: string): string; +export function underline(str: string): string; +export function inverse(str: string): string; +export function hidden(str: string): string; +export function strikethrough(str: string): string; + +export function rainbow(str: string): string; +export function zebra(str: string): string; +export function america(str: string): string; +export function trap(str: string): string; +export function random(str: string): string; +export function zalgo(str: string): string; diff --git a/node_modules/colors/safe.js b/node_modules/colors/safe.js index a6a1f3ab47f06..a013d54246485 100644 --- a/node_modules/colors/safe.js +++ b/node_modules/colors/safe.js @@ -1,9 +1,10 @@ // -// Remark: Requiring this file will use the "safe" colors API which will not touch String.prototype +// Remark: Requiring this file will use the "safe" colors API, +// which will not touch String.prototype. 
// -// var colors = require('colors/safe); +// var colors = require('colors/safe'); // colors.red("foo") // // var colors = require('./lib/colors'); -module['exports'] = colors; \ No newline at end of file +module['exports'] = colors; diff --git a/node_modules/colors/themes/generic-logging.js b/node_modules/colors/themes/generic-logging.js index 571972c1baa82..63adfe4ac31f9 100644 --- a/node_modules/colors/themes/generic-logging.js +++ b/node_modules/colors/themes/generic-logging.js @@ -8,5 +8,5 @@ module['exports'] = { help: 'cyan', warn: 'yellow', debug: 'blue', - error: 'red' -}; \ No newline at end of file + error: 'red', +}; diff --git a/node_modules/concat-stream/node_modules/readable-stream/.travis.yml b/node_modules/concat-stream/node_modules/readable-stream/.travis.yml new file mode 100644 index 0000000000000..40992555bf5cc --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/.travis.yml @@ -0,0 +1,55 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 + env: TASK=test + - node_js: 9 + env: TASK=test +script: "npm run $TASK" +env: + global: + - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= + - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/node_modules/concat-stream/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/concat-stream/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 0000000000000..f478d58dca85b --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. 
+ +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/concat-stream/node_modules/readable-stream/GOVERNANCE.md b/node_modules/concat-stream/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 0000000000000..16ffb93f24bec --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. 
+ +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. 
diff --git a/node_modules/concat-stream/node_modules/readable-stream/LICENSE b/node_modules/concat-stream/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000000000..2873b3b2e5950 --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+""" diff --git a/node_modules/concat-stream/node_modules/readable-stream/README.md b/node_modules/concat-stream/node_modules/readable-stream/README.md new file mode 100644 index 0000000000000..23fe3f3e3009a --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/README.md @@ -0,0 +1,58 @@ +# readable-stream + +***Node-core v8.11.1 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) + +```bash +npm install --save readable-stream +``` + +***Node-core streams for userland*** + +This package is a mirror of the Streams2 and Streams3 implementations in +Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.11.1/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. 
+ + +## Team Members + +* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> + - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> + - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D +* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> diff --git a/node_modules/concat-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/concat-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md new file mode 100644 index 0000000000000..83275f192e407 --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md @@ -0,0 +1,60 @@ +# streams WG Meeting 2015-01-30 + +## Links + +* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg +* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 +* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ + +## Agenda + +Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. + +* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) +* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) +* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) +* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) + +## Minutes + +### adopt a charter + +* group: +1's all around + +### What versioning scheme should be adopted? +* group: +1’s 3.0.0 +* domenic+group: pulling in patches from other sources where appropriate +* mikeal: version independently, suggesting versions for io.js +* mikeal+domenic: work with TC to notify in advance of changes +simpler stream creation + +### streamline creation of streams +* sam: streamline creation of streams +* domenic: nice simple solution posted + but, we lose the opportunity to change the model + may not be backwards incompatible (double check keys) + + **action item:** domenic will check + +### remove implicit flowing of streams on(‘data’) +* add isFlowing / isPaused +* mikeal: worrying that we’re documenting polyfill methods – confuses users +* domenic: more reflective API is probably good, with warning labels for users +* new section for mad scientists (reflective stream access) +* calvin: name the “third state” +* mikeal: maybe borrow the name from whatwg? +* domenic: we’re missing the “third state” +* consensus: kind of difficult to name the third state +* mikeal: figure out differences in states / compat +* mathias: always flow on data – eliminates third state + * explore what it breaks + +**action items:** +* ask isaac for ability to list packages by what public io.js APIs they use (esp. 
Stream) +* ask rod/build for infrastructure +* **chris**: explore the “flow on data” approach +* add isPaused/isFlowing +* add new docs section +* move isPaused to that section + + diff --git a/node_modules/concat-stream/node_modules/readable-stream/duplex-browser.js b/node_modules/concat-stream/node_modules/readable-stream/duplex-browser.js new file mode 100644 index 0000000000000..f8b2db83dbe73 --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/duplex-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_duplex.js'); diff --git a/node_modules/concat-stream/node_modules/readable-stream/duplex.js b/node_modules/concat-stream/node_modules/readable-stream/duplex.js new file mode 100644 index 0000000000000..46924cbfdf538 --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require('./readable').Duplex diff --git a/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000000000..a1ca813e5acbd --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,131 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
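To make the inheritance comment above concrete, here is a minimal, hypothetical Duplex built with the simplified-construction options; the Writable methods it relies on are available only because they are copied onto Duplex.prototype further down in this file.

```js
// Sketch: a Duplex that echoes everything written to it back out of its
// readable side (names and behaviour are illustrative only).
var Duplex = require('readable-stream').Duplex;

var echo = new Duplex({
  write: function (chunk, encoding, callback) {
    this.push(chunk); // hand written data to the readable side
    callback();
  },
  read: function (size) {} // nothing to do; data arrives via write()
});

echo.on('data', function (chunk) {
  console.log('echoed:', chunk.toString());
});
echo.write('ping');
echo.end();
```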
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); + + pna.nextTick(cb, err); +}; \ No newline at end of file diff --git a/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000000000..a9c835884828d --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,47 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000000000..bf34ac65e1108 --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1019 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
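The PassThrough class added above is essentially an identity Transform, which makes it handy as an observation point in a pipeline. A small hedged sketch:

```js
// Sketch: using PassThrough as a tap to watch data flow through a pipe.
var PassThrough = require('readable-stream').PassThrough;

var tap = new PassThrough();
tap.on('data', function (chunk) {
  console.error('saw %d bytes', chunk.length);
});

process.stdin.pipe(tap).pipe(process.stdout);
```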
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Readable; + +/**/ +var isArray = require('isarray'); +/**/ + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var debugUtil = require('util'); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = require('./internal/streams/BufferList'); +var destroyImpl = require('./internal/streams/destroy'); +var StringDecoder; + +util.inherits(Readable, Stream); + +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. 
+ this.highWaterMark = Math.floor(this.highWaterMark); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); + +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
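The comment above is the heart of readable-side backpressure: push() keeps returning true until the buffered length reaches highWaterMark. A minimal sketch of a producer that respects that signal (class name and limits are illustrative):

```js
// Sketch: a Readable that emits numbers and stops pushing as soon as
// push() returns false; _read() is called again once the buffer drains.
var Readable = require('readable-stream').Readable;
var inherits = require('util').inherits;

function Counter(limit) {
  Readable.call(this, { objectMode: true, highWaterMark: 2 });
  this._n = 0;
  this._limit = limit;
}
inherits(Counter, Readable);

Counter.prototype._read = function () {
  while (this._n < this._limit) {
    if (!this.push(this._n++)) return; // buffer full; wait for next _read()
  }
  this.push(null); // no more data
};

new Counter(5).on('data', function (n) { console.log(n); });
```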
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } + } + + return needMoreData(state); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. 
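computeNewHighWaterMark() above rounds a large read(n) request up to the next power of two so the high-water mark does not creep up in tiny increments. A worked illustration of the bit-twiddling, with assumed inputs:

```js
// Sketch of the rounding behaviour (the real routine also applies the
// 8 MB MAX_HWM cap before rounding).
function nextPowerOfTwo(n) {
  n--;
  n |= n >>> 1; n |= n >>> 2; n |= n >>> 4; n |= n >>> 8; n |= n >>> 16;
  return n + 1;
}

console.log(nextPowerOfTwo(16384));  // 16384 (already a power of two)
console.log(nextPowerOfTwo(16385));  // 32768
console.log(nextPowerOfTwo(100000)); // 131072
```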
+ var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. 
+// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. 
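In pipe() above, `doEnd` is what the `{ end: false }` option controls: by default the destination is ended when the source ends, except for process.stdout/stderr. A hypothetical sketch of keeping one destination open across two sources (file names are illustrative):

```js
// Sketch: concatenate two sources into one writable without letting the
// first pipe() close it.
var fs = require('fs');

var out = fs.createWriteStream('combined.log');
var first = fs.createReadStream('a.log');
var second = fs.createReadStream('b.log');

first.pipe(out, { end: false }); // suppress out.end() when `first` ends
first.on('end', function () {
  second.pipe(out); // default end:true closes `out` when `second` ends
});
```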
+ function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, unpipeInfo); + }return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this, unpipeInfo); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
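As the on('data') handling above shows, adding a 'data' listener flips the stream into flowing mode, and pause()/resume() toggle it afterwards. A small hedged sketch of throttling a flowing stream (file name and delay are illustrative):

```js
// Sketch: pause the source while "processing" each chunk, then resume.
var fs = require('fs');

var src = fs.createReadStream('input.txt');
src.on('data', function (chunk) {
  src.pause(); // stop further 'data' events for now
  setTimeout(function () {
    src.resume(); // flow again once we're ready
  }, 100);
});
src.on('end', function () {
  console.log('done');
});
```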
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
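wrap() above exists for pre-streams2 emitters that only know 'data'/'end' plus pause()/resume(). A hypothetical sketch, assuming `oldStream` comes from some legacy library:

```js
// Sketch: adapting an old-style readable to the streams2/3 API so it can
// be consumed with read()/pipe() and proper backpressure.
var Readable = require('readable-stream').Readable;

function wrapLegacy(oldStream) {
  return new Readable().wrap(oldStream);
}

// e.g. wrapLegacy(someOldStream).pipe(process.stdout);
```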
+function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000000000..5d1f8b876d98c --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,214 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. 
For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); + } + + ts.writechunk = null; + ts.writecb = null; + + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + + cb(er); + + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. 
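The _transform() contract spelled out above (push zero or more chunks, then call cb) is easiest to see in a tiny hypothetical Transform built with the simplified-construction option:

```js
// Sketch: an upper-casing Transform; each written chunk produces exactly
// one pushed chunk, and cb() asks for the next one.
var Transform = require('readable-stream').Transform;

var upper = new Transform({
  transform: function (chunk, encoding, cb) {
    this.push(chunk.toString().toUpperCase());
    cb();
  }
});

process.stdin.pipe(upper).pipe(process.stdout);
```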
+Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; + + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); + + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000000000..b3f4e85a2f6e3 --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,687 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
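As the comment above says, the writable side only needs a ._write(chunk, encoding, cb) implementation; the base class takes care of buffering and 'drain'. A minimal hedged sketch (the byte-counting behaviour is illustrative):

```js
// Sketch: a Writable that just counts bytes; calling cb() tells the base
// class the chunk has been handled.
var Writable = require('readable-stream').Writable;

var counter = new Writable({
  write: function (chunk, encoding, cb) {
    this.total = (this.total || 0) + chunk.length;
    cb();
  }
});

counter.on('finish', function () {
  console.log('wrote', counter.total, 'bytes');
});
counter.write('abc');
counter.end('def'); // prints "wrote 6 bytes" once everything is flushed
```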
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +/**/ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +var destroyImpl = require('./internal/streams/destroy'); + +util.inherits(Writable, Stream); + +function nop() {} + +function WritableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. 
+ if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + 
asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = corkReq; + } else { + state.corkedRequestsFree = corkReq; + } +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); + +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); +}; \ No newline at end of file diff --git a/node_modules/concat-stream/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/concat-stream/node_modules/readable-stream/lib/internal/streams/BufferList.js new file mode 100644 index 0000000000000..aefc68bd90b9c --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/lib/internal/streams/BufferList.js @@ -0,0 +1,79 @@ +'use strict'; + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var Buffer = require('safe-buffer').Buffer; +var util = require('util'); + +function copyBuffer(src, target, offset) { + src.copy(target, offset); +} + +module.exports = function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + BufferList.prototype.push = function push(v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + }; + + BufferList.prototype.unshift = function unshift(v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + }; + + BufferList.prototype.shift = function shift() { + if 
(this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + }; + + BufferList.prototype.clear = function clear() { + this.head = this.tail = null; + this.length = 0; + }; + + BufferList.prototype.join = function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; + }; + + BufferList.prototype.concat = function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + if (this.length === 1) return this.head.data; + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + }; + + return BufferList; +}(); + +if (util && util.inspect && util.inspect.custom) { + module.exports.prototype[util.inspect.custom] = function () { + var obj = util.inspect({ length: this.length }); + return this.constructor.name + ' ' + obj; + }; +} \ No newline at end of file diff --git a/node_modules/concat-stream/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/concat-stream/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 0000000000000..5a0a0d88cec6f --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,74 @@ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err && (!this._writableState || !this._writableState.errorEmitted)) { + pna.nextTick(emitErrorNT, this, err); + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + pna.nextTick(emitErrorNT, _this, err); + if (_this._writableState) { + _this._writableState.errorEmitted = true; + } + } else if (cb) { + cb(err); + } + }); + + return this; +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy +}; \ No newline at end of file diff --git a/node_modules/concat-stream/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/concat-stream/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 0000000000000..9332a3fdae706 --- /dev/null +++ 
b/node_modules/concat-stream/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/node_modules/concat-stream/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/concat-stream/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 0000000000000..ce2ad5b6ee57f --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/node_modules/concat-stream/node_modules/readable-stream/package.json b/node_modules/concat-stream/node_modules/readable-stream/package.json new file mode 100644 index 0000000000000..9f2a5ca7ca993 --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/package.json @@ -0,0 +1,81 @@ +{ + "_from": "readable-stream@^2.2.2", + "_id": "readable-stream@2.3.6", + "_inBundle": false, + "_integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "_location": "/concat-stream/readable-stream", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "readable-stream@^2.2.2", + "name": "readable-stream", + "escapedName": "readable-stream", + "rawSpec": "^2.2.2", + "saveSpec": null, + "fetchSpec": "^2.2.2" + }, + "_requiredBy": [ + "/concat-stream" + ], + "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "_shasum": "b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf", + "_spec": "readable-stream@^2.2.2", + "_where": "/Users/aeschright/code/cli/node_modules/concat-stream", + "browser": { + "util": false, + "./readable.js": "./readable-browser.js", + "./writable.js": "./writable-browser.js", + "./duplex.js": "./duplex-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "bugs": { + "url": "https://github.com/nodejs/readable-stream/issues" + }, + "bundleDependencies": false, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "deprecated": false, + "description": "Streams3, a user-land copy of the stream library from Node.js", + "devDependencies": { + "assert": "^1.4.0", + "babel-polyfill": "^6.9.1", + "buffer": "^4.9.0", + "lolex": "^2.3.2", + "nyc": "^6.4.0", + "tap": "^0.7.0", + "tape": "^4.8.0" + }, + "homepage": "https://github.com/nodejs/readable-stream#readme", + "keywords": [ + "readable", + "stream", + "pipe" + ], + "license": "MIT", + "main": "readable.js", + "name": "readable-stream", + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream.git" + }, + "scripts": { + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js" + }, + "version": "2.3.6" +} diff --git a/node_modules/concat-stream/node_modules/readable-stream/passthrough.js b/node_modules/concat-stream/node_modules/readable-stream/passthrough.js new file mode 100644 index 0000000000000..ffd791d7ff275 --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = 
require('./readable').PassThrough diff --git a/node_modules/concat-stream/node_modules/readable-stream/readable-browser.js b/node_modules/concat-stream/node_modules/readable-stream/readable-browser.js new file mode 100644 index 0000000000000..e50372592ee6c --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,7 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/node_modules/concat-stream/node_modules/readable-stream/readable.js b/node_modules/concat-stream/node_modules/readable-stream/readable.js new file mode 100644 index 0000000000000..ec89ec5330649 --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/readable.js @@ -0,0 +1,19 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; + exports = module.exports = Stream.Readable; + exports.Readable = Stream.Readable; + exports.Writable = Stream.Writable; + exports.Duplex = Stream.Duplex; + exports.Transform = Stream.Transform; + exports.PassThrough = Stream.PassThrough; + exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); +} diff --git a/node_modules/concat-stream/node_modules/readable-stream/transform.js b/node_modules/concat-stream/node_modules/readable-stream/transform.js new file mode 100644 index 0000000000000..b1baba26da03d --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require('./readable').Transform diff --git a/node_modules/concat-stream/node_modules/readable-stream/writable-browser.js b/node_modules/concat-stream/node_modules/readable-stream/writable-browser.js new file mode 100644 index 0000000000000..ebdde6a85dcb1 --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/writable-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_writable.js'); diff --git a/node_modules/concat-stream/node_modules/readable-stream/writable.js b/node_modules/concat-stream/node_modules/readable-stream/writable.js new file mode 100644 index 0000000000000..3211a6f80d1ab --- /dev/null +++ b/node_modules/concat-stream/node_modules/readable-stream/writable.js @@ -0,0 +1,8 @@ +var Stream = require("stream") +var Writable = require("./lib/_stream_writable.js") + +if (process.env.READABLE_STREAM === 'disable') { + module.exports = Stream && Stream.Writable || Writable +} else { + module.exports = Writable +} diff --git a/node_modules/concat-stream/node_modules/string_decoder/.travis.yml b/node_modules/concat-stream/node_modules/string_decoder/.travis.yml new file mode 100644 index 0000000000000..3347a72546505 --- /dev/null +++ b/node_modules/concat-stream/node_modules/string_decoder/.travis.yml @@ -0,0 +1,50 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm 
install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 + env: TASK=test + - node_js: 9 + env: TASK=test diff --git a/node_modules/concat-stream/node_modules/string_decoder/LICENSE b/node_modules/concat-stream/node_modules/string_decoder/LICENSE new file mode 100644 index 0000000000000..778edb20730ef --- /dev/null +++ b/node_modules/concat-stream/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+""" + diff --git a/node_modules/concat-stream/node_modules/string_decoder/README.md b/node_modules/concat-stream/node_modules/string_decoder/README.md new file mode 100644 index 0000000000000..5fd58315ed588 --- /dev/null +++ b/node_modules/concat-stream/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. + +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/node_modules/concat-stream/node_modules/string_decoder/lib/string_decoder.js b/node_modules/concat-stream/node_modules/string_decoder/lib/string_decoder.js new file mode 100644 index 0000000000000..2e89e63f7933e --- /dev/null +++ b/node_modules/concat-stream/node_modules/string_decoder/lib/string_decoder.js @@ -0,0 +1,296 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +/**/ + +var Buffer = require('safe-buffer').Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? 
-1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. 
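To make the buffering described in these comments concrete, here is a small usage sketch (an illustration against the module's public `StringDecoder` API, not code from the diff): a UTF-16LE surrogate pair split across two writes is held back until it can be decoded whole, and UTF-8 behaves the same way for multi-byte sequences.

```javascript
// Illustrative only -- not part of the vendored file.
var StringDecoder = require('string_decoder').StringDecoder;

var d16 = new StringDecoder('utf16le');
// U+10437 is the surrogate pair 0xD801 0xDC37, i.e. the little-endian
// bytes 01 D8 37 DC. Split it across two writes:
console.log(d16.write(Buffer.from([0x01, 0xD8]))); // '' -- lone high surrogate is held back
console.log(d16.write(Buffer.from([0x37, 0xDC]))); // the completed character

// UTF-8 works the same way: a multi-byte sequence split across chunks is
// buffered, and end() emits '\ufffd' for anything still incomplete.
var d8 = new StringDecoder('utf8');
console.log(d8.write(Buffer.from([0xE2, 0x82]))); // ''
console.log(d8.write(Buffer.from([0xAC])));       // '€'
console.log(d8.end(Buffer.from([0xE2])));         // '\ufffd'
```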
+function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} \ No newline at end of file diff --git a/node_modules/concat-stream/node_modules/string_decoder/package.json b/node_modules/concat-stream/node_modules/string_decoder/package.json new file mode 100644 index 0000000000000..fcc3a4bdf3b02 --- /dev/null +++ b/node_modules/concat-stream/node_modules/string_decoder/package.json @@ -0,0 +1,59 @@ +{ + "_from": "string_decoder@~1.1.1", + "_id": "string_decoder@1.1.1", + "_inBundle": false, + "_integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "_location": "/concat-stream/string_decoder", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "string_decoder@~1.1.1", + "name": "string_decoder", + "escapedName": "string_decoder", + "rawSpec": "~1.1.1", + "saveSpec": null, + "fetchSpec": "~1.1.1" + }, + "_requiredBy": [ + "/concat-stream/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "_shasum": "9cf1611ba62685d7030ae9e4ba34149c3af03fc8", + "_spec": "string_decoder@~1.1.1", + "_where": "/Users/aeschright/code/cli/node_modules/concat-stream/node_modules/readable-stream", + "bugs": { + "url": "https://github.com/nodejs/string_decoder/issues" + }, + "bundleDependencies": false, + "dependencies": { + "safe-buffer": "~5.1.0" + }, + "deprecated": false, + "description": "The string_decoder module from Node core", + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT", + "main": "lib/string_decoder.js", + "name": "string_decoder", + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, + "scripts": { + "ci": 
"tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "test": "tap test/parallel/*.js && node test/verify-dependencies" + }, + "version": "1.1.1" +} diff --git a/node_modules/copy-concurrently/node_modules/aproba/LICENSE b/node_modules/copy-concurrently/node_modules/aproba/LICENSE new file mode 100644 index 0000000000000..f4be44d881b2d --- /dev/null +++ b/node_modules/copy-concurrently/node_modules/aproba/LICENSE @@ -0,0 +1,14 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/copy-concurrently/node_modules/aproba/README.md b/node_modules/copy-concurrently/node_modules/aproba/README.md new file mode 100644 index 0000000000000..0bfc594c56a37 --- /dev/null +++ b/node_modules/copy-concurrently/node_modules/aproba/README.md @@ -0,0 +1,94 @@ +aproba +====== + +A ridiculously light-weight function argument validator + +``` +var validate = require("aproba") + +function myfunc(a, b, c) { + // `a` must be a string, `b` a number, `c` a function + validate('SNF', arguments) // [a,b,c] is also valid +} + +myfunc('test', 23, function () {}) // ok +myfunc(123, 23, function () {}) // type error +myfunc('test', 23) // missing arg error +myfunc('test', 23, function () {}, true) // too many args error + +``` + +Valid types are: + +| type | description +| :--: | :---------- +| * | matches any type +| A | `Array.isArray` OR an `arguments` object +| S | typeof == string +| N | typeof == number +| F | typeof == function +| O | typeof == object and not type A and not type E +| B | typeof == boolean +| E | `instanceof Error` OR `null` **(special: see below)** +| Z | == `null` + +Validation failures throw one of three exception types, distinguished by a +`code` property of `EMISSINGARG`, `EINVALIDTYPE` or `ETOOMANYARGS`. + +If you pass in an invalid type then it will throw with a code of +`EUNKNOWNTYPE`. + +If an **error** argument is found and is not null then the remaining +arguments are optional. That is, if you say `ESO` then that's like using a +non-magical `E` in: `E|ESO|ZSO`. + +### But I have optional arguments?! + +You can provide more than one signature by separating them with pipes `|`. +If any signature matches the arguments then they'll be considered valid. + +So for example, say you wanted to write a signature for +`fs.createWriteStream`. The docs for it describe it thusly: + +``` +fs.createWriteStream(path[, options]) +``` + +This would be a signature of `SO|S`. That is, a string and and object, or +just a string. + +Now, if you read the full `fs` docs, you'll see that actually path can ALSO +be a buffer. And options can be a string, that is: +``` +path | +options | +``` + +To reproduce this you have to fully enumerate all of the possible +combinations and that implies a signature of `SO|SS|OO|OS|S|O`. 
The +awkwardness is a feature: It reminds you of the complexity you're adding to +your API when you do this sort of thing. + + +### Browser support + +This has no dependencies and should work in browsers, though you'll have +noisier stack traces. + +### Why this exists + +I wanted a very simple argument validator. It needed to do two things: + +1. Be more concise and easier to use than assertions + +2. Not encourage an infinite bikeshed of DSLs + +This is why types are specified by a single character and there's no such +thing as an optional argument. + +This is not intended to validate user data. This is specifically about +asserting the interface of your functions. + +If you need greater validation, I encourage you to write them by hand or +look elsewhere. + diff --git a/node_modules/copy-concurrently/node_modules/aproba/index.js b/node_modules/copy-concurrently/node_modules/aproba/index.js new file mode 100644 index 0000000000000..6f3f797c09a75 --- /dev/null +++ b/node_modules/copy-concurrently/node_modules/aproba/index.js @@ -0,0 +1,105 @@ +'use strict' + +function isArguments (thingy) { + return thingy != null && typeof thingy === 'object' && thingy.hasOwnProperty('callee') +} + +var types = { + '*': {label: 'any', check: function () { return true }}, + A: {label: 'array', check: function (thingy) { return Array.isArray(thingy) || isArguments(thingy) }}, + S: {label: 'string', check: function (thingy) { return typeof thingy === 'string' }}, + N: {label: 'number', check: function (thingy) { return typeof thingy === 'number' }}, + F: {label: 'function', check: function (thingy) { return typeof thingy === 'function' }}, + O: {label: 'object', check: function (thingy) { return typeof thingy === 'object' && thingy != null && !types.A.check(thingy) && !types.E.check(thingy) }}, + B: {label: 'boolean', check: function (thingy) { return typeof thingy === 'boolean' }}, + E: {label: 'error', check: function (thingy) { return thingy instanceof Error }}, + Z: {label: 'null', check: function (thingy) { return thingy == null }} +} + +function addSchema (schema, arity) { + var group = arity[schema.length] = arity[schema.length] || [] + if (group.indexOf(schema) === -1) group.push(schema) +} + +var validate = module.exports = function (rawSchemas, args) { + if (arguments.length !== 2) throw wrongNumberOfArgs(['SA'], arguments.length) + if (!rawSchemas) throw missingRequiredArg(0, 'rawSchemas') + if (!args) throw missingRequiredArg(1, 'args') + if (!types.S.check(rawSchemas)) throw invalidType(0, ['string'], rawSchemas) + if (!types.A.check(args)) throw invalidType(1, ['array'], args) + var schemas = rawSchemas.split('|') + var arity = {} + + schemas.forEach(function (schema) { + for (var ii = 0; ii < schema.length; ++ii) { + var type = schema[ii] + if (!types[type]) throw unknownType(ii, type) + } + if (/E.*E/.test(schema)) throw moreThanOneError(schema) + addSchema(schema, arity) + if (/E/.test(schema)) { + addSchema(schema.replace(/E.*$/, 'E'), arity) + addSchema(schema.replace(/E/, 'Z'), arity) + if (schema.length === 1) addSchema('', arity) + } + }) + var matching = arity[args.length] + if (!matching) { + throw wrongNumberOfArgs(Object.keys(arity), args.length) + } + for (var ii = 0; ii < args.length; ++ii) { + var newMatching = matching.filter(function (schema) { + var type = schema[ii] + var typeCheck = types[type].check + return typeCheck(args[ii]) + }) + if (!newMatching.length) { + var labels = matching.map(function (schema) { + return types[schema[ii]].label + }).filter(function (schema) { 
return schema != null }) + throw invalidType(ii, labels, args[ii]) + } + matching = newMatching + } +} + +function missingRequiredArg (num) { + return newException('EMISSINGARG', 'Missing required argument #' + (num + 1)) +} + +function unknownType (num, type) { + return newException('EUNKNOWNTYPE', 'Unknown type ' + type + ' in argument #' + (num + 1)) +} + +function invalidType (num, expectedTypes, value) { + var valueType + Object.keys(types).forEach(function (typeCode) { + if (types[typeCode].check(value)) valueType = types[typeCode].label + }) + return newException('EINVALIDTYPE', 'Argument #' + (num + 1) + ': Expected ' + + englishList(expectedTypes) + ' but got ' + valueType) +} + +function englishList (list) { + return list.join(', ').replace(/, ([^,]+)$/, ' or $1') +} + +function wrongNumberOfArgs (expected, got) { + var english = englishList(expected) + var args = expected.every(function (ex) { return ex.length === 1 }) + ? 'argument' + : 'arguments' + return newException('EWRONGARGCOUNT', 'Expected ' + english + ' ' + args + ' but got ' + got) +} + +function moreThanOneError (schema) { + return newException('ETOOMANYERRORTYPES', + 'Only one error type per argument signature is allowed, more than one found in "' + schema + '"') +} + +function newException (code, msg) { + var e = new Error(msg) + e.code = code + if (Error.captureStackTrace) Error.captureStackTrace(e, validate) + return e +} diff --git a/node_modules/copy-concurrently/node_modules/aproba/package.json b/node_modules/copy-concurrently/node_modules/aproba/package.json new file mode 100644 index 0000000000000..e16eea157f345 --- /dev/null +++ b/node_modules/copy-concurrently/node_modules/aproba/package.json @@ -0,0 +1,62 @@ +{ + "_from": "aproba@^1.1.1", + "_id": "aproba@1.2.0", + "_inBundle": false, + "_integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==", + "_location": "/copy-concurrently/aproba", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "aproba@^1.1.1", + "name": "aproba", + "escapedName": "aproba", + "rawSpec": "^1.1.1", + "saveSpec": null, + "fetchSpec": "^1.1.1" + }, + "_requiredBy": [ + "/copy-concurrently" + ], + "_resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "_shasum": "6802e6264efd18c790a1b0d517f0f2627bf2c94a", + "_spec": "aproba@^1.1.1", + "_where": "/Users/aeschright/code/cli/node_modules/copy-concurrently", + "author": { + "name": "Rebecca Turner", + "email": "me@re-becca.org" + }, + "bugs": { + "url": "https://github.com/iarna/aproba/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "A ridiculously light-weight argument validator (now browser friendly)", + "devDependencies": { + "standard": "^10.0.3", + "tap": "^10.0.2" + }, + "directories": { + "test": "test" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/iarna/aproba", + "keywords": [ + "argument", + "validate" + ], + "license": "ISC", + "main": "index.js", + "name": "aproba", + "repository": { + "type": "git", + "url": "git+https://github.com/iarna/aproba.git" + }, + "scripts": { + "test": "standard && tap -j3 test/*.js" + }, + "version": "1.2.0" +} diff --git a/node_modules/block-stream/LICENSE b/node_modules/cross-spawn/node_modules/lru-cache/LICENSE similarity index 100% rename from node_modules/block-stream/LICENSE rename to node_modules/cross-spawn/node_modules/lru-cache/LICENSE diff --git 
a/node_modules/cross-spawn/node_modules/lru-cache/README.md b/node_modules/cross-spawn/node_modules/lru-cache/README.md new file mode 100644 index 0000000000000..d660dd5747abe --- /dev/null +++ b/node_modules/cross-spawn/node_modules/lru-cache/README.md @@ -0,0 +1,158 @@ +# lru cache + +A cache object that deletes the least-recently-used items. + +[![Build Status](https://travis-ci.org/isaacs/node-lru-cache.svg?branch=master)](https://travis-ci.org/isaacs/node-lru-cache) [![Coverage Status](https://coveralls.io/repos/isaacs/node-lru-cache/badge.svg?service=github)](https://coveralls.io/github/isaacs/node-lru-cache) + +## Installation: + +```javascript +npm install lru-cache --save +``` + +## Usage: + +```javascript +var LRU = require("lru-cache") + , options = { max: 500 + , length: function (n, key) { return n * 2 + key.length } + , dispose: function (key, n) { n.close() } + , maxAge: 1000 * 60 * 60 } + , cache = LRU(options) + , otherCache = LRU(50) // sets just the max size + +cache.set("key", "value") +cache.get("key") // "value" + +// non-string keys ARE fully supported +// but note that it must be THE SAME object, not +// just a JSON-equivalent object. +var someObject = { a: 1 } +cache.set(someObject, 'a value') +// Object keys are not toString()-ed +cache.set('[object Object]', 'a different value') +assert.equal(cache.get(someObject), 'a value') +// A similar object with same keys/values won't work, +// because it's a different object identity +assert.equal(cache.get({ a: 1 }), undefined) + +cache.reset() // empty the cache +``` + +If you put more stuff in it, then items will fall out. + +If you try to put an oversized thing in it, then it'll fall out right +away. + +## Options + +* `max` The maximum size of the cache, checked by applying the length + function to all values in the cache. Not setting this is kind of + silly, since that's the whole purpose of this lib, but it defaults + to `Infinity`. +* `maxAge` Maximum age in ms. Items are not pro-actively pruned out + as they age, but if you try to get an item that is too old, it'll + drop it and return undefined instead of giving it to you. +* `length` Function that is used to calculate the length of stored + items. If you're storing strings or buffers, then you probably want + to do something like `function(n, key){return n.length}`. The default is + `function(){return 1}`, which is fine if you want to store `max` + like-sized things. The item is passed as the first argument, and + the key is passed as the second argumnet. +* `dispose` Function that is called on items when they are dropped + from the cache. This can be handy if you want to close file + descriptors or do other cleanup tasks when items are no longer + accessible. Called with `key, value`. It's called *before* + actually removing the item from the internal cache, so if you want + to immediately put it back in, you'll have to do that in a + `nextTick` or `setTimeout` callback or it won't do anything. +* `stale` By default, if you set a `maxAge`, it'll only actually pull + stale items out of the cache when you `get(key)`. (That is, it's + not pre-emptively doing a `setTimeout` or anything.) If you set + `stale:true`, it'll return the stale value before deleting it. If + you don't set this, then it'll return `undefined` when you try to + get a stale entry, as if it had already been deleted. +* `noDisposeOnSet` By default, if you set a `dispose()` method, then + it'll be called whenever a `set()` operation overwrites an existing + key. 
If you set this option, `dispose()` will only be called when a + key falls out of the cache, not when it is overwritten. + +## API + +* `set(key, value, maxAge)` +* `get(key) => value` + + Both of these will update the "recently used"-ness of the key. + They do what you think. `maxAge` is optional and overrides the + cache `maxAge` option if provided. + + If the key is not found, `get()` will return `undefined`. + + The key and val can be any value. + +* `peek(key)` + + Returns the key value (or `undefined` if not found) without + updating the "recently used"-ness of the key. + + (If you find yourself using this a lot, you *might* be using the + wrong sort of data structure, but there are some use cases where + it's handy.) + +* `del(key)` + + Deletes a key out of the cache. + +* `reset()` + + Clear the cache entirely, throwing away all values. + +* `has(key)` + + Check if a key is in the cache, without updating the recent-ness + or deleting it for being stale. + +* `forEach(function(value,key,cache), [thisp])` + + Just like `Array.prototype.forEach`. Iterates over all the keys + in the cache, in order of recent-ness. (Ie, more recently used + items are iterated over first.) + +* `rforEach(function(value,key,cache), [thisp])` + + The same as `cache.forEach(...)` but items are iterated over in + reverse order. (ie, less recently used items are iterated over + first.) + +* `keys()` + + Return an array of the keys in the cache. + +* `values()` + + Return an array of the values in the cache. + +* `length` + + Return total length of objects in cache taking into account + `length` options function. + +* `itemCount` + + Return total quantity of objects currently in cache. Note, that + `stale` (see options) items are returned as part of this item + count. + +* `dump()` + + Return an array of the cache entries ready for serialization and usage + with 'destinationCache.load(arr)`. + +* `load(cacheEntriesArray)` + + Loads another cache entries array, obtained with `sourceCache.dump()`, + into the cache. The destination cache is reset before loading new entries + +* `prune()` + + Manually iterates over the entire cache proactively pruning old entries diff --git a/node_modules/cross-spawn/node_modules/lru-cache/index.js b/node_modules/cross-spawn/node_modules/lru-cache/index.js new file mode 100644 index 0000000000000..bd35b53589381 --- /dev/null +++ b/node_modules/cross-spawn/node_modules/lru-cache/index.js @@ -0,0 +1,468 @@ +'use strict' + +module.exports = LRUCache + +// This will be a proper iterable 'Map' in engines that support it, +// or a fakey-fake PseudoMap in older versions. 
+var Map = require('pseudomap') +var util = require('util') + +// A linked list to keep track of recently-used-ness +var Yallist = require('yallist') + +// use symbols if possible, otherwise just _props +var hasSymbol = typeof Symbol === 'function' && process.env._nodeLRUCacheForceNoSymbol !== '1' +var makeSymbol +if (hasSymbol) { + makeSymbol = function (key) { + return Symbol(key) + } +} else { + makeSymbol = function (key) { + return '_' + key + } +} + +var MAX = makeSymbol('max') +var LENGTH = makeSymbol('length') +var LENGTH_CALCULATOR = makeSymbol('lengthCalculator') +var ALLOW_STALE = makeSymbol('allowStale') +var MAX_AGE = makeSymbol('maxAge') +var DISPOSE = makeSymbol('dispose') +var NO_DISPOSE_ON_SET = makeSymbol('noDisposeOnSet') +var LRU_LIST = makeSymbol('lruList') +var CACHE = makeSymbol('cache') + +function naiveLength () { return 1 } + +// lruList is a yallist where the head is the youngest +// item, and the tail is the oldest. the list contains the Hit +// objects as the entries. +// Each Hit object has a reference to its Yallist.Node. This +// never changes. +// +// cache is a Map (or PseudoMap) that matches the keys to +// the Yallist.Node object. +function LRUCache (options) { + if (!(this instanceof LRUCache)) { + return new LRUCache(options) + } + + if (typeof options === 'number') { + options = { max: options } + } + + if (!options) { + options = {} + } + + var max = this[MAX] = options.max + // Kind of weird to have a default max of Infinity, but oh well. + if (!max || + !(typeof max === 'number') || + max <= 0) { + this[MAX] = Infinity + } + + var lc = options.length || naiveLength + if (typeof lc !== 'function') { + lc = naiveLength + } + this[LENGTH_CALCULATOR] = lc + + this[ALLOW_STALE] = options.stale || false + this[MAX_AGE] = options.maxAge || 0 + this[DISPOSE] = options.dispose + this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false + this.reset() +} + +// resize the cache when the max changes. +Object.defineProperty(LRUCache.prototype, 'max', { + set: function (mL) { + if (!mL || !(typeof mL === 'number') || mL <= 0) { + mL = Infinity + } + this[MAX] = mL + trim(this) + }, + get: function () { + return this[MAX] + }, + enumerable: true +}) + +Object.defineProperty(LRUCache.prototype, 'allowStale', { + set: function (allowStale) { + this[ALLOW_STALE] = !!allowStale + }, + get: function () { + return this[ALLOW_STALE] + }, + enumerable: true +}) + +Object.defineProperty(LRUCache.prototype, 'maxAge', { + set: function (mA) { + if (!mA || !(typeof mA === 'number') || mA < 0) { + mA = 0 + } + this[MAX_AGE] = mA + trim(this) + }, + get: function () { + return this[MAX_AGE] + }, + enumerable: true +}) + +// resize the cache when the lengthCalculator changes. 
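+// e.g., roughly: cache.lengthCalculator = function (n, key) { return n.length }
+// re-measures every cached entry and trims if the total now exceeds max.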
+Object.defineProperty(LRUCache.prototype, 'lengthCalculator', { + set: function (lC) { + if (typeof lC !== 'function') { + lC = naiveLength + } + if (lC !== this[LENGTH_CALCULATOR]) { + this[LENGTH_CALCULATOR] = lC + this[LENGTH] = 0 + this[LRU_LIST].forEach(function (hit) { + hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) + this[LENGTH] += hit.length + }, this) + } + trim(this) + }, + get: function () { return this[LENGTH_CALCULATOR] }, + enumerable: true +}) + +Object.defineProperty(LRUCache.prototype, 'length', { + get: function () { return this[LENGTH] }, + enumerable: true +}) + +Object.defineProperty(LRUCache.prototype, 'itemCount', { + get: function () { return this[LRU_LIST].length }, + enumerable: true +}) + +LRUCache.prototype.rforEach = function (fn, thisp) { + thisp = thisp || this + for (var walker = this[LRU_LIST].tail; walker !== null;) { + var prev = walker.prev + forEachStep(this, fn, walker, thisp) + walker = prev + } +} + +function forEachStep (self, fn, node, thisp) { + var hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) { + hit = undefined + } + } + if (hit) { + fn.call(thisp, hit.value, hit.key, self) + } +} + +LRUCache.prototype.forEach = function (fn, thisp) { + thisp = thisp || this + for (var walker = this[LRU_LIST].head; walker !== null;) { + var next = walker.next + forEachStep(this, fn, walker, thisp) + walker = next + } +} + +LRUCache.prototype.keys = function () { + return this[LRU_LIST].toArray().map(function (k) { + return k.key + }, this) +} + +LRUCache.prototype.values = function () { + return this[LRU_LIST].toArray().map(function (k) { + return k.value + }, this) +} + +LRUCache.prototype.reset = function () { + if (this[DISPOSE] && + this[LRU_LIST] && + this[LRU_LIST].length) { + this[LRU_LIST].forEach(function (hit) { + this[DISPOSE](hit.key, hit.value) + }, this) + } + + this[CACHE] = new Map() // hash of items by key + this[LRU_LIST] = new Yallist() // list of items in order of use recency + this[LENGTH] = 0 // length of items in the list +} + +LRUCache.prototype.dump = function () { + return this[LRU_LIST].map(function (hit) { + if (!isStale(this, hit)) { + return { + k: hit.key, + v: hit.value, + e: hit.now + (hit.maxAge || 0) + } + } + }, this).toArray().filter(function (h) { + return h + }) +} + +LRUCache.prototype.dumpLru = function () { + return this[LRU_LIST] +} + +/* istanbul ignore next */ +LRUCache.prototype.inspect = function (n, opts) { + var str = 'LRUCache {' + var extras = false + + var as = this[ALLOW_STALE] + if (as) { + str += '\n allowStale: true' + extras = true + } + + var max = this[MAX] + if (max && max !== Infinity) { + if (extras) { + str += ',' + } + str += '\n max: ' + util.inspect(max, opts) + extras = true + } + + var maxAge = this[MAX_AGE] + if (maxAge) { + if (extras) { + str += ',' + } + str += '\n maxAge: ' + util.inspect(maxAge, opts) + extras = true + } + + var lc = this[LENGTH_CALCULATOR] + if (lc && lc !== naiveLength) { + if (extras) { + str += ',' + } + str += '\n length: ' + util.inspect(this[LENGTH], opts) + extras = true + } + + var didFirst = false + this[LRU_LIST].forEach(function (item) { + if (didFirst) { + str += ',\n ' + } else { + if (extras) { + str += ',\n' + } + didFirst = true + str += '\n ' + } + var key = util.inspect(item.key).split('\n').join('\n ') + var val = { value: item.value } + if (item.maxAge !== maxAge) { + val.maxAge = item.maxAge + } + if (lc !== naiveLength) { + val.length = item.length + } + if (isStale(this, item)) { + val.stale = 
true + } + + val = util.inspect(val, opts).split('\n').join('\n ') + str += key + ' => ' + val + }) + + if (didFirst || extras) { + str += '\n' + } + str += '}' + + return str +} + +LRUCache.prototype.set = function (key, value, maxAge) { + maxAge = maxAge || this[MAX_AGE] + + var now = maxAge ? Date.now() : 0 + var len = this[LENGTH_CALCULATOR](value, key) + + if (this[CACHE].has(key)) { + if (len > this[MAX]) { + del(this, this[CACHE].get(key)) + return false + } + + var node = this[CACHE].get(key) + var item = node.value + + // dispose of the old one before overwriting + // split out into 2 ifs for better coverage tracking + if (this[DISPOSE]) { + if (!this[NO_DISPOSE_ON_SET]) { + this[DISPOSE](key, item.value) + } + } + + item.now = now + item.maxAge = maxAge + item.value = value + this[LENGTH] += len - item.length + item.length = len + this.get(key) + trim(this) + return true + } + + var hit = new Entry(key, value, len, now, maxAge) + + // oversized objects fall out of cache automatically. + if (hit.length > this[MAX]) { + if (this[DISPOSE]) { + this[DISPOSE](key, value) + } + return false + } + + this[LENGTH] += hit.length + this[LRU_LIST].unshift(hit) + this[CACHE].set(key, this[LRU_LIST].head) + trim(this) + return true +} + +LRUCache.prototype.has = function (key) { + if (!this[CACHE].has(key)) return false + var hit = this[CACHE].get(key).value + if (isStale(this, hit)) { + return false + } + return true +} + +LRUCache.prototype.get = function (key) { + return get(this, key, true) +} + +LRUCache.prototype.peek = function (key) { + return get(this, key, false) +} + +LRUCache.prototype.pop = function () { + var node = this[LRU_LIST].tail + if (!node) return null + del(this, node) + return node.value +} + +LRUCache.prototype.del = function (key) { + del(this, this[CACHE].get(key)) +} + +LRUCache.prototype.load = function (arr) { + // reset the cache + this.reset() + + var now = Date.now() + // A previous serialized cache has the most recent items first + for (var l = arr.length - 1; l >= 0; l--) { + var hit = arr[l] + var expiresAt = hit.e || 0 + if (expiresAt === 0) { + // the item was created without expiration in a non aged cache + this.set(hit.k, hit.v) + } else { + var maxAge = expiresAt - now + // dont add already expired items + if (maxAge > 0) { + this.set(hit.k, hit.v, maxAge) + } + } + } +} + +LRUCache.prototype.prune = function () { + var self = this + this[CACHE].forEach(function (value, key) { + get(self, key, false) + }) +} + +function get (self, key, doUse) { + var node = self[CACHE].get(key) + if (node) { + var hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) hit = undefined + } else { + if (doUse) { + self[LRU_LIST].unshiftNode(node) + } + } + if (hit) hit = hit.value + } + return hit +} + +function isStale (self, hit) { + if (!hit || (!hit.maxAge && !self[MAX_AGE])) { + return false + } + var stale = false + var diff = Date.now() - hit.now + if (hit.maxAge) { + stale = diff > hit.maxAge + } else { + stale = self[MAX_AGE] && (diff > self[MAX_AGE]) + } + return stale +} + +function trim (self) { + if (self[LENGTH] > self[MAX]) { + for (var walker = self[LRU_LIST].tail; + self[LENGTH] > self[MAX] && walker !== null;) { + // We know that we're about to delete this one, and also + // what the next least recently used key will be, so just + // go ahead and set it now. 
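+      // Grab prev before deleting: del() removes the node from the list,
+      // which clears its prev/next links.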
+ var prev = walker.prev + del(self, walker) + walker = prev + } + } +} + +function del (self, node) { + if (node) { + var hit = node.value + if (self[DISPOSE]) { + self[DISPOSE](hit.key, hit.value) + } + self[LENGTH] -= hit.length + self[CACHE].delete(hit.key) + self[LRU_LIST].removeNode(node) + } +} + +// classy, since V8 prefers predictable objects. +function Entry (key, value, length, now, maxAge) { + this.key = key + this.value = value + this.length = length + this.now = now + this.maxAge = maxAge || 0 +} diff --git a/node_modules/cross-spawn/node_modules/lru-cache/package.json b/node_modules/cross-spawn/node_modules/lru-cache/package.json new file mode 100644 index 0000000000000..092742d4d6e4d --- /dev/null +++ b/node_modules/cross-spawn/node_modules/lru-cache/package.json @@ -0,0 +1,71 @@ +{ + "_from": "lru-cache@^4.0.1", + "_id": "lru-cache@4.1.5", + "_inBundle": false, + "_integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "_location": "/cross-spawn/lru-cache", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "lru-cache@^4.0.1", + "name": "lru-cache", + "escapedName": "lru-cache", + "rawSpec": "^4.0.1", + "saveSpec": null, + "fetchSpec": "^4.0.1" + }, + "_requiredBy": [ + "/cross-spawn" + ], + "_resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "_shasum": "8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd", + "_spec": "lru-cache@^4.0.1", + "_where": "/Users/isaacs/dev/npm/cli/node_modules/cross-spawn", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me" + }, + "bugs": { + "url": "https://github.com/isaacs/node-lru-cache/issues" + }, + "bundleDependencies": false, + "dependencies": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + }, + "deprecated": false, + "description": "A cache object that deletes the least-recently-used items.", + "devDependencies": { + "benchmark": "^2.1.4", + "standard": "^12.0.1", + "tap": "^12.1.0" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/isaacs/node-lru-cache#readme", + "keywords": [ + "mru", + "lru", + "cache" + ], + "license": "ISC", + "main": "index.js", + "name": "lru-cache", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-lru-cache.git" + }, + "scripts": { + "coveragerport": "tap --coverage-report=html", + "lintfix": "standard --fix test/*.js index.js", + "postpublish": "git push origin --all; git push origin --tags", + "posttest": "standard test/*.js index.js", + "postversion": "npm publish --tag=legacy", + "preversion": "npm test", + "snap": "TAP_SNAPSHOT=1 tap test/*.js -J", + "test": "tap test/*.js --100 -J" + }, + "version": "4.1.5" +} diff --git a/node_modules/fstream/LICENSE b/node_modules/cross-spawn/node_modules/yallist/LICENSE similarity index 100% rename from node_modules/fstream/LICENSE rename to node_modules/cross-spawn/node_modules/yallist/LICENSE diff --git a/node_modules/tar/node_modules/yallist/README.md b/node_modules/cross-spawn/node_modules/yallist/README.md similarity index 100% rename from node_modules/tar/node_modules/yallist/README.md rename to node_modules/cross-spawn/node_modules/yallist/README.md diff --git a/node_modules/cross-spawn/node_modules/yallist/iterator.js b/node_modules/cross-spawn/node_modules/yallist/iterator.js new file mode 100644 index 0000000000000..4a15bf22c4003 --- /dev/null +++ b/node_modules/cross-spawn/node_modules/yallist/iterator.js @@ -0,0 +1,7 @@ +var Yallist = require('./yallist.js') + 
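+// Installs Symbol.iterator on Yallist so a list can be walked with for..of,
+// e.g., roughly: for (const value of list) { /* head to tail */ }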
+Yallist.prototype[Symbol.iterator] = function* () { + for (let walker = this.head; walker; walker = walker.next) { + yield walker.value + } +} diff --git a/node_modules/cross-spawn/node_modules/yallist/package.json b/node_modules/cross-spawn/node_modules/yallist/package.json new file mode 100644 index 0000000000000..de575bcbb85a8 --- /dev/null +++ b/node_modules/cross-spawn/node_modules/yallist/package.json @@ -0,0 +1,62 @@ +{ + "_from": "yallist@^2.1.2", + "_id": "yallist@2.1.2", + "_inBundle": false, + "_integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=", + "_location": "/cross-spawn/yallist", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "yallist@^2.1.2", + "name": "yallist", + "escapedName": "yallist", + "rawSpec": "^2.1.2", + "saveSpec": null, + "fetchSpec": "^2.1.2" + }, + "_requiredBy": [ + "/cross-spawn/lru-cache" + ], + "_resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "_shasum": "1c11f9218f076089a47dd512f93c6699a6a81d52", + "_spec": "yallist@^2.1.2", + "_where": "/Users/isaacs/dev/npm/cli/node_modules/cross-spawn/node_modules/lru-cache", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/isaacs/yallist/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Yet Another Linked List", + "devDependencies": { + "tap": "^10.3.0" + }, + "directories": { + "test": "test" + }, + "files": [ + "yallist.js", + "iterator.js" + ], + "homepage": "https://github.com/isaacs/yallist#readme", + "license": "ISC", + "main": "yallist.js", + "name": "yallist", + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/yallist.git" + }, + "scripts": { + "postpublish": "git push origin --all; git push origin --tags", + "postversion": "npm publish", + "preversion": "npm test", + "test": "tap test/*.js --100" + }, + "version": "2.1.2" +} diff --git a/node_modules/tar/node_modules/yallist/yallist.js b/node_modules/cross-spawn/node_modules/yallist/yallist.js similarity index 98% rename from node_modules/tar/node_modules/yallist/yallist.js rename to node_modules/cross-spawn/node_modules/yallist/yallist.js index b0ab36cf31b7a..518d23330b936 100644 --- a/node_modules/tar/node_modules/yallist/yallist.js +++ b/node_modules/cross-spawn/node_modules/yallist/yallist.js @@ -1,4 +1,3 @@ -'use strict' module.exports = Yallist Yallist.Node = Node @@ -369,8 +368,3 @@ function Node (value, prev, next, list) { this.next = null } } - -try { - // add if support for Symbol.iterator is present - require('./iterator.js')(Yallist) -} catch (er) {} diff --git a/node_modules/define-properties/.editorconfig b/node_modules/define-properties/.editorconfig new file mode 100644 index 0000000000000..eaa214161f5cd --- /dev/null +++ b/node_modules/define-properties/.editorconfig @@ -0,0 +1,13 @@ +root = true + +[*] +indent_style = tab; +insert_final_newline = true; +quote_type = auto; +space_after_anonymous_functions = true; +space_after_control_statements = true; +spaces_around_operators = true; +trim_trailing_whitespace = true; +spaces_in_brackets = false; +end_of_line = lf; + diff --git a/node_modules/define-properties/.jscs.json b/node_modules/define-properties/.jscs.json new file mode 100644 index 0000000000000..6f2d7f9ff9b1f --- /dev/null +++ b/node_modules/define-properties/.jscs.json @@ -0,0 +1,175 @@ +{ + "es3": true, + + "additionalRules": [], + + "requireSemicolons": true, + + "disallowMultipleSpaces": 
true, + + "disallowIdentifierNames": [], + + "requireCurlyBraces": { + "allExcept": [], + "keywords": ["if", "else", "for", "while", "do", "try", "catch"] + }, + + "requireSpaceAfterKeywords": ["if", "else", "for", "while", "do", "switch", "return", "try", "catch", "function"], + + "disallowSpaceAfterKeywords": [], + + "disallowSpaceBeforeComma": true, + "disallowSpaceAfterComma": false, + "disallowSpaceBeforeSemicolon": true, + + "disallowNodeTypes": [ + "DebuggerStatement", + "LabeledStatement", + "SwitchCase", + "SwitchStatement", + "WithStatement" + ], + + "requireObjectKeysOnNewLine": { "allExcept": ["sameLine"] }, + + "requireSpacesInAnonymousFunctionExpression": { "beforeOpeningRoundBrace": true, "beforeOpeningCurlyBrace": true }, + "requireSpacesInNamedFunctionExpression": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInNamedFunctionExpression": { "beforeOpeningRoundBrace": true }, + "requireSpacesInFunctionDeclaration": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInFunctionDeclaration": { "beforeOpeningRoundBrace": true }, + + "requireSpaceBetweenArguments": true, + + "disallowSpacesInsideParentheses": true, + + "disallowSpacesInsideArrayBrackets": true, + + "disallowQuotedKeysInObjects": { "allExcept": ["reserved"] }, + + "disallowSpaceAfterObjectKeys": true, + + "requireCommaBeforeLineBreak": true, + + "disallowSpaceAfterPrefixUnaryOperators": ["++", "--", "+", "-", "~", "!"], + "requireSpaceAfterPrefixUnaryOperators": [], + + "disallowSpaceBeforePostfixUnaryOperators": ["++", "--"], + "requireSpaceBeforePostfixUnaryOperators": [], + + "disallowSpaceBeforeBinaryOperators": [], + "requireSpaceBeforeBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + + "requireSpaceAfterBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + "disallowSpaceAfterBinaryOperators": [], + + "disallowImplicitTypeConversion": ["binary", "string"], + + "disallowKeywords": ["with", "eval"], + + "requireKeywordsOnNewLine": [], + "disallowKeywordsOnNewLine": ["else"], + + "requireLineFeedAtFileEnd": true, + + "disallowTrailingWhitespace": true, + + "disallowTrailingComma": true, + + "excludeFiles": ["node_modules/**", "vendor/**"], + + "disallowMultipleLineStrings": true, + + "requireDotNotation": { "allExcept": ["keywords"] }, + + "requireParenthesesAroundIIFE": true, + + "validateLineBreaks": "LF", + + "validateQuoteMarks": { + "escape": true, + "mark": "'" + }, + + "disallowOperatorBeforeLineBreak": [], + + "requireSpaceBeforeKeywords": [ + "do", + "for", + "if", + "else", + "switch", + "case", + "try", + "catch", + "finally", + "while", + "with", + "return" + ], + + "validateAlignedFunctionParameters": { + "lineBreakAfterOpeningBraces": true, + "lineBreakBeforeClosingBraces": true + }, + + "requirePaddingNewLinesBeforeExport": true, + + "validateNewlineAfterArrayElements": { + "maximum": 3 + }, + + "requirePaddingNewLinesAfterUseStrict": true, + + "disallowArrowFunctions": true, + + "disallowMultiLineTernary": true, + + "validateOrderInObjectKeys": "asc-insensitive", + + "disallowIdenticalDestructuringNames": true, + + "disallowNestedTernaries": { "maxLevel": 1 }, + + "requireSpaceAfterComma": { "allExcept": ["trailing"] }, + "requireAlignedMultilineParams": false, + + "requireSpacesInGenerator": { + "afterStar": true + }, + + "disallowSpacesInGenerator": { + "beforeStar": true + }, + + "disallowVar": false, + + "requireArrayDestructuring": false, + + "requireEnhancedObjectLiterals": false, + + "requireObjectDestructuring": false, + + 
"requireEarlyReturn": false, + + "requireCapitalizedConstructorsNew": { + "allExcept": ["Function", "String", "Object", "Symbol", "Number", "Date", "RegExp", "Error", "Boolean", "Array"] + }, + + "requireImportAlphabetized": false, + + "requireSpaceBeforeObjectValues": true, + "requireSpaceBeforeDestructuredValues": true, + + "disallowSpacesInsideTemplateStringPlaceholders": true, + + "disallowArrayDestructuringReturn": false, + + "requireNewlineBeforeSingleStatementsInIf": false, + + "disallowUnusedVariables": true, + + "requireSpacesInsideImportedObjectBraces": true, + + "requireUseStrict": true +} + diff --git a/node_modules/define-properties/.travis.yml b/node_modules/define-properties/.travis.yml new file mode 100644 index 0000000000000..ec72d5f3d36b7 --- /dev/null +++ b/node_modules/define-properties/.travis.yml @@ -0,0 +1,233 @@ +language: node_js +os: + - linux +node_js: + - "10.8" + - "9.11" + - "8.11" + - "7.10" + - "6.14" + - "5.12" + - "4.9" + - "iojs-v3.3" + - "iojs-v2.5" + - "iojs-v1.8" + - "0.12" + - "0.10" + - "0.8" +before_install: + - 'case "${TRAVIS_NODE_VERSION}" in 0.*) export NPM_CONFIG_STRICT_SSL=false ;; esac' + - 'nvm install-latest-npm' +install: + - 'if [ "${TRAVIS_NODE_VERSION}" = "0.6" ] || [ "${TRAVIS_NODE_VERSION}" = "0.9" ]; then nvm install --latest-npm 0.8 && npm install && nvm use "${TRAVIS_NODE_VERSION}"; else npm install; fi;' +script: + - 'if [ -n "${PRETEST-}" ]; then npm run pretest ; fi' + - 'if [ -n "${POSTTEST-}" ]; then npm run posttest ; fi' + - 'if [ -n "${COVERAGE-}" ]; then npm run coverage ; fi' + - 'if [ -n "${TEST-}" ]; then npm run tests-only ; fi' +sudo: false +env: + - TEST=true +matrix: + fast_finish: true + include: + - node_js: "lts/*" + env: PRETEST=true + - node_js: "lts/*" + env: POSTTEST=true + - node_js: "4" + env: COVERAGE=true + - node_js: "10.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.8" 
+ env: TEST=true ALLOW_FAILURE=true + - node_js: "7.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.13" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.12" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.6" + env: TEST=true ALLOW_FAILURE=true + - 
node_js: "0.4" + env: TEST=true ALLOW_FAILURE=true + allow_failures: + - os: osx + - env: TEST=true ALLOW_FAILURE=true + - env: COVERAGE=true diff --git a/node_modules/define-properties/CHANGELOG.md b/node_modules/define-properties/CHANGELOG.md new file mode 100644 index 0000000000000..5cad1e26a0194 --- /dev/null +++ b/node_modules/define-properties/CHANGELOG.md @@ -0,0 +1,44 @@ +1.1.3 / 2018-08-14 +================= + * [Refactor] use a for loop instead of `foreach` to make for smaller bundle sizes + * [Robustness] cache `Array.prototype.concat` and `Object.defineProperty` + * [Deps] update `object-keys` + * [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `nsp`, `tape`, `jscs`; remove unused eccheck script + dep + * [Tests] use pretest/posttest for linting/security + * [Tests] fix npm upgrades on older nodes + +1.1.2 / 2015-10-14 +================= + * [Docs] Switch from vb.teelaun.ch to versionbadg.es for the npm version badge SVG + * [Deps] Update `object-keys` + * [Dev Deps] update `jscs`, `tape`, `eslint`, `@ljharb/eslint-config`, `nsp` + * [Tests] up to `io.js` `v3.3`, `node` `v4.2` + +1.1.1 / 2015-07-21 +================= + * [Deps] Update `object-keys` + * [Dev Deps] Update `tape`, `eslint` + * [Tests] Test on `io.js` `v2.4` + +1.1.0 / 2015-07-01 +================= + * [New] Add support for symbol-valued properties. + * [Dev Deps] Update `nsp`, `eslint` + * [Tests] Test up to `io.js` `v2.3` + +1.0.3 / 2015-05-30 +================= + * Using a more reliable check for supported property descriptors. + +1.0.2 / 2015-05-23 +================= + * Test up to `io.js` `v2.0` + * Update `tape`, `jscs`, `nsp`, `eslint`, `object-keys`, `editorconfig-tools`, `covert` + +1.0.1 / 2015-01-06 +================= + * Update `object-keys` to fix ES3 support + +1.0.0 / 2015-01-04 +================= + * v1.0.0 diff --git a/node_modules/define-properties/LICENSE b/node_modules/define-properties/LICENSE new file mode 100644 index 0000000000000..8c271c14b62fa --- /dev/null +++ b/node_modules/define-properties/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (C) 2015 Jordan Harband + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/define-properties/README.md b/node_modules/define-properties/README.md new file mode 100644 index 0000000000000..33b6111f16185 --- /dev/null +++ b/node_modules/define-properties/README.md @@ -0,0 +1,86 @@ +#define-properties [![Version Badge][npm-version-svg]][package-url] + +[![Build Status][travis-svg]][travis-url] +[![dependency status][deps-svg]][deps-url] +[![dev dependency status][dev-deps-svg]][dev-deps-url] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][npm-badge-png]][package-url] + +[![browser support][testling-svg]][testling-url] + +Define multiple non-enumerable properties at once. Uses `Object.defineProperty` when available; falls back to standard assignment in older engines. +Existing properties are not overridden. Accepts a map of property names to a predicate that, when true, force-overrides. + +## Example + +```js +var define = require('define-properties'); +var assert = require('assert'); + +var obj = define({ a: 1, b: 2 }, { + a: 10, + b: 20, + c: 30 +}); +assert(obj.a === 1); +assert(obj.b === 2); +assert(obj.c === 30); +if (define.supportsDescriptors) { + assert.deepEqual(Object.keys(obj), ['a', 'b']); + assert.deepEqual(Object.getOwnPropertyDescriptor(obj, 'c'), { + configurable: true, + enumerable: false, + value: 30, + writable: false + }); +} +``` + +Then, with predicates: +```js +var define = require('define-properties'); +var assert = require('assert'); + +var obj = define({ a: 1, b: 2, c: 3 }, { + a: 10, + b: 20, + c: 30 +}, { + a: function () { return false; }, + b: function () { return true; } +}); +assert(obj.a === 1); +assert(obj.b === 20); +assert(obj.c === 3); +if (define.supportsDescriptors) { + assert.deepEqual(Object.keys(obj), ['a', 'c']); + assert.deepEqual(Object.getOwnPropertyDescriptor(obj, 'b'), { + configurable: true, + enumerable: false, + value: 20, + writable: false + }); +} +``` + +## Tests +Simply clone the repo, `npm install`, and run `npm test` + +[package-url]: https://npmjs.org/package/define-properties +[npm-version-svg]: http://versionbadg.es/ljharb/define-properties.svg +[travis-svg]: https://travis-ci.org/ljharb/define-properties.svg +[travis-url]: https://travis-ci.org/ljharb/define-properties +[deps-svg]: https://david-dm.org/ljharb/define-properties.svg +[deps-url]: https://david-dm.org/ljharb/define-properties +[dev-deps-svg]: https://david-dm.org/ljharb/define-properties/dev-status.svg +[dev-deps-url]: https://david-dm.org/ljharb/define-properties#info=devDependencies +[testling-svg]: https://ci.testling.com/ljharb/define-properties.png +[testling-url]: https://ci.testling.com/ljharb/define-properties +[npm-badge-png]: https://nodei.co/npm/define-properties.png?downloads=true&stars=true +[license-image]: http://img.shields.io/npm/l/define-properties.svg +[license-url]: LICENSE +[downloads-image]: http://img.shields.io/npm/dm/define-properties.svg +[downloads-url]: http://npm-stat.com/charts.html?package=define-properties + diff --git a/node_modules/define-properties/index.js b/node_modules/define-properties/index.js new file mode 100644 index 0000000000000..cb3ae1c7cb398 --- /dev/null +++ b/node_modules/define-properties/index.js @@ -0,0 +1,58 @@ +'use strict'; + +var keys = require('object-keys'); +var hasSymbols = typeof Symbol === 'function' && typeof Symbol('foo') === 'symbol'; + +var toStr = Object.prototype.toString; +var concat = Array.prototype.concat; +var origDefineProperty = Object.defineProperty; + +var 
isFunction = function (fn) { + return typeof fn === 'function' && toStr.call(fn) === '[object Function]'; +}; + +var arePropertyDescriptorsSupported = function () { + var obj = {}; + try { + origDefineProperty(obj, 'x', { enumerable: false, value: obj }); + // eslint-disable-next-line no-unused-vars, no-restricted-syntax + for (var _ in obj) { // jscs:ignore disallowUnusedVariables + return false; + } + return obj.x === obj; + } catch (e) { /* this is IE 8. */ + return false; + } +}; +var supportsDescriptors = origDefineProperty && arePropertyDescriptorsSupported(); + +var defineProperty = function (object, name, value, predicate) { + if (name in object && (!isFunction(predicate) || !predicate())) { + return; + } + if (supportsDescriptors) { + origDefineProperty(object, name, { + configurable: true, + enumerable: false, + value: value, + writable: true + }); + } else { + object[name] = value; + } +}; + +var defineProperties = function (object, map) { + var predicates = arguments.length > 2 ? arguments[2] : {}; + var props = keys(map); + if (hasSymbols) { + props = concat.call(props, Object.getOwnPropertySymbols(map)); + } + for (var i = 0; i < props.length; i += 1) { + defineProperty(object, props[i], map[props[i]], predicates[props[i]]); + } +}; + +defineProperties.supportsDescriptors = !!supportsDescriptors; + +module.exports = defineProperties; diff --git a/node_modules/define-properties/package.json b/node_modules/define-properties/package.json new file mode 100644 index 0000000000000..c6b2f975b775e --- /dev/null +++ b/node_modules/define-properties/package.json @@ -0,0 +1,69 @@ +{ + "name": "define-properties", + "version": "1.1.3", + "author": "Jordan Harband", + "description": "Define multiple non-enumerable properties at once. Uses `Object.defineProperty` when available; falls back to standard assignment in older engines.", + "license": "MIT", + "main": "index.js", + "scripts": { + "pretest": "npm run --silent lint", + "test": "npm run --silent tests-only", + "posttest": "npm run --silent security", + "tests-only": "node test/index.js", + "coverage": "covert test/*.js", + "coverage-quiet": "covert test/*.js --quiet", + "lint": "npm run --silent jscs && npm run --silent eslint", + "jscs": "jscs test/*.js *.js", + "eslint": "eslint test/*.js *.js", + "security": "nsp check" + }, + "repository": { + "type": "git", + "url": "git://github.com/ljharb/define-properties.git" + }, + "keywords": [ + "Object.defineProperty", + "Object.defineProperties", + "object", + "property descriptor", + "descriptor", + "define", + "ES5" + ], + "dependencies": { + "object-keys": "^1.0.12" + }, + "devDependencies": { + "@ljharb/eslint-config": "^13.0.0", + "covert": "^1.1.0", + "eslint": "^5.3.0", + "jscs": "^3.0.7", + "nsp": "^3.2.1", + "tape": "^4.9.0" + }, + "testling": { + "files": "test/index.js", + "browsers": [ + "iexplore/6.0..latest", + "firefox/3.0..6.0", + "firefox/15.0..latest", + "firefox/nightly", + "chrome/4.0..10.0", + "chrome/20.0..latest", + "chrome/canary", + "opera/10.0..latest", + "opera/next", + "safari/4.0..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2" + ] + }, + "engines": { + "node": ">= 0.4" + } + +,"_resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz" +,"_integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==" +,"_from": "define-properties@1.1.3" +} \ No newline at end of file diff --git a/node_modules/define-properties/test/index.js 
b/node_modules/define-properties/test/index.js new file mode 100644 index 0000000000000..3387f6bc7bbe9 --- /dev/null +++ b/node_modules/define-properties/test/index.js @@ -0,0 +1,125 @@ +'use strict'; + +var define = require('../'); +var test = require('tape'); +var keys = require('object-keys'); + +var arePropertyDescriptorsSupported = function () { + var obj = { a: 1 }; + try { + Object.defineProperty(obj, 'x', { value: obj }); + return obj.x === obj; + } catch (e) { /* this is IE 8. */ + return false; + } +}; +var descriptorsSupported = !!Object.defineProperty && arePropertyDescriptorsSupported(); + +var hasSymbols = typeof Symbol === 'function' && typeof Symbol('foo') === 'symbol'; + +test('defineProperties', function (dt) { + dt.test('with descriptor support', { skip: !descriptorsSupported }, function (t) { + var getDescriptor = function (value) { + return { + configurable: true, + enumerable: false, + value: value, + writable: true + }; + }; + + var obj = { + a: 1, + b: 2, + c: 3 + }; + t.deepEqual(keys(obj), ['a', 'b', 'c'], 'all literal-set keys start enumerable'); + define(obj, { + b: 3, + c: 4, + d: 5 + }); + t.deepEqual(obj, { + a: 1, + b: 2, + c: 3 + }, 'existing properties were not overridden'); + t.deepEqual(Object.getOwnPropertyDescriptor(obj, 'd'), getDescriptor(5), 'new property "d" was added and is not enumerable'); + t.deepEqual(['a', 'b', 'c'], keys(obj), 'new keys are not enumerable'); + + define(obj, { + a: 2, + b: 3, + c: 4 + }, { + a: function () { return true; }, + b: function () { return false; } + }); + t.deepEqual(obj, { + b: 2, + c: 3 + }, 'properties only overriden when predicate exists and returns true'); + t.deepEqual(Object.getOwnPropertyDescriptor(obj, 'd'), getDescriptor(5), 'existing property "d" remained and is not enumerable'); + t.deepEqual(Object.getOwnPropertyDescriptor(obj, 'a'), getDescriptor(2), 'existing property "a" was overridden and is not enumerable'); + t.deepEqual(['b', 'c'], keys(obj), 'overridden keys are not enumerable'); + + t.end(); + }); + + dt.test('without descriptor support', { skip: descriptorsSupported }, function (t) { + var obj = { + a: 1, + b: 2, + c: 3 + }; + define(obj, { + b: 3, + c: 4, + d: 5 + }); + t.deepEqual(obj, { + a: 1, + b: 2, + c: 3, + d: 5 + }, 'existing properties were not overridden, new properties were added'); + + define(obj, { + a: 2, + b: 3, + c: 4 + }, { + a: function () { return true; }, + b: function () { return false; } + }); + t.deepEqual(obj, { + a: 2, + b: 2, + c: 3, + d: 5 + }, 'properties only overriden when predicate exists and returns true'); + + t.end(); + }); + + dt.end(); +}); + +test('symbols', { skip: !hasSymbols }, function (t) { + var sym = Symbol('foo'); + var obj = {}; + var aValue = {}; + var bValue = {}; + var properties = { a: aValue }; + properties[sym] = bValue; + + define(obj, properties); + + t.deepEqual(Object.keys(obj), [], 'object has no enumerable keys'); + t.deepEqual(Object.getOwnPropertyNames(obj), ['a'], 'object has non-enumerable "a" key'); + t.deepEqual(Object.getOwnPropertySymbols(obj), [sym], 'object has non-enumerable symbol key'); + t.equal(obj.a, aValue, 'string keyed value is defined'); + t.equal(obj[sym], bValue, 'symbol keyed value is defined'); + + t.end(); +}); diff --git a/node_modules/duplexify/node_modules/readable-stream/.travis.yml b/node_modules/duplexify/node_modules/readable-stream/.travis.yml new file mode 100644 index 0000000000000..40992555bf5cc --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/.travis.yml @@ -0,0 +1,55 @@ 
+sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 + env: TASK=test + - node_js: 9 + env: TASK=test +script: "npm run $TASK" +env: + global: + - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= + - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/node_modules/duplexify/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/duplexify/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 0000000000000..f478d58dca85b --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/duplexify/node_modules/readable-stream/GOVERNANCE.md b/node_modules/duplexify/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 0000000000000..16ffb93f24bec --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. 
+ +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. 
If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/node_modules/duplexify/node_modules/readable-stream/LICENSE b/node_modules/duplexify/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000000000..2873b3b2e5950 --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/node_modules/duplexify/node_modules/readable-stream/README.md b/node_modules/duplexify/node_modules/readable-stream/README.md new file mode 100644 index 0000000000000..23fe3f3e3009a --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/README.md @@ -0,0 +1,58 @@ +# readable-stream + +***Node-core v8.11.1 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) + +```bash +npm install --save readable-stream +``` + +***Node-core streams for userland*** + +This package is a mirror of the Streams2 and Streams3 implementations in +Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.11.1/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. 
+* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + + +## Team Members + +* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> + - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> + - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D +* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> diff --git a/node_modules/duplexify/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/duplexify/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md new file mode 100644 index 0000000000000..83275f192e407 --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md @@ -0,0 +1,60 @@ +# streams WG Meeting 2015-01-30 + +## Links + +* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg +* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 +* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ + +## Agenda + +Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. + +* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) +* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) +* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) +* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) + +## Minutes + +### adopt a charter + +* group: +1's all around + +### What versioning scheme should be adopted? +* group: +1’s 3.0.0 +* domenic+group: pulling in patches from other sources where appropriate +* mikeal: version independently, suggesting versions for io.js +* mikeal+domenic: work with TC to notify in advance of changes +simpler stream creation + +### streamline creation of streams +* sam: streamline creation of streams +* domenic: nice simple solution posted + but, we lose the opportunity to change the model + may not be backwards incompatible (double check keys) + + **action item:** domenic will check + +### remove implicit flowing of streams on(‘data’) +* add isFlowing / isPaused +* mikeal: worrying that we’re documenting polyfill methods – confuses users +* domenic: more reflective API is probably good, with warning labels for users +* new section for mad scientists (reflective stream access) +* calvin: name the “third state” +* mikeal: maybe borrow the name from whatwg? 
+* domenic: we’re missing the “third state” +* consensus: kind of difficult to name the third state +* mikeal: figure out differences in states / compat +* mathias: always flow on data – eliminates third state + * explore what it breaks + +**action items:** +* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream) +* ask rod/build for infrastructure +* **chris**: explore the “flow on data” approach +* add isPaused/isFlowing +* add new docs section +* move isPaused to that section + + diff --git a/node_modules/duplexify/node_modules/readable-stream/duplex-browser.js b/node_modules/duplexify/node_modules/readable-stream/duplex-browser.js new file mode 100644 index 0000000000000..f8b2db83dbe73 --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/duplex-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_duplex.js'); diff --git a/node_modules/duplexify/node_modules/readable-stream/duplex.js b/node_modules/duplexify/node_modules/readable-stream/duplex.js new file mode 100644 index 0000000000000..46924cbfdf538 --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require('./readable').Duplex diff --git a/node_modules/duplexify/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/duplexify/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000000000..a1ca813e5acbd --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,131 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); + + pna.nextTick(cb, err); +}; \ No newline at end of file diff --git a/node_modules/duplexify/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/duplexify/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000000000..a9c835884828d --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,47 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/duplexify/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/duplexify/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000000000..bf34ac65e1108 --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1019 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Readable; + +/**/ +var isArray = require('isarray'); +/**/ + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var debugUtil = require('util'); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = require('./internal/streams/BufferList'); +var destroyImpl = require('./internal/streams/destroy'); +var StringDecoder; + +util.inherits(Readable, Stream); + +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. 
+ this.highWaterMark = Math.floor(this.highWaterMark); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); + +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
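+// For example, a minimal producer sketch (kept inside this comment so it
+// never executes on require) that follows that contract: keep pushing while
+// push() returns true, stop when it returns false, and produce again when
+// _read() is next called. `fetchNextChunk` is a hypothetical synchronous
+// data source returning a string or Buffer, or null once exhausted.
+//
+//   const { Readable } = require('readable-stream');
+//
+//   function sourceStream(fetchNextChunk) {
+//     return new Readable({
+//       read(size) {
+//         // _read() means the internal buffer has room again
+//         let keepGoing = true;
+//         while (keepGoing) {
+//           const chunk = fetchNextChunk();
+//           // push(null) ends the stream; a false return value means the
+//           // highWaterMark was hit, so wait for the next _read() call
+//           keepGoing = this.push(chunk) && chunk !== null;
+//         }
+//       }
+//     });
+//   }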
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } + } + + return needMoreData(state); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. 
+ var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. 
+// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. 
+ function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, unpipeInfo); + }return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this, unpipeInfo); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
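+// For example, a small throttling sketch (kept inside this comment so it
+// never executes on require): attaching a 'data' handler switches the stream
+// into flowing mode, and pause()/resume() let a slow consumer apply manual
+// back-pressure. `slowConsumer` is a hypothetical async sink that returns a
+// Promise.
+//
+//   function throttle(readable, slowConsumer) {
+//     readable.on('data', function (chunk) {
+//       readable.pause();                    // stop 'data' events for now
+//       slowConsumer(chunk).then(function () {
+//         readable.resume();                 // and start them again
+//       });
+//     });
+//     readable.on('end', function () {
+//       console.log('done');
+//     });
+//   }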
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
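+// For example (a worked illustration kept inside this comment): with a
+// buffered list of two chunks 'abc' and 'def' (state.length === 6, no
+// decoder, not in object mode), fromList(4, state) returns the 4-byte
+// Buffer 'abcd': the whole first chunk plus one byte sliced off the second,
+// leaving 'ef' at the head of the list. The partial-read path is handled by
+// fromListPartial() and copyFromBuffer() below; the caller (read(), above)
+// is the one that subtracts n from state.length afterwards.
+//
+//   // state.buffer: [ <Buffer 'abc'>, <Buffer 'def'> ]   state.length: 6
+//   fromList(4, state);   // => <Buffer 61 62 63 64> ('abcd')
+//   // remaining buffered data: 'ef' (now the head of state.buffer)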
+function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/duplexify/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/duplexify/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000000000..5d1f8b876d98c --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,214 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. 
For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); + } + + ts.writechunk = null; + ts.writecb = null; + + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + + cb(er); + + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. 
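+// For example, a minimal sketch (kept inside this comment so it never
+// executes on require): the constructor above also accepts `transform` and
+// `flush` as options, which is the simplest way to follow this contract.
+// The upper-casing and the trailing banner are only illustrative.
+//
+//   const { Transform } = require('readable-stream');
+//
+//   const upcase = new Transform({
+//     transform(chunk, encoding, cb) {
+//       this.push(chunk.toString().toUpperCase());
+//       cb();               // exactly once per written chunk
+//     },
+//     flush(cb) {
+//       this.push('\n-- end of stream --\n');
+//       cb();               // lets the stream finish
+//     }
+//   });
+//
+//   process.stdin.pipe(upcase).pipe(process.stdout);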
+Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; + + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); + + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000000000..b3f4e85a2f6e3 --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,687 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
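+//
+// For example, a minimal sink sketch (kept inside this comment so it never
+// executes on require): `_write` is supplied through the constructor options
+// handled below, and the producer reacts to write() returning false by
+// waiting for 'drain'. `appendToLog` is a hypothetical async persistence
+// helper that calls its callback when done.
+//
+//   const { Writable } = require('readable-stream');
+//
+//   const sink = new Writable({
+//     write(chunk, encoding, cb) {
+//       // cb(err) marks this chunk as handled; an error is re-emitted
+//       // as an 'error' event on the stream
+//       appendToLog(chunk, cb);
+//     }
+//   });
+//
+//   function send(chunk) {
+//     if (!sink.write(chunk)) {
+//       // highWaterMark reached: pause the producer until 'drain'
+//       sink.once('drain', function () { console.log('ok to write again'); });
+//     }
+//   }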
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +/**/ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +var destroyImpl = require('./internal/streams/destroy'); + +util.inherits(Writable, Stream); + +function nop() {} + +function WritableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. 
+ if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + 
asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = corkReq; + } else { + state.corkedRequestsFree = corkReq; + } +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); + +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); +}; \ No newline at end of file diff --git a/node_modules/duplexify/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/duplexify/node_modules/readable-stream/lib/internal/streams/BufferList.js new file mode 100644 index 0000000000000..aefc68bd90b9c --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/lib/internal/streams/BufferList.js @@ -0,0 +1,79 @@ +'use strict'; + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var Buffer = require('safe-buffer').Buffer; +var util = require('util'); + +function copyBuffer(src, target, offset) { + src.copy(target, offset); +} + +module.exports = function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + BufferList.prototype.push = function push(v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + }; + + BufferList.prototype.unshift = function unshift(v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + }; + + BufferList.prototype.shift = function shift() { + if (this.length === 
0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + }; + + BufferList.prototype.clear = function clear() { + this.head = this.tail = null; + this.length = 0; + }; + + BufferList.prototype.join = function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; + }; + + BufferList.prototype.concat = function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + if (this.length === 1) return this.head.data; + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + }; + + return BufferList; +}(); + +if (util && util.inspect && util.inspect.custom) { + module.exports.prototype[util.inspect.custom] = function () { + var obj = util.inspect({ length: this.length }); + return this.constructor.name + ' ' + obj; + }; +} \ No newline at end of file diff --git a/node_modules/duplexify/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/duplexify/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 0000000000000..5a0a0d88cec6f --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,74 @@ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err && (!this._writableState || !this._writableState.errorEmitted)) { + pna.nextTick(emitErrorNT, this, err); + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + pna.nextTick(emitErrorNT, _this, err); + if (_this._writableState) { + _this._writableState.errorEmitted = true; + } + } else if (cb) { + cb(err); + } + }); + + return this; +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy +}; \ No newline at end of file diff --git a/node_modules/duplexify/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/duplexify/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 0000000000000..9332a3fdae706 --- /dev/null +++ 
b/node_modules/duplexify/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/node_modules/duplexify/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/duplexify/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 0000000000000..ce2ad5b6ee57f --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/node_modules/duplexify/node_modules/readable-stream/package.json b/node_modules/duplexify/node_modules/readable-stream/package.json new file mode 100644 index 0000000000000..e0a80537228c9 --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/package.json @@ -0,0 +1,81 @@ +{ + "_from": "readable-stream@^2.0.0", + "_id": "readable-stream@2.3.6", + "_inBundle": false, + "_integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "_location": "/duplexify/readable-stream", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "readable-stream@^2.0.0", + "name": "readable-stream", + "escapedName": "readable-stream", + "rawSpec": "^2.0.0", + "saveSpec": null, + "fetchSpec": "^2.0.0" + }, + "_requiredBy": [ + "/duplexify" + ], + "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "_shasum": "b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf", + "_spec": "readable-stream@^2.0.0", + "_where": "/Users/aeschright/code/cli/node_modules/duplexify", + "browser": { + "util": false, + "./readable.js": "./readable-browser.js", + "./writable.js": "./writable-browser.js", + "./duplex.js": "./duplex-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "bugs": { + "url": "https://github.com/nodejs/readable-stream/issues" + }, + "bundleDependencies": false, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "deprecated": false, + "description": "Streams3, a user-land copy of the stream library from Node.js", + "devDependencies": { + "assert": "^1.4.0", + "babel-polyfill": "^6.9.1", + "buffer": "^4.9.0", + "lolex": "^2.3.2", + "nyc": "^6.4.0", + "tap": "^0.7.0", + "tape": "^4.8.0" + }, + "homepage": "https://github.com/nodejs/readable-stream#readme", + "keywords": [ + "readable", + "stream", + "pipe" + ], + "license": "MIT", + "main": "readable.js", + "name": "readable-stream", + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream.git" + }, + "scripts": { + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js" + }, + "version": "2.3.6" +} diff --git a/node_modules/duplexify/node_modules/readable-stream/passthrough.js b/node_modules/duplexify/node_modules/readable-stream/passthrough.js new file mode 100644 index 0000000000000..ffd791d7ff275 --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = require('./readable').PassThrough diff --git 
a/node_modules/duplexify/node_modules/readable-stream/readable-browser.js b/node_modules/duplexify/node_modules/readable-stream/readable-browser.js new file mode 100644 index 0000000000000..e50372592ee6c --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,7 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/node_modules/duplexify/node_modules/readable-stream/readable.js b/node_modules/duplexify/node_modules/readable-stream/readable.js new file mode 100644 index 0000000000000..ec89ec5330649 --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/readable.js @@ -0,0 +1,19 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; + exports = module.exports = Stream.Readable; + exports.Readable = Stream.Readable; + exports.Writable = Stream.Writable; + exports.Duplex = Stream.Duplex; + exports.Transform = Stream.Transform; + exports.PassThrough = Stream.PassThrough; + exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); +} diff --git a/node_modules/duplexify/node_modules/readable-stream/transform.js b/node_modules/duplexify/node_modules/readable-stream/transform.js new file mode 100644 index 0000000000000..b1baba26da03d --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require('./readable').Transform diff --git a/node_modules/duplexify/node_modules/readable-stream/writable-browser.js b/node_modules/duplexify/node_modules/readable-stream/writable-browser.js new file mode 100644 index 0000000000000..ebdde6a85dcb1 --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/writable-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_writable.js'); diff --git a/node_modules/duplexify/node_modules/readable-stream/writable.js b/node_modules/duplexify/node_modules/readable-stream/writable.js new file mode 100644 index 0000000000000..3211a6f80d1ab --- /dev/null +++ b/node_modules/duplexify/node_modules/readable-stream/writable.js @@ -0,0 +1,8 @@ +var Stream = require("stream") +var Writable = require("./lib/_stream_writable.js") + +if (process.env.READABLE_STREAM === 'disable') { + module.exports = Stream && Stream.Writable || Writable +} else { + module.exports = Writable +} diff --git a/node_modules/duplexify/node_modules/string_decoder/.travis.yml b/node_modules/duplexify/node_modules/string_decoder/.travis.yml new file mode 100644 index 0000000000000..3347a72546505 --- /dev/null +++ b/node_modules/duplexify/node_modules/string_decoder/.travis.yml @@ -0,0 +1,50 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - 
TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 + env: TASK=test + - node_js: 9 + env: TASK=test diff --git a/node_modules/duplexify/node_modules/string_decoder/LICENSE b/node_modules/duplexify/node_modules/string_decoder/LICENSE new file mode 100644 index 0000000000000..778edb20730ef --- /dev/null +++ b/node_modules/duplexify/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+""" + diff --git a/node_modules/duplexify/node_modules/string_decoder/README.md b/node_modules/duplexify/node_modules/string_decoder/README.md new file mode 100644 index 0000000000000..5fd58315ed588 --- /dev/null +++ b/node_modules/duplexify/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. + +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/node_modules/duplexify/node_modules/string_decoder/lib/string_decoder.js b/node_modules/duplexify/node_modules/string_decoder/lib/string_decoder.js new file mode 100644 index 0000000000000..2e89e63f7933e --- /dev/null +++ b/node_modules/duplexify/node_modules/string_decoder/lib/string_decoder.js @@ -0,0 +1,296 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +/**/ + +var Buffer = require('safe-buffer').Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? 
-1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. 
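Editor's note: a quick illustration of the surrogate handling described in the comment above, ahead of the `utf16Text` implementation that follows. This is a sketch only; the package is required by its published name, which outside this nested copy resolves to the same API as Node's built-in `string_decoder`:

```js
const { StringDecoder } = require('string_decoder');

const decoder = new StringDecoder('utf16le');
const emoji = Buffer.from('😀', 'utf16le'); // 4 bytes: a surrogate pair

console.log(decoder.write(emoji.slice(0, 2))); // '' – ends on a lead surrogate, so it is buffered
console.log(decoder.write(emoji.slice(2)));    // '😀' – completed on the next write
```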
+function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} \ No newline at end of file diff --git a/node_modules/duplexify/node_modules/string_decoder/package.json b/node_modules/duplexify/node_modules/string_decoder/package.json new file mode 100644 index 0000000000000..cccb055a2fa29 --- /dev/null +++ b/node_modules/duplexify/node_modules/string_decoder/package.json @@ -0,0 +1,59 @@ +{ + "_from": "string_decoder@~1.1.1", + "_id": "string_decoder@1.1.1", + "_inBundle": false, + "_integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "_location": "/duplexify/string_decoder", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "string_decoder@~1.1.1", + "name": "string_decoder", + "escapedName": "string_decoder", + "rawSpec": "~1.1.1", + "saveSpec": null, + "fetchSpec": "~1.1.1" + }, + "_requiredBy": [ + "/duplexify/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "_shasum": "9cf1611ba62685d7030ae9e4ba34149c3af03fc8", + "_spec": "string_decoder@~1.1.1", + "_where": "/Users/aeschright/code/cli/node_modules/duplexify/node_modules/readable-stream", + "bugs": { + "url": "https://github.com/nodejs/string_decoder/issues" + }, + "bundleDependencies": false, + "dependencies": { + "safe-buffer": "~5.1.0" + }, + "deprecated": false, + "description": "The string_decoder module from Node core", + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT", + "main": "lib/string_decoder.js", + "name": "string_decoder", + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, + "scripts": { + "ci": "tap test/parallel/*.js 
test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "test": "tap test/parallel/*.js && node test/verify-dependencies" + }, + "version": "1.1.1" +} diff --git a/node_modules/env-paths/index.d.ts b/node_modules/env-paths/index.d.ts new file mode 100644 index 0000000000000..e57fa8f661f1f --- /dev/null +++ b/node_modules/env-paths/index.d.ts @@ -0,0 +1,69 @@ +declare namespace envPaths { + export interface Options { + /** + __Don't use this option unless you really have to!__ + + Suffix appended to the project name to avoid name conflicts with native apps. Pass an empty string to disable it. + + @default 'nodejs' + */ + readonly suffix?: string; + } + + export interface Paths { + /** + Directory for data files. + */ + readonly data: string; + + /** + Directory for config files. + */ + readonly config: string; + + /** + Directory for non-essential data files. + */ + readonly cache: string; + + /** + Directory for log files. + */ + readonly log: string; + + /** + Directory for temporary files. + */ + readonly temp: string; + } +} + +declare const envPaths: { + /** + Get paths for storing things like data, config, cache, etc. + + @param name - Name of your project. Used to generate the paths. + @returns The paths to use for your project on current OS. + + @example + ``` + import envPaths = require('env-paths'); + + const paths = envPaths('MyApp'); + + paths.data; + //=> '/home/sindresorhus/.local/share/MyApp-nodejs' + + paths.config + //=> '/home/sindresorhus/.config/MyApp-nodejs' + ``` + */ + (name: string, options?: envPaths.Options): envPaths.Paths; + + // TODO: Remove this for the next major release, refactor the whole definition to: + // declare function envPaths(name: string, options?: envPaths.Options): envPaths.Paths; + // export = envPaths; + default: typeof envPaths; +}; + +export = envPaths; diff --git a/node_modules/env-paths/index.js b/node_modules/env-paths/index.js new file mode 100644 index 0000000000000..7e7b50baa4416 --- /dev/null +++ b/node_modules/env-paths/index.js @@ -0,0 +1,74 @@ +'use strict'; +const path = require('path'); +const os = require('os'); + +const homedir = os.homedir(); +const tmpdir = os.tmpdir(); +const {env} = process; + +const macos = name => { + const library = path.join(homedir, 'Library'); + + return { + data: path.join(library, 'Application Support', name), + config: path.join(library, 'Preferences', name), + cache: path.join(library, 'Caches', name), + log: path.join(library, 'Logs', name), + temp: path.join(tmpdir, name) + }; +}; + +const windows = name => { + const appData = env.APPDATA || path.join(homedir, 'AppData', 'Roaming'); + const localAppData = env.LOCALAPPDATA || path.join(homedir, 'AppData', 'Local'); + + return { + // Data/config/cache/log are invented by me as Windows isn't opinionated about this + data: path.join(localAppData, name, 'Data'), + config: path.join(appData, name, 'Config'), + cache: path.join(localAppData, name, 'Cache'), + log: path.join(localAppData, name, 'Log'), + temp: path.join(tmpdir, name) + }; +}; + +// https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html +const linux = name => { + const username = path.basename(homedir); + + return { + data: path.join(env.XDG_DATA_HOME || path.join(homedir, '.local', 'share'), name), + config: path.join(env.XDG_CONFIG_HOME || path.join(homedir, '.config'), name), + cache: path.join(env.XDG_CACHE_HOME || path.join(homedir, '.cache'), name), + // https://wiki.debian.org/XDGBaseDirectorySpecification#state + log:
path.join(env.XDG_STATE_HOME || path.join(homedir, '.local', 'state'), name), + temp: path.join(tmpdir, username, name) + }; +}; + +const envPaths = (name, options) => { + if (typeof name !== 'string') { + throw new TypeError(`Expected string, got ${typeof name}`); + } + + options = Object.assign({suffix: 'nodejs'}, options); + + if (options.suffix) { + // Add suffix to prevent possible conflict with native apps + name += `-${options.suffix}`; + } + + if (process.platform === 'darwin') { + return macos(name); + } + + if (process.platform === 'win32') { + return windows(name); + } + + return linux(name); +}; + +module.exports = envPaths; +// TODO: Remove this for the next major release +module.exports.default = envPaths; diff --git a/node_modules/env-paths/license b/node_modules/env-paths/license new file mode 100644 index 0000000000000..e7af2f77107d7 --- /dev/null +++ b/node_modules/env-paths/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
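Editor's note: `index.js` above resolves per-platform directories and appends the `suffix` unless it is explicitly disabled. A small usage sketch (the `'my-cli'` name is a placeholder; the commented results show the Linux/XDG branch with `~` standing in for the home directory and the `XDG_*` variables unset):

```js
const envPaths = require('env-paths');

// Default behaviour: the 'nodejs' suffix is appended to avoid clashing with native apps.
const paths = envPaths('my-cli');
// paths.cache => '~/.cache/my-cli-nodejs' on Linux

// Passing an empty suffix disables it entirely.
const bare = envPaths('my-cli', { suffix: '' });
// bare.config => '~/.config/my-cli' on Linux
```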
diff --git a/node_modules/env-paths/package.json b/node_modules/env-paths/package.json new file mode 100644 index 0000000000000..6af6711479dff --- /dev/null +++ b/node_modules/env-paths/package.json @@ -0,0 +1,77 @@ +{ + "_from": "env-paths@^2.2.0", + "_id": "env-paths@2.2.0", + "_inBundle": false, + "_integrity": "sha512-6u0VYSCo/OW6IoD5WCLLy9JUGARbamfSavcNXry/eu8aHVFei6CD3Sw+VGX5alea1i9pgPHW0mbu6Xj0uBh7gA==", + "_location": "/env-paths", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "env-paths@^2.2.0", + "name": "env-paths", + "escapedName": "env-paths", + "rawSpec": "^2.2.0", + "saveSpec": null, + "fetchSpec": "^2.2.0" + }, + "_requiredBy": [ + "/node-gyp" + ], + "_resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.0.tgz", + "_shasum": "cdca557dc009152917d6166e2febe1f039685e43", + "_spec": "env-paths@^2.2.0", + "_where": "/Users/mperrotte/npminc/cli/node_modules/node-gyp", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/env-paths/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Get paths for storing things like data, config, cache, etc", + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.1", + "xo": "^0.24.0" + }, + "engines": { + "node": ">=6" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "homepage": "https://github.com/sindresorhus/env-paths#readme", + "keywords": [ + "common", + "user", + "paths", + "env", + "environment", + "directory", + "dir", + "appdir", + "path", + "data", + "config", + "cache", + "logs", + "temp", + "linux", + "unix" + ], + "license": "MIT", + "name": "env-paths", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/env-paths.git" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "version": "2.2.0" +} diff --git a/node_modules/env-paths/readme.md b/node_modules/env-paths/readme.md new file mode 100644 index 0000000000000..ec3439316f9a1 --- /dev/null +++ b/node_modules/env-paths/readme.md @@ -0,0 +1,76 @@ +# env-paths [![Build Status](https://travis-ci.org/sindresorhus/env-paths.svg?branch=master)](https://travis-ci.org/sindresorhus/env-paths) + +> Get paths for storing things like data, config, cache, etc + +Uses the correct OS-specific paths. Most developers get this wrong. + + +## Install + +``` +$ npm install env-paths +``` + + +## Usage + +```js +const envPaths = require('env-paths'); + +const paths = envPaths('MyApp'); + +paths.data; +//=> '/home/sindresorhus/.local/share/MyApp-nodejs' + +paths.config +//=> '/home/sindresorhus/.config/MyApp-nodejs' +``` + + +## API + +### paths = envPaths(name, [options]) + +#### name + +Type: `string` + +Name of your project. Used to generate the paths. + +#### options + +Type: `Object` + +##### suffix + +Type: `string`
    +Default: `'nodejs'` + +**Don't use this option unless you really have to!**
    +Suffix appended to the project name to avoid name conflicts with native +apps. Pass an empty string to disable it. + +### paths.data + +Directory for data files. + +### paths.config + +Directory for config files. + +### paths.cache + +Directory for non-essential data files. + +### paths.log + +Directory for log files. + +### paths.temp + +Directory for temporary files. + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/es-abstract/.editorconfig b/node_modules/es-abstract/.editorconfig new file mode 100644 index 0000000000000..eaa214161f5cd --- /dev/null +++ b/node_modules/es-abstract/.editorconfig @@ -0,0 +1,13 @@ +root = true + +[*] +indent_style = tab; +insert_final_newline = true; +quote_type = auto; +space_after_anonymous_functions = true; +space_after_control_statements = true; +spaces_around_operators = true; +trim_trailing_whitespace = true; +spaces_in_brackets = false; +end_of_line = lf; + diff --git a/node_modules/es-abstract/.jscs.json b/node_modules/es-abstract/.jscs.json new file mode 100644 index 0000000000000..857f88f1d5f8e --- /dev/null +++ b/node_modules/es-abstract/.jscs.json @@ -0,0 +1,174 @@ +{ + "es3": true, + + "additionalRules": [], + + "requireSemicolons": true, + + "disallowMultipleSpaces": true, + + "disallowIdentifierNames": [], + + "requireCurlyBraces": { + "allExcept": [], + "keywords": ["if", "else", "for", "while", "do", "try", "catch"] + }, + + "requireSpaceAfterKeywords": ["if", "else", "for", "while", "do", "switch", "return", "try", "catch", "function"], + + "disallowSpaceAfterKeywords": [], + + "disallowSpaceBeforeComma": true, + "disallowSpaceAfterComma": false, + "disallowSpaceBeforeSemicolon": true, + + "disallowNodeTypes": [ + "DebuggerStatement", + "ForInStatement", + "LabeledStatement", + "SwitchCase", + "SwitchStatement", + "WithStatement" + ], + + "requireObjectKeysOnNewLine": { "allExcept": ["sameLine"] }, + + "requireSpacesInAnonymousFunctionExpression": { "beforeOpeningRoundBrace": true, "beforeOpeningCurlyBrace": true }, + "requireSpacesInNamedFunctionExpression": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInNamedFunctionExpression": { "beforeOpeningRoundBrace": true }, + "requireSpacesInFunctionDeclaration": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInFunctionDeclaration": { "beforeOpeningRoundBrace": true }, + + "requireSpaceBetweenArguments": true, + + "disallowSpacesInsideParentheses": true, + + "disallowSpacesInsideArrayBrackets": true, + + "disallowQuotedKeysInObjects": { "allExcept": ["reserved"] }, + + "disallowSpaceAfterObjectKeys": true, + + "requireCommaBeforeLineBreak": true, + + "disallowSpaceAfterPrefixUnaryOperators": ["++", "--", "+", "-", "~", "!"], + "requireSpaceAfterPrefixUnaryOperators": [], + + "disallowSpaceBeforePostfixUnaryOperators": ["++", "--"], + "requireSpaceBeforePostfixUnaryOperators": [], + + "disallowSpaceBeforeBinaryOperators": [], + "requireSpaceBeforeBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + + "requireSpaceAfterBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + "disallowSpaceAfterBinaryOperators": [], + + "disallowImplicitTypeConversion": ["binary", "string"], + + "disallowKeywords": ["with", "eval"], + + "requireKeywordsOnNewLine": [], + "disallowKeywordsOnNewLine": ["else"], + + "requireLineFeedAtFileEnd": true, + + "disallowTrailingWhitespace": true, + + "disallowTrailingComma": true, + + "excludeFiles": ["node_modules/**", "vendor/**"], + + "disallowMultipleLineStrings": true, + + 
"requireDotNotation": { "allExcept": ["keywords"] }, + + "requireParenthesesAroundIIFE": true, + + "validateLineBreaks": "LF", + + "validateQuoteMarks": { + "escape": true, + "mark": "'" + }, + + "disallowOperatorBeforeLineBreak": [], + + "requireSpaceBeforeKeywords": [ + "do", + "for", + "if", + "else", + "switch", + "case", + "try", + "catch", + "finally", + "while", + "with", + "return" + ], + + "validateAlignedFunctionParameters": { + "lineBreakAfterOpeningBraces": true, + "lineBreakBeforeClosingBraces": true + }, + + "requirePaddingNewLinesBeforeExport": true, + + "validateNewlineAfterArrayElements": { + "maximum": 9 + }, + + "requirePaddingNewLinesAfterUseStrict": true, + + "disallowArrowFunctions": true, + + "disallowMultiLineTernary": false, + + "validateOrderInObjectKeys": false, + + "disallowIdenticalDestructuringNames": true, + + "disallowNestedTernaries": { "maxLevel": 1 }, + + "requireSpaceAfterComma": { "allExcept": ["trailing"] }, + "requireAlignedMultilineParams": false, + + "requireSpacesInGenerator": { + "afterStar": true + }, + + "disallowSpacesInGenerator": { + "beforeStar": true + }, + + "disallowVar": false, + + "requireArrayDestructuring": false, + + "requireEnhancedObjectLiterals": false, + + "requireObjectDestructuring": false, + + "requireEarlyReturn": false, + + "requireCapitalizedConstructorsNew": false, + + "requireImportAlphabetized": false, + + "requireSpaceBeforeObjectValues": true, + "requireSpaceBeforeDestructuredValues": true, + + "disallowSpacesInsideTemplateStringPlaceholders": true, + + "disallowArrayDestructuringReturn": false, + + "requireNewlineBeforeSingleStatementsInIf": false, + + "disallowUnusedVariables": true, + + "requireSpacesInsideImportedObjectBraces": true, + + "requireUseStrict": true +} + diff --git a/node_modules/es-abstract/.nycrc b/node_modules/es-abstract/.nycrc new file mode 100644 index 0000000000000..1b02cf1848d84 --- /dev/null +++ b/node_modules/es-abstract/.nycrc @@ -0,0 +1,14 @@ +{ + "all": true, + "check-coverage": true, + "reporter": ["text-summary", "text", "html", "json"], + "lines": 87.03, + "statements": 86.87, + "functions": 82.43, + "branches": 76.06, + "exclude": [ + "coverage", + "operations", + "test" + ] +} diff --git a/node_modules/es-abstract/.travis.yml b/node_modules/es-abstract/.travis.yml new file mode 100644 index 0000000000000..ce65a2f98159a --- /dev/null +++ b/node_modules/es-abstract/.travis.yml @@ -0,0 +1,232 @@ +language: node_js +os: + - linux +node_js: + - "10.2" + - "9.11" + - "8.11" + - "7.10" + - "6.14" + - "5.12" + - "4.9" + - "iojs-v3.3" + - "iojs-v2.5" + - "iojs-v1.8" + - "0.12" + - "0.10" + - "0.8" + - "0.6" +cache: + directories: + - "$HOME/.npm" + - "$(nvm cache dir)" + - "$(nvm_version_path $(nvm_version_remote 0.4))" + - "$(nvm_version_path $(nvm_version_remote 0.6))" + - "$(nvm_version_path $(nvm_version_remote 0.10))" +before_install: + - 'case "${TRAVIS_NODE_VERSION}" in 0.*) export NPM_CONFIG_STRICT_SSL=false ;; esac' + - 'nvm install-latest-npm' +install: + - 'if [ "${TRAVIS_NODE_VERSION}" = "0.6" ] || [ "${TRAVIS_NODE_VERSION}" = "0.9" ]; then nvm install --latest-npm 0.8 && npm install && nvm use "${TRAVIS_NODE_VERSION}"; else npm install; fi;' +script: + - 'if [ -n "${PRETEST-}" ]; then npm run pretest ; fi' + - 'if [ -n "${POSTTEST-}" ]; then npm run posttest ; fi' + - 'if [ -n "${COVERAGE-}" ]; then npm run coverage && bash <(curl -s https://codecov.io/bash) -f coverage/*.json; fi' + - 'if [ -n "${TEST-}" ]; then npm run tests-only ; fi' +sudo: false +env: + - TEST=true +matrix: + 
fast_finish: true + include: + - node_js: "lts/*" + env: PRETEST=true + - node_js: "lts/*" + env: POSTTEST=true + - node_js: "0.8" + env: COVERAGE=true + - node_js: "0.12" + env: COVERAGE=true + - node_js: "4" + env: COVERAGE=true + - node_js: "8" + env: COVERAGE=true + - node_js: "10.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.13" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.12" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.0" + env: 
TEST=true ALLOW_FAILURE=true + - node_js: "4.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.4" + env: TEST=true ALLOW_FAILURE=true + allow_failures: + - os: osx + - env: TEST=true ALLOW_FAILURE=true diff --git a/node_modules/es-abstract/CHANGELOG.md b/node_modules/es-abstract/CHANGELOG.md new file mode 100644 index 0000000000000..daa7b660a8cfc --- /dev/null +++ b/node_modules/es-abstract/CHANGELOG.md @@ -0,0 +1,175 @@ +1.12.0 / 2018-05-31 +================= + * [New] add `GetIntrinsic` entry point + * [New] `ES2015`+: add `ObjectCreate` + * [Robustness]: `ES2015+`: ensure `Math.{abs,floor}` and `Function.call` are cached + +1.11.0 / 2018-03-21 +================= + * [New] `ES2015+`: add iterator abstract ops + * [Dev Deps] update `eslint`, `nsp`, `object.assign`, `semver`, `tape` + * [Tests] up to `node` `v9.8`, `v8.10`, `v6.13` + +1.10.0 / 2017-11-24 +================= + * [New] ES2015+: `AdvanceStringIndex` + * [Dev Deps] update `eslint`, `nsp` + * [Tests] require node 0.6 to pass again + * [Tests] up to `node` `v9.2`, `v8.9`, `v6.12`; use `nvm install-latest-npm`; pin included builds to LTS + +1.9.0 / 2017-09-30 +================= + * [New] `es2015+`: add `ArraySpeciesCreate` + * [New] ES2015+: add `CreateDataProperty` and `CreateDataPropertyOrThrow` + * [Tests] consolidate duplicated tests + * [Tests] increase coverage + * [Dev Deps] update `nsp`, `eslint` + +1.8.2 / 2017-09-03 +================= + * [Fix] `es2015`+: `ToNumber`: provide the proper hint for Date objects (#27) + * [Dev Deps] update `eslint` + +1.8.1 / 2017-08-30 +================= + * [Fix] ES2015+: `ToPropertyKey`: should return a symbol for Symbols (#26) + * [Deps] update `function-bind` + * [Dev Deps] update `eslint`, `@ljharb/eslint-config` + * [Docs] github broke markdown parsing + +1.8.0 / 2017-08-04 +================= + * [New] add ES2017 + * [New] move es6+ to es2015+; leave es6/es7 as aliases + * [New] ES5+: add `IsPropertyDescriptor`, `IsAccessorDescriptor`, `IsDataDescriptor`, `IsGenericDescriptor`, 
`FromPropertyDescriptor`, `ToPropertyDescriptor` + * [New] ES2015+: add `CompletePropertyDescriptor`, `Set`, `HasOwnProperty`, `HasProperty`, `IsConcatSpreadable`, `Invoke`, `CreateIterResultObject`, `RegExpExec` + * [Fix] es7/es2016: do not mutate ES6 + * [Fix] assign helper only supports one source + * [Deps] update `is-regex` + * [Dev Deps] update `nsp`, `eslint`, `@ljharb/eslint-config` + * [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `nsp`, `semver`, `tape` + * [Tests] add tests for missing and excess operations + * [Tests] add codecov for coverage + * [Tests] up to `node` `v8.2`, `v7.10`, `v6.11`, `v4.8`; newer npm breaks on older node + * [Tests] use same lists of value types across tests; ensure tests are the same when ops are the same + * [Tests] ES2015: add ToNumber symbol tests + * [Tests] switch to `nyc` for code coverage + * [Tests] make IsRegExp tests consistent across editions + +1.7.0 / 2017-01-22 +================= + * [New] ES6: Add `GetMethod` (#16) + * [New] ES6: Add `GetV` (#16) + * [New] ES6: Add `Get` (#17) + * [Tests] up to `node` `v7.4`, `v6.9`, `v4.6`; improve test matrix + * [Dev Deps] update `tape`, `nsp`, `eslint`, `@ljharb/eslint-config`, `safe-publish-latest` + +1.6.1 / 2016-08-21 +================= + * [Fix] ES6: IsConstructor should return true for `class` constructors. + +1.6.0 / 2016-08-20 +================= + * [New] ES5 / ES6: add `Type` + * [New] ES6: `SpeciesConstructor` + * [Dev Deps] update `jscs`, `nsp`, `eslint`, `@ljharb/eslint-config`, `semver`; add `safe-publish-latest` + * [Tests] up to `node` `v6.4`, `v5.12`, `v4.5` + +1.5.1 / 2016-05-30 +================= + * [Fix] `ES.IsRegExp`: actually look up `Symbol.match` on the argument + * [Refactor] create `isNaN` helper + * [Deps] update `is-callable`, `function-bind` + * [Deps] update `es-to-primitive`, fix ES5 tests + * [Dev Deps] update `jscs`, `eslint`, `@ljharb/eslint-config`, `tape`, `nsp` + * [Tests] up to `node` `v6.2`, `v5.11`, `v4.4` + * [Tests] use pretest/posttest for linting/security + +1.5.0 / 2015-12-27 +================= + * [New] adds `Symbol.toPrimitive` support via `es-to-primitive` + * [Deps] update `is-callable`, `es-to-primitive` + * [Dev Deps] update `jscs`, `nsp`, `eslint`, `@ljharb/eslint-config`, `semver`, `tape` + * [Tests] up to `node` `v5.3` + +1.4.3 / 2015-11-04 +================= + * [Fix] `ES6.ToNumber`: should give `NaN` for explicitly signed hex strings (#4) + * [Refactor] `ES6.ToNumber`: No need to double-trim + * [Refactor] group tests better + * [Tests] should still pass on `node` `v0.8` + +1.4.2 / 2015-11-02 +================= + * [Fix] ensure `ES.ToNumber` trims whitespace, and does not trim non-whitespace (#3) + +1.4.1 / 2015-10-31 +================= + * [Fix] ensure only 0-1 are valid binary and 0-7 are valid octal digits (#2) + * [Dev Deps] update `tape`, `jscs`, `nsp`, `eslint`, `@ljharb/eslint-config` + * [Tests] on `node` `v5.0` + * [Tests] fix npm upgrades for older node versions + * package.json: use object form of "authors", add "contributors" + +1.4.0 / 2015-09-26 +================= + * [Deps] update `is-callable` + * [Dev Deps] update `tape`, `jscs`, `eslint`, `@ljharb/eslint-config` + * [Tests] on `node` `v4.2` + * [New] Add `SameValueNonNumber` to ES7 + +1.3.2 / 2015-09-26 +================= + * [Fix] Fix `ES6.IsRegExp` to properly handle `Symbol.match`, per spec. 
+ * [Tests] up to `io.js` `v3.3`, `node` `v4.1` + * [Dev Deps] update `tape`, `jscs`, `nsp`, `eslint`, `@ljharb/eslint-config`, `semver` + +1.3.1 / 2015-08-15 +================= + * [Fix] Ensure that objects that `toString` to a binary or octal literal also convert properly + +1.3.0 / 2015-08-15 +================= + * [New] ES6’s ToNumber now supports binary and octal literals. + * [Dev Deps] update `jscs`, `eslint`, `@ljharb/eslint-config`, `tape` + * [Docs] Switch from vb.teelaun.ch to versionbadg.es for the npm version badge SVG + * [Tests] up to `io.js` `v3.0` + +1.2.2 / 2015-07-28 +================= + * [Fix] Both `ES5.CheckObjectCoercible` and `ES6.RequireObjectCoercible` return the value if they don't throw. + * [Tests] Test on latest `io.js` versions. + * [Dev Deps] Update `eslint`, `jscs`, `tape`, `semver`, `covert`, `nsp` + +1.2.1 / 2015-03-20 +================= + * Fix `isFinite` helper. + +1.2.0 / 2015-03-19 +================= + * Use `es-to-primitive` for ToPrimitive methods. + * Test on latest `io.js` versions; allow failures on all but 2 latest `node`/`io.js` versions. + +1.1.2 / 2015-03-20 +================= + * Fix isFinite helper. + +1.1.1 / 2015-03-19 +================= + * Fix isPrimitive check for functions + * Update `eslint`, `editorconfig-tools`, `semver`, `nsp` + +1.1.0 / 2015-02-17 +================= + * Add ES7 export (non-default). + * All grade A-supported `node`/`iojs` versions now ship with an `npm` that understands `^`. + * Test on `iojs-v1.2`. + +1.0.1 / 2015-01-30 +================= + * Use `is-callable` instead of an internal function. + * Update `tape`, `jscs`, `nsp`, `eslint` + +1.0.0 / 2015-01-10 +================= + * v1.0.0 diff --git a/node_modules/es-abstract/GetIntrinsic.js b/node_modules/es-abstract/GetIntrinsic.js new file mode 100644 index 0000000000000..62dbf05d6379a --- /dev/null +++ b/node_modules/es-abstract/GetIntrinsic.js @@ -0,0 +1,177 @@ +'use strict'; + +/* globals + Set, + Map, + WeakSet, + WeakMap, + + Promise, + + Symbol, + Proxy, + + Atomics, + SharedArrayBuffer, + + ArrayBuffer, + DataView, + Uint8Array, + Float32Array, + Float64Array, + Int8Array, + Int16Array, + Int32Array, + Uint8ClampedArray, + Uint16Array, + Uint32Array, +*/ + +var undefined; // eslint-disable-line no-shadow-restricted-names + +var ThrowTypeError = Object.getOwnPropertyDescriptor + ? (function () { return Object.getOwnPropertyDescriptor(arguments, 'callee').get; }()) + : function () { throw new TypeError(); }; + +var hasSymbols = typeof Symbol === 'function' && typeof Symbol.iterator === 'symbol'; + +var getProto = Object.getPrototypeOf || function (x) { return x.__proto__; }; // eslint-disable-line no-proto + +var generator; // = function * () {}; +var generatorFunction = generator ? getProto(generator) : undefined; +var asyncFn; // async function() {}; +var asyncFunction = asyncFn ? asyncFn.constructor : undefined; +var asyncGen; // async function * () {}; +var asyncGenFunction = asyncGen ? getProto(asyncGen) : undefined; +var asyncGenIterator = asyncGen ? asyncGen() : undefined; + +var TypedArray = typeof Uint8Array === 'undefined' ? undefined : getProto(Uint8Array); + +var INTRINSICS = { + '$ %Array%': Array, + '$ %ArrayBuffer%': typeof ArrayBuffer === 'undefined' ? undefined : ArrayBuffer, + '$ %ArrayBufferPrototype%': typeof ArrayBuffer === 'undefined' ? undefined : ArrayBuffer.prototype, + '$ %ArrayIteratorPrototype%': hasSymbols ? 
getProto([][Symbol.iterator]()) : undefined, + '$ %ArrayPrototype%': Array.prototype, + '$ %ArrayProto_entries%': Array.prototype.entries, + '$ %ArrayProto_forEach%': Array.prototype.forEach, + '$ %ArrayProto_keys%': Array.prototype.keys, + '$ %ArrayProto_values%': Array.prototype.values, + '$ %AsyncFromSyncIteratorPrototype%': undefined, + '$ %AsyncFunction%': asyncFunction, + '$ %AsyncFunctionPrototype%': asyncFunction ? asyncFunction.prototype : undefined, + '$ %AsyncGenerator%': asyncGen ? getProto(asyncGenIterator) : undefined, + '$ %AsyncGeneratorFunction%': asyncGenFunction, + '$ %AsyncGeneratorPrototype%': asyncGenFunction ? asyncGenFunction.prototype : undefined, + '$ %AsyncIteratorPrototype%': asyncGenIterator && hasSymbols && Symbol.asyncIterator ? asyncGenIterator[Symbol.asyncIterator]() : undefined, + '$ %Atomics%': typeof Atomics === 'undefined' ? undefined : Atomics, + '$ %Boolean%': Boolean, + '$ %BooleanPrototype%': Boolean.prototype, + '$ %DataView%': typeof DataView === 'undefined' ? undefined : DataView, + '$ %DataViewPrototype%': typeof DataView === 'undefined' ? undefined : DataView.prototype, + '$ %Date%': Date, + '$ %DatePrototype%': Date.prototype, + '$ %decodeURI%': decodeURI, + '$ %decodeURIComponent%': decodeURIComponent, + '$ %encodeURI%': encodeURI, + '$ %encodeURIComponent%': encodeURIComponent, + '$ %Error%': Error, + '$ %ErrorPrototype%': Error.prototype, + '$ %eval%': eval, // eslint-disable-line no-eval + '$ %EvalError%': EvalError, + '$ %EvalErrorPrototype%': EvalError.prototype, + '$ %Float32Array%': typeof Float32Array === 'undefined' ? undefined : Float32Array, + '$ %Float32ArrayPrototype%': typeof Float32Array === 'undefined' ? undefined : Float32Array.prototype, + '$ %Float64Array%': typeof Float64Array === 'undefined' ? undefined : Float64Array, + '$ %Float64ArrayPrototype%': typeof Float64Array === 'undefined' ? undefined : Float64Array.prototype, + '$ %Function%': Function, + '$ %FunctionPrototype%': Function.prototype, + '$ %Generator%': generator ? getProto(generator()) : undefined, + '$ %GeneratorFunction%': generatorFunction, + '$ %GeneratorPrototype%': generatorFunction ? generatorFunction.prototype : undefined, + '$ %Int8Array%': typeof Int8Array === 'undefined' ? undefined : Int8Array, + '$ %Int8ArrayPrototype%': typeof Int8Array === 'undefined' ? undefined : Int8Array.prototype, + '$ %Int16Array%': typeof Int16Array === 'undefined' ? undefined : Int16Array, + '$ %Int16ArrayPrototype%': typeof Int16Array === 'undefined' ? undefined : Int8Array.prototype, + '$ %Int32Array%': typeof Int32Array === 'undefined' ? undefined : Int32Array, + '$ %Int32ArrayPrototype%': typeof Int32Array === 'undefined' ? undefined : Int32Array.prototype, + '$ %isFinite%': isFinite, + '$ %isNaN%': isNaN, + '$ %IteratorPrototype%': hasSymbols ? getProto(getProto([][Symbol.iterator]())) : undefined, + '$ %JSON%': JSON, + '$ %JSONParse%': JSON.parse, + '$ %Map%': typeof Map === 'undefined' ? undefined : Map, + '$ %MapIteratorPrototype%': typeof Map === 'undefined' || !hasSymbols ? undefined : getProto(new Map()[Symbol.iterator]()), + '$ %MapPrototype%': typeof Map === 'undefined' ? 
undefined : Map.prototype, + '$ %Math%': Math, + '$ %Number%': Number, + '$ %NumberPrototype%': Number.prototype, + '$ %Object%': Object, + '$ %ObjectPrototype%': Object.prototype, + '$ %ObjProto_toString%': Object.prototype.toString, + '$ %ObjProto_valueOf%': Object.prototype.valueOf, + '$ %parseFloat%': parseFloat, + '$ %parseInt%': parseInt, + '$ %Promise%': typeof Promise === 'undefined' ? undefined : Promise, + '$ %PromisePrototype%': typeof Promise === 'undefined' ? undefined : Promise.prototype, + '$ %PromiseProto_then%': typeof Promise === 'undefined' ? undefined : Promise.prototype.then, + '$ %Promise_all%': typeof Promise === 'undefined' ? undefined : Promise.all, + '$ %Promise_reject%': typeof Promise === 'undefined' ? undefined : Promise.reject, + '$ %Promise_resolve%': typeof Promise === 'undefined' ? undefined : Promise.resolve, + '$ %Proxy%': typeof Proxy === 'undefined' ? undefined : Proxy, + '$ %RangeError%': RangeError, + '$ %RangeErrorPrototype%': RangeError.prototype, + '$ %ReferenceError%': ReferenceError, + '$ %ReferenceErrorPrototype%': ReferenceError.prototype, + '$ %Reflect%': typeof Reflect === 'undefined' ? undefined : Reflect, + '$ %RegExp%': RegExp, + '$ %RegExpPrototype%': RegExp.prototype, + '$ %Set%': typeof Set === 'undefined' ? undefined : Set, + '$ %SetIteratorPrototype%': typeof Set === 'undefined' || !hasSymbols ? undefined : getProto(new Set()[Symbol.iterator]()), + '$ %SetPrototype%': typeof Set === 'undefined' ? undefined : Set.prototype, + '$ %SharedArrayBuffer%': typeof SharedArrayBuffer === 'undefined' ? undefined : SharedArrayBuffer, + '$ %SharedArrayBufferPrototype%': typeof SharedArrayBuffer === 'undefined' ? undefined : SharedArrayBuffer.prototype, + '$ %String%': String, + '$ %StringIteratorPrototype%': hasSymbols ? getProto(''[Symbol.iterator]()) : undefined, + '$ %StringPrototype%': String.prototype, + '$ %Symbol%': hasSymbols ? Symbol : undefined, + '$ %SymbolPrototype%': hasSymbols ? Symbol.prototype : undefined, + '$ %SyntaxError%': SyntaxError, + '$ %SyntaxErrorPrototype%': SyntaxError.prototype, + '$ %ThrowTypeError%': ThrowTypeError, + '$ %TypedArray%': TypedArray, + '$ %TypedArrayPrototype%': TypedArray ? TypedArray.prototype : undefined, + '$ %TypeError%': TypeError, + '$ %TypeErrorPrototype%': TypeError.prototype, + '$ %Uint8Array%': typeof Uint8Array === 'undefined' ? undefined : Uint8Array, + '$ %Uint8ArrayPrototype%': typeof Uint8Array === 'undefined' ? undefined : Uint8Array.prototype, + '$ %Uint8ClampedArray%': typeof Uint8ClampedArray === 'undefined' ? undefined : Uint8ClampedArray, + '$ %Uint8ClampedArrayPrototype%': typeof Uint8ClampedArray === 'undefined' ? undefined : Uint8ClampedArray.prototype, + '$ %Uint16Array%': typeof Uint16Array === 'undefined' ? undefined : Uint16Array, + '$ %Uint16ArrayPrototype%': typeof Uint16Array === 'undefined' ? undefined : Uint16Array.prototype, + '$ %Uint32Array%': typeof Uint32Array === 'undefined' ? undefined : Uint32Array, + '$ %Uint32ArrayPrototype%': typeof Uint32Array === 'undefined' ? undefined : Uint32Array.prototype, + '$ %URIError%': URIError, + '$ %URIErrorPrototype%': URIError.prototype, + '$ %WeakMap%': typeof WeakMap === 'undefined' ? undefined : WeakMap, + '$ %WeakMapPrototype%': typeof WeakMap === 'undefined' ? undefined : WeakMap.prototype, + '$ %WeakSet%': typeof WeakSet === 'undefined' ? undefined : WeakSet, + '$ %WeakSetPrototype%': typeof WeakSet === 'undefined' ? 
undefined : WeakSet.prototype +}; + +module.exports = function GetIntrinsic(name, allowMissing) { + if (arguments.length > 1 && typeof allowMissing !== 'boolean') { + throw new TypeError('"allowMissing" argument must be a boolean'); + } + + var key = '$ ' + name; + if (!(key in INTRINSICS)) { + throw new SyntaxError('intrinsic ' + name + ' does not exist!'); + } + + // istanbul ignore if // hopefully this is impossible to test :-) + if (typeof INTRINSICS[key] === 'undefined' && !allowMissing) { + throw new TypeError('intrinsic ' + name + ' exists, but is not available. Please file an issue!'); + } + return INTRINSICS[key]; +}; diff --git a/node_modules/es-abstract/LICENSE b/node_modules/es-abstract/LICENSE new file mode 100644 index 0000000000000..8c271c14b62fa --- /dev/null +++ b/node_modules/es-abstract/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (C) 2015 Jordan Harband + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/es-abstract/Makefile b/node_modules/es-abstract/Makefile new file mode 100644 index 0000000000000..959bbd49c1def --- /dev/null +++ b/node_modules/es-abstract/Makefile @@ -0,0 +1,61 @@ +# Since we rely on paths relative to the makefile location, abort if make isn't being run from there. +$(if $(findstring /,$(MAKEFILE_LIST)),$(error Please only invoke this makefile from the directory it resides in)) + + # The files that need updating when incrementing the version number. +VERSIONED_FILES := *.js */*.js *.json README* + + +# Add the local npm packages' bin folder to the PATH, so that `make` can find them, when invoked directly. +# Note that rather than using `$(npm bin)` the 'node_modules/.bin' path component is hard-coded, so that invocation works even from an environment +# where npm is (temporarily) unavailable due to having deactivated an nvm instance loaded into the calling shell in order to avoid interference with tests. +export PATH := $(shell printf '%s' "$$PWD/node_modules/.bin:$$PATH") +UTILS := semver +# Make sure that all required utilities can be located. +UTIL_CHECK := $(or $(shell PATH="$(PATH)" which $(UTILS) >/dev/null && echo 'ok'),$(error Did you forget to run `npm install` after cloning the repo? At least one of the required supporting utilities not found: $(UTILS))) + +# Default target (by virtue of being the first non '.'-prefixed in the file). +.PHONY: _no-target-specified +_no-target-specified: + $(error Please specify the target to make - `make list` shows targets. 
Alternatively, use `npm test` to run the default tests; `npm run` shows all tests) + +# Lists all targets defined in this makefile. +.PHONY: list +list: + @$(MAKE) -pRrn : -f $(MAKEFILE_LIST) 2>/dev/null | awk -v RS= -F: '/^# File/,/^# Finished Make data base/ {if ($$1 !~ "^[#.]") {print $$1}}' | command grep -v -e '^[^[:alnum:]]' -e '^$@$$command ' | sort + +# All-tests target: invokes the specified test suites for ALL shells defined in $(SHELLS). +.PHONY: test +test: + @npm test + +.PHONY: _ensure-tag +_ensure-tag: +ifndef TAG + $(error Please invoke with `make TAG= release`, where is either an increment specifier (patch, minor, major, prepatch, preminor, premajor, prerelease), or an explicit major.minor.patch version number) +endif + +CHANGELOG_ERROR = $(error No CHANGELOG specified) +.PHONY: _ensure-changelog +_ensure-changelog: + @ (git status -sb --porcelain | command grep -E '^( M|[MA] ) CHANGELOG.md' > /dev/null) || (echo no CHANGELOG.md specified && exit 2) + +# Ensures that the git workspace is clean. +.PHONY: _ensure-clean +_ensure-clean: + @[ -z "$$((git status --porcelain --untracked-files=no || echo err) | command grep -v 'CHANGELOG.md')" ] || { echo "Workspace is not clean; please commit changes first." >&2; exit 2; } + +# Makes a release; invoke with `make TAG= release`. +.PHONY: release +release: _ensure-tag _ensure-changelog _ensure-clean + @old_ver=`git describe --abbrev=0 --tags --match 'v[0-9]*.[0-9]*.[0-9]*'` || { echo "Failed to determine current version." >&2; exit 1; }; old_ver=$${old_ver#v}; \ + new_ver=`echo "$(TAG)" | sed 's/^v//'`; new_ver=$${new_ver:-patch}; \ + if printf "$$new_ver" | command grep -q '^[0-9]'; then \ + semver "$$new_ver" >/dev/null || { echo 'Invalid version number specified: $(TAG) - must be major.minor.patch' >&2; exit 2; }; \ + semver -r "> $$old_ver" "$$new_ver" >/dev/null || { echo 'Invalid version number specified: $(TAG) - must be HIGHER than current one.' >&2; exit 2; } \ + else \ + new_ver=`semver -i "$$new_ver" "$$old_ver"` || { echo 'Invalid version-increment specifier: $(TAG)' >&2; exit 2; } \ + fi; \ + printf "=== Bumping version **$$old_ver** to **$$new_ver** before committing and tagging:\n=== TYPE 'proceed' TO PROCEED, anything else to abort: " && read response && [ "$$response" = 'proceed' ] || { echo 'Aborted.' >&2; exit 2; }; \ + replace "$$old_ver" "$$new_ver" -- $(VERSIONED_FILES) && \ + git commit -m "v$$new_ver" $(VERSIONED_FILES) CHANGELOG.md && \ + git tag -a -m "v$$new_ver" "v$$new_ver" diff --git a/node_modules/es-abstract/README.md b/node_modules/es-abstract/README.md new file mode 100644 index 0000000000000..0fbf079ccb97d --- /dev/null +++ b/node_modules/es-abstract/README.md @@ -0,0 +1,44 @@ +# es-abstract [![Version Badge][npm-version-svg]][package-url] + +[![Build Status][travis-svg]][travis-url] +[![dependency status][deps-svg]][deps-url] +[![dev dependency status][dev-deps-svg]][dev-deps-url] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][npm-badge-png]][package-url] + +[![browser support][testling-svg]][testling-url] + +ECMAScript spec abstract operations. +When different versions of the spec conflict, the default export will be the latest version of the abstract operation. +All abstract operations will also be available under an `es5`/`es2015`/`es2016` entry point, and exported property, if you require a specific version. 
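For instance, a minimal sketch of those edition-specific entry points and exported properties, assuming the `es5.js`/`es2015.js` files and the `ES5`/`ES2015` properties on `index.js` vendored later in this diff:

```js
// Sketch only: relies on the edition entry points shipped in this package.
var ES = require('es-abstract');        // combined export, latest edition wins on conflicts
var ES5 = require('es-abstract/es5');   // edition-specific entry point

// The combined export also exposes each edition as a property (same module instance).
console.log(ES.ES5 === ES5);            // true
console.log(ES5.Type(null));            // 'Null'
console.log(ES.Type(Symbol()));         // 'Symbol' — the ES2015+ Type handles symbols
                                        // (requires an engine with Symbol support)
```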
+ +## Example + +```js +var ES = require('es-abstract'); +var assert = require('assert'); + +assert(ES.isCallable(function () {})); +assert(!ES.isCallable(/a/g)); +``` + +## Tests +Simply clone the repo, `npm install`, and run `npm test` + +[package-url]: https://npmjs.org/package/es-abstract +[npm-version-svg]: http://versionbadg.es/ljharb/es-abstract.svg +[travis-svg]: https://travis-ci.org/ljharb/es-abstract.svg +[travis-url]: https://travis-ci.org/ljharb/es-abstract +[deps-svg]: https://david-dm.org/ljharb/es-abstract.svg +[deps-url]: https://david-dm.org/ljharb/es-abstract +[dev-deps-svg]: https://david-dm.org/ljharb/es-abstract/dev-status.svg +[dev-deps-url]: https://david-dm.org/ljharb/es-abstract#info=devDependencies +[testling-svg]: https://ci.testling.com/ljharb/es-abstract.png +[testling-url]: https://ci.testling.com/ljharb/es-abstract +[npm-badge-png]: https://nodei.co/npm/es-abstract.png?downloads=true&stars=true +[license-image]: https://img.shields.io/npm/l/es-abstract.svg +[license-url]: LICENSE +[downloads-image]: https://img.shields.io/npm/dm/es-abstract.svg +[downloads-url]: https://npm-stat.com/charts.html?package=es-abstract diff --git a/node_modules/es-abstract/es2015.js b/node_modules/es-abstract/es2015.js new file mode 100644 index 0000000000000..1a4d7de5608db --- /dev/null +++ b/node_modules/es-abstract/es2015.js @@ -0,0 +1,693 @@ +'use strict'; + +var has = require('has'); +var toPrimitive = require('es-to-primitive/es6'); + +var GetIntrinsic = require('./GetIntrinsic'); + +var $TypeError = GetIntrinsic('%TypeError%'); +var $SyntaxError = GetIntrinsic('%SyntaxError%'); +var $Array = GetIntrinsic('%Array%'); +var $String = GetIntrinsic('%String%'); +var $Object = GetIntrinsic('%Object%'); +var $Number = GetIntrinsic('%Number%'); +var $Symbol = GetIntrinsic('%Symbol%', true); +var $RegExp = GetIntrinsic('%RegExp%'); + +var hasSymbols = !!$Symbol; + +var $isNaN = require('./helpers/isNaN'); +var $isFinite = require('./helpers/isFinite'); +var MAX_SAFE_INTEGER = $Number.MAX_SAFE_INTEGER || Math.pow(2, 53) - 1; + +var assign = require('./helpers/assign'); +var sign = require('./helpers/sign'); +var mod = require('./helpers/mod'); +var isPrimitive = require('./helpers/isPrimitive'); +var parseInteger = parseInt; +var bind = require('function-bind'); +var arraySlice = bind.call(Function.call, $Array.prototype.slice); +var strSlice = bind.call(Function.call, $String.prototype.slice); +var isBinary = bind.call(Function.call, $RegExp.prototype.test, /^0b[01]+$/i); +var isOctal = bind.call(Function.call, $RegExp.prototype.test, /^0o[0-7]+$/i); +var regexExec = bind.call(Function.call, $RegExp.prototype.exec); +var nonWS = ['\u0085', '\u200b', '\ufffe'].join(''); +var nonWSregex = new $RegExp('[' + nonWS + ']', 'g'); +var hasNonWS = bind.call(Function.call, $RegExp.prototype.test, nonWSregex); +var invalidHexLiteral = /^[-+]0x[0-9a-f]+$/i; +var isInvalidHexLiteral = bind.call(Function.call, $RegExp.prototype.test, invalidHexLiteral); +var $charCodeAt = bind.call(Function.call, $String.prototype.charCodeAt); + +var toStr = bind.call(Function.call, Object.prototype.toString); + +var $floor = Math.floor; +var $abs = Math.abs; + +var $ObjectCreate = Object.create; +var $gOPD = $Object.getOwnPropertyDescriptor; + +var $isExtensible = $Object.isExtensible; + +// whitespace from: http://es5.github.io/#x15.5.4.20 +// implementation from https://github.com/es-shims/es5-shim/blob/v3.4.0/es5-shim.js#L1304-L1324 +var ws = [ + 
'\x09\x0A\x0B\x0C\x0D\x20\xA0\u1680\u180E\u2000\u2001\u2002\u2003', + '\u2004\u2005\u2006\u2007\u2008\u2009\u200A\u202F\u205F\u3000\u2028', + '\u2029\uFEFF' +].join(''); +var trimRegex = new RegExp('(^[' + ws + ']+)|([' + ws + ']+$)', 'g'); +var replace = bind.call(Function.call, $String.prototype.replace); +var trim = function (value) { + return replace(value, trimRegex, ''); +}; + +var ES5 = require('./es5'); + +var hasRegExpMatcher = require('is-regex'); + +// https://people.mozilla.org/~jorendorff/es6-draft.html#sec-abstract-operations +var ES6 = assign(assign({}, ES5), { + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-call-f-v-args + Call: function Call(F, V) { + var args = arguments.length > 2 ? arguments[2] : []; + if (!this.IsCallable(F)) { + throw new $TypeError(F + ' is not a function'); + } + return F.apply(V, args); + }, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-toprimitive + ToPrimitive: toPrimitive, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-toboolean + // ToBoolean: ES5.ToBoolean, + + // https://ecma-international.org/ecma-262/6.0/#sec-tonumber + ToNumber: function ToNumber(argument) { + var value = isPrimitive(argument) ? argument : toPrimitive(argument, $Number); + if (typeof value === 'symbol') { + throw new $TypeError('Cannot convert a Symbol value to a number'); + } + if (typeof value === 'string') { + if (isBinary(value)) { + return this.ToNumber(parseInteger(strSlice(value, 2), 2)); + } else if (isOctal(value)) { + return this.ToNumber(parseInteger(strSlice(value, 2), 8)); + } else if (hasNonWS(value) || isInvalidHexLiteral(value)) { + return NaN; + } else { + var trimmed = trim(value); + if (trimmed !== value) { + return this.ToNumber(trimmed); + } + } + } + return $Number(value); + }, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-tointeger + // ToInteger: ES5.ToNumber, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-toint32 + // ToInt32: ES5.ToInt32, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-touint32 + // ToUint32: ES5.ToUint32, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-toint16 + ToInt16: function ToInt16(argument) { + var int16bit = this.ToUint16(argument); + return int16bit >= 0x8000 ? int16bit - 0x10000 : int16bit; + }, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-touint16 + // ToUint16: ES5.ToUint16, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-toint8 + ToInt8: function ToInt8(argument) { + var int8bit = this.ToUint8(argument); + return int8bit >= 0x80 ? 
int8bit - 0x100 : int8bit; + }, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-touint8 + ToUint8: function ToUint8(argument) { + var number = this.ToNumber(argument); + if ($isNaN(number) || number === 0 || !$isFinite(number)) { return 0; } + var posInt = sign(number) * $floor($abs(number)); + return mod(posInt, 0x100); + }, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-touint8clamp + ToUint8Clamp: function ToUint8Clamp(argument) { + var number = this.ToNumber(argument); + if ($isNaN(number) || number <= 0) { return 0; } + if (number >= 0xFF) { return 0xFF; } + var f = $floor(argument); + if (f + 0.5 < number) { return f + 1; } + if (number < f + 0.5) { return f; } + if (f % 2 !== 0) { return f + 1; } + return f; + }, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-tostring + ToString: function ToString(argument) { + if (typeof argument === 'symbol') { + throw new $TypeError('Cannot convert a Symbol value to a string'); + } + return $String(argument); + }, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-toobject + ToObject: function ToObject(value) { + this.RequireObjectCoercible(value); + return $Object(value); + }, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-topropertykey + ToPropertyKey: function ToPropertyKey(argument) { + var key = this.ToPrimitive(argument, $String); + return typeof key === 'symbol' ? key : this.ToString(key); + }, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-tolength + ToLength: function ToLength(argument) { + var len = this.ToInteger(argument); + if (len <= 0) { return 0; } // includes converting -0 to +0 + if (len > MAX_SAFE_INTEGER) { return MAX_SAFE_INTEGER; } + return len; + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-canonicalnumericindexstring + CanonicalNumericIndexString: function CanonicalNumericIndexString(argument) { + if (toStr(argument) !== '[object String]') { + throw new $TypeError('must be a string'); + } + if (argument === '-0') { return -0; } + var n = this.ToNumber(argument); + if (this.SameValue(this.ToString(n), argument)) { return n; } + return void 0; + }, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-requireobjectcoercible + RequireObjectCoercible: ES5.CheckObjectCoercible, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-isarray + IsArray: $Array.isArray || function IsArray(argument) { + return toStr(argument) === '[object Array]'; + }, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-iscallable + // IsCallable: ES5.IsCallable, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-isconstructor + IsConstructor: function IsConstructor(argument) { + return typeof argument === 'function' && !!argument.prototype; // unfortunately there's no way to truly check this without try/catch `new argument` + }, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-isextensible-o + IsExtensible: Object.preventExtensions + ? 
function IsExtensible(obj) { + if (isPrimitive(obj)) { + return false; + } + return $isExtensible(obj); + } + : function isExtensible(obj) { return true; }, // eslint-disable-line no-unused-vars + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-isinteger + IsInteger: function IsInteger(argument) { + if (typeof argument !== 'number' || $isNaN(argument) || !$isFinite(argument)) { + return false; + } + var abs = $abs(argument); + return $floor(abs) === abs; + }, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-ispropertykey + IsPropertyKey: function IsPropertyKey(argument) { + return typeof argument === 'string' || typeof argument === 'symbol'; + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-isregexp + IsRegExp: function IsRegExp(argument) { + if (!argument || typeof argument !== 'object') { + return false; + } + if (hasSymbols) { + var isRegExp = argument[$Symbol.match]; + if (typeof isRegExp !== 'undefined') { + return ES5.ToBoolean(isRegExp); + } + } + return hasRegExpMatcher(argument); + }, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-samevalue + // SameValue: ES5.SameValue, + + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-samevaluezero + SameValueZero: function SameValueZero(x, y) { + return (x === y) || ($isNaN(x) && $isNaN(y)); + }, + + /** + * 7.3.2 GetV (V, P) + * 1. Assert: IsPropertyKey(P) is true. + * 2. Let O be ToObject(V). + * 3. ReturnIfAbrupt(O). + * 4. Return O.[[Get]](P, V). + */ + GetV: function GetV(V, P) { + // 7.3.2.1 + if (!this.IsPropertyKey(P)) { + throw new $TypeError('Assertion failed: IsPropertyKey(P) is not true'); + } + + // 7.3.2.2-3 + var O = this.ToObject(V); + + // 7.3.2.4 + return O[P]; + }, + + /** + * 7.3.9 - https://ecma-international.org/ecma-262/6.0/#sec-getmethod + * 1. Assert: IsPropertyKey(P) is true. + * 2. Let func be GetV(O, P). + * 3. ReturnIfAbrupt(func). + * 4. If func is either undefined or null, return undefined. + * 5. If IsCallable(func) is false, throw a TypeError exception. + * 6. Return func. + */ + GetMethod: function GetMethod(O, P) { + // 7.3.9.1 + if (!this.IsPropertyKey(P)) { + throw new $TypeError('Assertion failed: IsPropertyKey(P) is not true'); + } + + // 7.3.9.2 + var func = this.GetV(O, P); + + // 7.3.9.4 + if (func == null) { + return void 0; + } + + // 7.3.9.5 + if (!this.IsCallable(func)) { + throw new $TypeError(P + 'is not a function'); + } + + // 7.3.9.6 + return func; + }, + + /** + * 7.3.1 Get (O, P) - https://ecma-international.org/ecma-262/6.0/#sec-get-o-p + * 1. Assert: Type(O) is Object. + * 2. Assert: IsPropertyKey(P) is true. + * 3. Return O.[[Get]](P, O). 
+ */ + Get: function Get(O, P) { + // 7.3.1.1 + if (this.Type(O) !== 'Object') { + throw new $TypeError('Assertion failed: Type(O) is not Object'); + } + // 7.3.1.2 + if (!this.IsPropertyKey(P)) { + throw new $TypeError('Assertion failed: IsPropertyKey(P) is not true'); + } + // 7.3.1.3 + return O[P]; + }, + + Type: function Type(x) { + if (typeof x === 'symbol') { + return 'Symbol'; + } + return ES5.Type(x); + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-speciesconstructor + SpeciesConstructor: function SpeciesConstructor(O, defaultConstructor) { + if (this.Type(O) !== 'Object') { + throw new $TypeError('Assertion failed: Type(O) is not Object'); + } + var C = O.constructor; + if (typeof C === 'undefined') { + return defaultConstructor; + } + if (this.Type(C) !== 'Object') { + throw new $TypeError('O.constructor is not an Object'); + } + var S = hasSymbols && $Symbol.species ? C[$Symbol.species] : void 0; + if (S == null) { + return defaultConstructor; + } + if (this.IsConstructor(S)) { + return S; + } + throw new $TypeError('no constructor found'); + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-completepropertydescriptor + CompletePropertyDescriptor: function CompletePropertyDescriptor(Desc) { + if (!this.IsPropertyDescriptor(Desc)) { + throw new $TypeError('Desc must be a Property Descriptor'); + } + + if (this.IsGenericDescriptor(Desc) || this.IsDataDescriptor(Desc)) { + if (!has(Desc, '[[Value]]')) { + Desc['[[Value]]'] = void 0; + } + if (!has(Desc, '[[Writable]]')) { + Desc['[[Writable]]'] = false; + } + } else { + if (!has(Desc, '[[Get]]')) { + Desc['[[Get]]'] = void 0; + } + if (!has(Desc, '[[Set]]')) { + Desc['[[Set]]'] = void 0; + } + } + if (!has(Desc, '[[Enumerable]]')) { + Desc['[[Enumerable]]'] = false; + } + if (!has(Desc, '[[Configurable]]')) { + Desc['[[Configurable]]'] = false; + } + return Desc; + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-set-o-p-v-throw + Set: function Set(O, P, V, Throw) { + if (this.Type(O) !== 'Object') { + throw new $TypeError('O must be an Object'); + } + if (!this.IsPropertyKey(P)) { + throw new $TypeError('P must be a Property Key'); + } + if (this.Type(Throw) !== 'Boolean') { + throw new $TypeError('Throw must be a Boolean'); + } + if (Throw) { + O[P] = V; + return true; + } else { + try { + O[P] = V; + } catch (e) { + return false; + } + } + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-hasownproperty + HasOwnProperty: function HasOwnProperty(O, P) { + if (this.Type(O) !== 'Object') { + throw new $TypeError('O must be an Object'); + } + if (!this.IsPropertyKey(P)) { + throw new $TypeError('P must be a Property Key'); + } + return has(O, P); + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-hasproperty + HasProperty: function HasProperty(O, P) { + if (this.Type(O) !== 'Object') { + throw new $TypeError('O must be an Object'); + } + if (!this.IsPropertyKey(P)) { + throw new $TypeError('P must be a Property Key'); + } + return P in O; + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-isconcatspreadable + IsConcatSpreadable: function IsConcatSpreadable(O) { + if (this.Type(O) !== 'Object') { + return false; + } + if (hasSymbols && typeof $Symbol.isConcatSpreadable === 'symbol') { + var spreadable = this.Get(O, Symbol.isConcatSpreadable); + if (typeof spreadable !== 'undefined') { + return this.ToBoolean(spreadable); + } + } + return this.IsArray(O); + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-invoke + Invoke: function Invoke(O, P) { + if 
(!this.IsPropertyKey(P)) { + throw new $TypeError('P must be a Property Key'); + } + var argumentsList = arraySlice(arguments, 2); + var func = this.GetV(O, P); + return this.Call(func, O, argumentsList); + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-getiterator + GetIterator: function GetIterator(obj, method) { + if (!hasSymbols) { + throw new SyntaxError('ES.GetIterator depends on native iterator support.'); + } + + var actualMethod = method; + if (arguments.length < 2) { + actualMethod = this.GetMethod(obj, $Symbol.iterator); + } + var iterator = this.Call(actualMethod, obj); + if (this.Type(iterator) !== 'Object') { + throw new $TypeError('iterator must return an object'); + } + + return iterator; + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-iteratornext + IteratorNext: function IteratorNext(iterator, value) { + var result = this.Invoke(iterator, 'next', arguments.length < 2 ? [] : [value]); + if (this.Type(result) !== 'Object') { + throw new $TypeError('iterator next must return an object'); + } + return result; + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-iteratorcomplete + IteratorComplete: function IteratorComplete(iterResult) { + if (this.Type(iterResult) !== 'Object') { + throw new $TypeError('Assertion failed: Type(iterResult) is not Object'); + } + return this.ToBoolean(this.Get(iterResult, 'done')); + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-iteratorvalue + IteratorValue: function IteratorValue(iterResult) { + if (this.Type(iterResult) !== 'Object') { + throw new $TypeError('Assertion failed: Type(iterResult) is not Object'); + } + return this.Get(iterResult, 'value'); + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-iteratorstep + IteratorStep: function IteratorStep(iterator) { + var result = this.IteratorNext(iterator); + var done = this.IteratorComplete(result); + return done === true ? false : result; + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-iteratorclose + IteratorClose: function IteratorClose(iterator, completion) { + if (this.Type(iterator) !== 'Object') { + throw new $TypeError('Assertion failed: Type(iterator) is not Object'); + } + if (!this.IsCallable(completion)) { + throw new $TypeError('Assertion failed: completion is not a thunk for a Completion Record'); + } + var completionThunk = completion; + + var iteratorReturn = this.GetMethod(iterator, 'return'); + + if (typeof iteratorReturn === 'undefined') { + return completionThunk(); + } + + var completionRecord; + try { + var innerResult = this.Call(iteratorReturn, iterator, []); + } catch (e) { + // if we hit here, then "e" is the innerResult completion that needs re-throwing + + // if the completion is of type "throw", this will throw. + completionRecord = completionThunk(); + completionThunk = null; // ensure it's not called twice. + + // if not, then return the innerResult completion + throw e; + } + completionRecord = completionThunk(); // if innerResult worked, then throw if the completion does + completionThunk = null; // ensure it's not called twice. 
+ + if (this.Type(innerResult) !== 'Object') { + throw new $TypeError('iterator .return must return an object'); + } + + return completionRecord; + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-createiterresultobject + CreateIterResultObject: function CreateIterResultObject(value, done) { + if (this.Type(done) !== 'Boolean') { + throw new $TypeError('Assertion failed: Type(done) is not Boolean'); + } + return { + value: value, + done: done + }; + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-regexpexec + RegExpExec: function RegExpExec(R, S) { + if (this.Type(R) !== 'Object') { + throw new $TypeError('R must be an Object'); + } + if (this.Type(S) !== 'String') { + throw new $TypeError('S must be a String'); + } + var exec = this.Get(R, 'exec'); + if (this.IsCallable(exec)) { + var result = this.Call(exec, R, [S]); + if (result === null || this.Type(result) === 'Object') { + return result; + } + throw new $TypeError('"exec" method must return `null` or an Object'); + } + return regexExec(R, S); + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-arrayspeciescreate + ArraySpeciesCreate: function ArraySpeciesCreate(originalArray, length) { + if (!this.IsInteger(length) || length < 0) { + throw new $TypeError('Assertion failed: length must be an integer >= 0'); + } + var len = length === 0 ? 0 : length; + var C; + var isArray = this.IsArray(originalArray); + if (isArray) { + C = this.Get(originalArray, 'constructor'); + // TODO: figure out how to make a cross-realm normal Array, a same-realm Array + // if (this.IsConstructor(C)) { + // if C is another realm's Array, C = undefined + // Object.getPrototypeOf(Object.getPrototypeOf(Object.getPrototypeOf(Array))) === null ? + // } + if (this.Type(C) === 'Object' && hasSymbols && $Symbol.species) { + C = this.Get(C, $Symbol.species); + if (C === null) { + C = void 0; + } + } + } + if (typeof C === 'undefined') { + return $Array(len); + } + if (!this.IsConstructor(C)) { + throw new $TypeError('C must be a constructor'); + } + return new C(len); // this.Construct(C, len); + }, + + CreateDataProperty: function CreateDataProperty(O, P, V) { + if (this.Type(O) !== 'Object') { + throw new $TypeError('Assertion failed: Type(O) is not Object'); + } + if (!this.IsPropertyKey(P)) { + throw new $TypeError('Assertion failed: IsPropertyKey(P) is not true'); + } + var oldDesc = $gOPD(O, P); + var extensible = oldDesc || (typeof $isExtensible !== 'function' || $isExtensible(O)); + var immutable = oldDesc && (!oldDesc.writable || !oldDesc.configurable); + if (immutable || !extensible) { + return false; + } + var newDesc = { + configurable: true, + enumerable: true, + value: V, + writable: true + }; + Object.defineProperty(O, P, newDesc); + return true; + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-createdatapropertyorthrow + CreateDataPropertyOrThrow: function CreateDataPropertyOrThrow(O, P, V) { + if (this.Type(O) !== 'Object') { + throw new $TypeError('Assertion failed: Type(O) is not Object'); + } + if (!this.IsPropertyKey(P)) { + throw new $TypeError('Assertion failed: IsPropertyKey(P) is not true'); + } + var success = this.CreateDataProperty(O, P, V); + if (!success) { + throw new $TypeError('unable to create data property'); + } + return success; + }, + + // https://www.ecma-international.org/ecma-262/6.0/#sec-objectcreate + ObjectCreate: function ObjectCreate(proto, internalSlotsList) { + if (proto !== null && this.Type(proto) !== 'Object') { + throw new $TypeError('Assertion failed: proto must be null or an 
object'); + } + var slots = arguments.length < 2 ? [] : internalSlotsList; + if (slots.length > 0) { + throw new $SyntaxError('es-abstract does not yet support internal slots'); + } + + if (proto === null && !$ObjectCreate) { + throw new $SyntaxError('native Object.create support is required to create null objects'); + } + + return $ObjectCreate(proto); + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-advancestringindex + AdvanceStringIndex: function AdvanceStringIndex(S, index, unicode) { + if (this.Type(S) !== 'String') { + throw new $TypeError('S must be a String'); + } + if (!this.IsInteger(index) || index < 0 || index > MAX_SAFE_INTEGER) { + throw new $TypeError('Assertion failed: length must be an integer >= 0 and <= 2**53'); + } + if (this.Type(unicode) !== 'Boolean') { + throw new $TypeError('Assertion failed: unicode must be a Boolean'); + } + if (!unicode) { + return index + 1; + } + var length = S.length; + if ((index + 1) >= length) { + return index + 1; + } + + var first = $charCodeAt(S, index); + if (first < 0xD800 || first > 0xDBFF) { + return index + 1; + } + + var second = $charCodeAt(S, index + 1); + if (second < 0xDC00 || second > 0xDFFF) { + return index + 1; + } + + return index + 2; + } +}); + +delete ES6.CheckObjectCoercible; // renamed in ES6 to RequireObjectCoercible + +module.exports = ES6; diff --git a/node_modules/es-abstract/es2016.js b/node_modules/es-abstract/es2016.js new file mode 100644 index 0000000000000..c9166cea7f842 --- /dev/null +++ b/node_modules/es-abstract/es2016.js @@ -0,0 +1,16 @@ +'use strict'; + +var ES2015 = require('./es2015'); +var assign = require('./helpers/assign'); + +var ES2016 = assign(assign({}, ES2015), { + // https://github.com/tc39/ecma262/pull/60 + SameValueNonNumber: function SameValueNonNumber(x, y) { + if (typeof x === 'number' || typeof x !== typeof y) { + throw new TypeError('SameValueNonNumber requires two non-number values of the same type.'); + } + return this.SameValue(x, y); + } +}); + +module.exports = ES2016; diff --git a/node_modules/es-abstract/es2017.js b/node_modules/es-abstract/es2017.js new file mode 100644 index 0000000000000..af3ff480663bb --- /dev/null +++ b/node_modules/es-abstract/es2017.js @@ -0,0 +1,25 @@ +'use strict'; + +var ES2016 = require('./es2016'); +var assign = require('./helpers/assign'); + +var ES2017 = assign(assign({}, ES2016), { + ToIndex: function ToIndex(value) { + if (typeof value === 'undefined') { + return 0; + } + var integerIndex = this.ToInteger(value); + if (integerIndex < 0) { + throw new RangeError('index must be >= 0'); + } + var index = this.ToLength(integerIndex); + if (!this.SameValueZero(integerIndex, index)) { + throw new RangeError('index must be >= 0 and < 2 ** 53 - 1'); + } + return index; + } +}); + +delete ES2017.EnumerableOwnNames; // replaced with EnumerableOwnProperties + +module.exports = ES2017; diff --git a/node_modules/es-abstract/es5.js b/node_modules/es-abstract/es5.js new file mode 100644 index 0000000000000..3af7e7ea3d5cf --- /dev/null +++ b/node_modules/es-abstract/es5.js @@ -0,0 +1,242 @@ +'use strict'; + +var GetIntrinsic = require('./GetIntrinsic'); + +var $Object = GetIntrinsic('%Object%'); +var $TypeError = GetIntrinsic('%TypeError%'); +var $String = GetIntrinsic('%String%'); + +var $isNaN = require('./helpers/isNaN'); +var $isFinite = require('./helpers/isFinite'); + +var sign = require('./helpers/sign'); +var mod = require('./helpers/mod'); + +var IsCallable = require('is-callable'); +var toPrimitive = require('es-to-primitive/es5'); + 
+var has = require('has'); + +// https://es5.github.io/#x9 +var ES5 = { + ToPrimitive: toPrimitive, + + ToBoolean: function ToBoolean(value) { + return !!value; + }, + ToNumber: function ToNumber(value) { + return +value; // eslint-disable-line no-implicit-coercion + }, + ToInteger: function ToInteger(value) { + var number = this.ToNumber(value); + if ($isNaN(number)) { return 0; } + if (number === 0 || !$isFinite(number)) { return number; } + return sign(number) * Math.floor(Math.abs(number)); + }, + ToInt32: function ToInt32(x) { + return this.ToNumber(x) >> 0; + }, + ToUint32: function ToUint32(x) { + return this.ToNumber(x) >>> 0; + }, + ToUint16: function ToUint16(value) { + var number = this.ToNumber(value); + if ($isNaN(number) || number === 0 || !$isFinite(number)) { return 0; } + var posInt = sign(number) * Math.floor(Math.abs(number)); + return mod(posInt, 0x10000); + }, + ToString: function ToString(value) { + return $String(value); + }, + ToObject: function ToObject(value) { + this.CheckObjectCoercible(value); + return $Object(value); + }, + CheckObjectCoercible: function CheckObjectCoercible(value, optMessage) { + /* jshint eqnull:true */ + if (value == null) { + throw new $TypeError(optMessage || 'Cannot call method on ' + value); + } + return value; + }, + IsCallable: IsCallable, + SameValue: function SameValue(x, y) { + if (x === y) { // 0 === -0, but they are not identical. + if (x === 0) { return 1 / x === 1 / y; } + return true; + } + return $isNaN(x) && $isNaN(y); + }, + + // https://www.ecma-international.org/ecma-262/5.1/#sec-8 + Type: function Type(x) { + if (x === null) { + return 'Null'; + } + if (typeof x === 'undefined') { + return 'Undefined'; + } + if (typeof x === 'function' || typeof x === 'object') { + return 'Object'; + } + if (typeof x === 'number') { + return 'Number'; + } + if (typeof x === 'boolean') { + return 'Boolean'; + } + if (typeof x === 'string') { + return 'String'; + } + }, + + // https://ecma-international.org/ecma-262/6.0/#sec-property-descriptor-specification-type + IsPropertyDescriptor: function IsPropertyDescriptor(Desc) { + if (this.Type(Desc) !== 'Object') { + return false; + } + var allowed = { + '[[Configurable]]': true, + '[[Enumerable]]': true, + '[[Get]]': true, + '[[Set]]': true, + '[[Value]]': true, + '[[Writable]]': true + }; + // jscs:disable + for (var key in Desc) { // eslint-disable-line + if (has(Desc, key) && !allowed[key]) { + return false; + } + } + // jscs:enable + var isData = has(Desc, '[[Value]]'); + var IsAccessor = has(Desc, '[[Get]]') || has(Desc, '[[Set]]'); + if (isData && IsAccessor) { + throw new $TypeError('Property Descriptors may not be both accessor and data descriptors'); + } + return true; + }, + + // https://ecma-international.org/ecma-262/5.1/#sec-8.10.1 + IsAccessorDescriptor: function IsAccessorDescriptor(Desc) { + if (typeof Desc === 'undefined') { + return false; + } + + if (!this.IsPropertyDescriptor(Desc)) { + throw new $TypeError('Desc must be a Property Descriptor'); + } + + if (!has(Desc, '[[Get]]') && !has(Desc, '[[Set]]')) { + return false; + } + + return true; + }, + + // https://ecma-international.org/ecma-262/5.1/#sec-8.10.2 + IsDataDescriptor: function IsDataDescriptor(Desc) { + if (typeof Desc === 'undefined') { + return false; + } + + if (!this.IsPropertyDescriptor(Desc)) { + throw new $TypeError('Desc must be a Property Descriptor'); + } + + if (!has(Desc, '[[Value]]') && !has(Desc, '[[Writable]]')) { + return false; + } + + return true; + }, + + // 
https://ecma-international.org/ecma-262/5.1/#sec-8.10.3 + IsGenericDescriptor: function IsGenericDescriptor(Desc) { + if (typeof Desc === 'undefined') { + return false; + } + + if (!this.IsPropertyDescriptor(Desc)) { + throw new $TypeError('Desc must be a Property Descriptor'); + } + + if (!this.IsAccessorDescriptor(Desc) && !this.IsDataDescriptor(Desc)) { + return true; + } + + return false; + }, + + // https://ecma-international.org/ecma-262/5.1/#sec-8.10.4 + FromPropertyDescriptor: function FromPropertyDescriptor(Desc) { + if (typeof Desc === 'undefined') { + return Desc; + } + + if (!this.IsPropertyDescriptor(Desc)) { + throw new $TypeError('Desc must be a Property Descriptor'); + } + + if (this.IsDataDescriptor(Desc)) { + return { + value: Desc['[[Value]]'], + writable: !!Desc['[[Writable]]'], + enumerable: !!Desc['[[Enumerable]]'], + configurable: !!Desc['[[Configurable]]'] + }; + } else if (this.IsAccessorDescriptor(Desc)) { + return { + get: Desc['[[Get]]'], + set: Desc['[[Set]]'], + enumerable: !!Desc['[[Enumerable]]'], + configurable: !!Desc['[[Configurable]]'] + }; + } else { + throw new $TypeError('FromPropertyDescriptor must be called with a fully populated Property Descriptor'); + } + }, + + // https://ecma-international.org/ecma-262/5.1/#sec-8.10.5 + ToPropertyDescriptor: function ToPropertyDescriptor(Obj) { + if (this.Type(Obj) !== 'Object') { + throw new $TypeError('ToPropertyDescriptor requires an object'); + } + + var desc = {}; + if (has(Obj, 'enumerable')) { + desc['[[Enumerable]]'] = this.ToBoolean(Obj.enumerable); + } + if (has(Obj, 'configurable')) { + desc['[[Configurable]]'] = this.ToBoolean(Obj.configurable); + } + if (has(Obj, 'value')) { + desc['[[Value]]'] = Obj.value; + } + if (has(Obj, 'writable')) { + desc['[[Writable]]'] = this.ToBoolean(Obj.writable); + } + if (has(Obj, 'get')) { + var getter = Obj.get; + if (typeof getter !== 'undefined' && !this.IsCallable(getter)) { + throw new TypeError('getter must be a function'); + } + desc['[[Get]]'] = getter; + } + if (has(Obj, 'set')) { + var setter = Obj.set; + if (typeof setter !== 'undefined' && !this.IsCallable(setter)) { + throw new $TypeError('setter must be a function'); + } + desc['[[Set]]'] = setter; + } + + if ((has(desc, '[[Get]]') || has(desc, '[[Set]]')) && (has(desc, '[[Value]]') || has(desc, '[[Writable]]'))) { + throw new $TypeError('Invalid property descriptor. 
Cannot both specify accessors and a value or writable attribute'); + } + return desc; + } +}; + +module.exports = ES5; diff --git a/node_modules/es-abstract/es6.js b/node_modules/es-abstract/es6.js new file mode 100644 index 0000000000000..2d1f4dc927a90 --- /dev/null +++ b/node_modules/es-abstract/es6.js @@ -0,0 +1,3 @@ +'use strict'; + +module.exports = require('./es2015'); diff --git a/node_modules/es-abstract/es7.js b/node_modules/es-abstract/es7.js new file mode 100644 index 0000000000000..f2f15c0a88712 --- /dev/null +++ b/node_modules/es-abstract/es7.js @@ -0,0 +1,3 @@ +'use strict'; + +module.exports = require('./es2016'); diff --git a/node_modules/es-abstract/helpers/assign.js b/node_modules/es-abstract/helpers/assign.js new file mode 100644 index 0000000000000..2533d20a36195 --- /dev/null +++ b/node_modules/es-abstract/helpers/assign.js @@ -0,0 +1,17 @@ +var bind = require('function-bind'); +var has = bind.call(Function.call, Object.prototype.hasOwnProperty); + +var $assign = Object.assign; + +module.exports = function assign(target, source) { + if ($assign) { + return $assign(target, source); + } + + for (var key in source) { + if (has(source, key)) { + target[key] = source[key]; + } + } + return target; +}; diff --git a/node_modules/es-abstract/helpers/isFinite.js b/node_modules/es-abstract/helpers/isFinite.js new file mode 100644 index 0000000000000..46585376bbee5 --- /dev/null +++ b/node_modules/es-abstract/helpers/isFinite.js @@ -0,0 +1,3 @@ +var $isNaN = Number.isNaN || function (a) { return a !== a; }; + +module.exports = Number.isFinite || function (x) { return typeof x === 'number' && !$isNaN(x) && x !== Infinity && x !== -Infinity; }; diff --git a/node_modules/es-abstract/helpers/isNaN.js b/node_modules/es-abstract/helpers/isNaN.js new file mode 100644 index 0000000000000..e4d4f95f316dd --- /dev/null +++ b/node_modules/es-abstract/helpers/isNaN.js @@ -0,0 +1,3 @@ +module.exports = Number.isNaN || function isNaN(a) { + return a !== a; +}; diff --git a/node_modules/es-abstract/helpers/isPrimitive.js b/node_modules/es-abstract/helpers/isPrimitive.js new file mode 100644 index 0000000000000..3669156452759 --- /dev/null +++ b/node_modules/es-abstract/helpers/isPrimitive.js @@ -0,0 +1,3 @@ +module.exports = function isPrimitive(value) { + return value === null || (typeof value !== 'function' && typeof value !== 'object'); +}; diff --git a/node_modules/es-abstract/helpers/mod.js b/node_modules/es-abstract/helpers/mod.js new file mode 100644 index 0000000000000..5867fd979c0ab --- /dev/null +++ b/node_modules/es-abstract/helpers/mod.js @@ -0,0 +1,4 @@ +module.exports = function mod(number, modulo) { + var remain = number % modulo; + return Math.floor(remain >= 0 ? remain : remain + modulo); +}; diff --git a/node_modules/es-abstract/helpers/sign.js b/node_modules/es-abstract/helpers/sign.js new file mode 100644 index 0000000000000..2ac0bf1b1a0e9 --- /dev/null +++ b/node_modules/es-abstract/helpers/sign.js @@ -0,0 +1,3 @@ +module.exports = function sign(number) { + return number >= 0 ? 
1 : -1; +}; diff --git a/node_modules/es-abstract/index.js b/node_modules/es-abstract/index.js new file mode 100644 index 0000000000000..cee856bbdeb9f --- /dev/null +++ b/node_modules/es-abstract/index.js @@ -0,0 +1,22 @@ +'use strict'; + +var assign = require('./helpers/assign'); + +var ES5 = require('./es5'); +var ES2015 = require('./es2015'); +var ES2016 = require('./es2016'); +var ES2017 = require('./es2017'); + +var ES = { + ES5: ES5, + ES6: ES2015, + ES2015: ES2015, + ES7: ES2016, + ES2016: ES2016, + ES2017: ES2017 +}; +assign(ES, ES5); +delete ES.CheckObjectCoercible; // renamed in ES6 to RequireObjectCoercible +assign(ES, ES2015); + +module.exports = ES; diff --git a/node_modules/es-abstract/operations/2015.js b/node_modules/es-abstract/operations/2015.js new file mode 100644 index 0000000000000..1df63c3f4ba8c --- /dev/null +++ b/node_modules/es-abstract/operations/2015.js @@ -0,0 +1,78 @@ +'use strict'; + +module.exports = { + IsPropertyDescriptor: 'https://ecma-international.org/ecma-262/6.0/#sec-property-descriptor-specification-type', + IsAccessorDescriptor: 'https://ecma-international.org/ecma-262/6.0/#sec-isaccessordescriptor', + IsDataDescriptor: 'https://ecma-international.org/ecma-262/6.0/#sec-isdatadescriptor', + IsGenericDescriptor: 'https://ecma-international.org/ecma-262/6.0/#sec-isgenericdescriptor', + FromPropertyDescriptor: 'https://ecma-international.org/ecma-262/6.0/#sec-frompropertydescriptor', + ToPropertyDescriptor: 'https://ecma-international.org/ecma-262/6.0/#sec-topropertydescriptor', + CompletePropertyDescriptor: 'https://ecma-international.org/ecma-262/6.0/#sec-completepropertydescriptor', + ToPrimitive: 'https://ecma-international.org/ecma-262/6.0/#sec-toprimitive', + ToBoolean: 'https://ecma-international.org/ecma-262/6.0/#sec-toboolean', + ToNumber: 'https://ecma-international.org/ecma-262/6.0/#sec-tonumber', + ToInteger: 'https://ecma-international.org/ecma-262/6.0/#sec-tointeger', + ToInt32: 'https://ecma-international.org/ecma-262/6.0/#sec-toint32', + ToUint32: 'https://ecma-international.org/ecma-262/6.0/#sec-touint32', + ToInt16: 'https://ecma-international.org/ecma-262/6.0/#sec-toint16', + ToUint16: 'https://ecma-international.org/ecma-262/6.0/#sec-touint16', + ToInt8: 'https://ecma-international.org/ecma-262/6.0/#sec-toint8', + ToUint8: 'https://ecma-international.org/ecma-262/6.0/#sec-touint8', + ToUint8Clamp: 'https://ecma-international.org/ecma-262/6.0/#sec-touint8clamp', + ToString: 'https://ecma-international.org/ecma-262/6.0/#sec-tostring', + ToObject: 'https://ecma-international.org/ecma-262/6.0/#sec-toobject', + ToPropertyKey: 'https://ecma-international.org/ecma-262/6.0/#sec-topropertykey', + ToLength: 'https://ecma-international.org/ecma-262/6.0/#sec-tolength', + CanonicalNumericIndexString: 'https://ecma-international.org/ecma-262/6.0/#sec-canonicalnumericindexstring', + RequireObjectCoercible: 'https://ecma-international.org/ecma-262/6.0/#sec-requireobjectcoercible', + IsArray: 'https://ecma-international.org/ecma-262/6.0/#sec-isarray', + IsCallable: 'https://ecma-international.org/ecma-262/6.0/#sec-iscallable', + IsConstructor: 'https://ecma-international.org/ecma-262/6.0/#sec-isconstructor', + IsExtensible: 'https://ecma-international.org/ecma-262/6.0/#sec-isextensible-o', + IsInteger: 'https://ecma-international.org/ecma-262/6.0/#sec-isinteger', + IsPropertyKey: 'https://ecma-international.org/ecma-262/6.0/#sec-ispropertykey', + IsRegExp: 'https://ecma-international.org/ecma-262/6.0/#sec-isregexp', + SameValue: 
'https://ecma-international.org/ecma-262/6.0/#sec-samevalue', + SameValueZero: 'https://ecma-international.org/ecma-262/6.0/#sec-samevaluezero', + Get: 'https://ecma-international.org/ecma-262/6.0/#sec-get-o-p', + GetV: 'https://ecma-international.org/ecma-262/6.0/#sec-getv', + Set: 'https://ecma-international.org/ecma-262/6.0/#sec-set-o-p-v-throw', + CreateDataProperty: 'https://ecma-international.org/ecma-262/6.0/#sec-createdataproperty', + CreateMethodProperty: 'https://ecma-international.org/ecma-262/6.0/#sec-createmethodproperty', + CreateDataPropertyOrThrow: 'https://ecma-international.org/ecma-262/6.0/#sec-createdatapropertyorthrow', + DefinePropertyOrThrow: 'https://ecma-international.org/ecma-262/6.0/#sec-definepropertyorthrow', + DeletePropertyOrThrow: 'https://ecma-international.org/ecma-262/6.0/#sec-deletepropertyorthrow', + GetMethod: 'https://ecma-international.org/ecma-262/6.0/#sec-getmethod', + HasProperty: 'https://ecma-international.org/ecma-262/6.0/#sec-hasproperty', + HasOwnProperty: 'https://ecma-international.org/ecma-262/6.0/#sec-hasownproperty', + Call: 'https://ecma-international.org/ecma-262/6.0/#sec-call', + Construct: 'https://ecma-international.org/ecma-262/6.0/#sec-construct', + SetIntegrityLevel: 'https://ecma-international.org/ecma-262/6.0/#sec-setintegritylevel', + TestIntegrityLevel: 'https://ecma-international.org/ecma-262/6.0/#sec-testintegritylevel', + CreateArrayFromList: 'https://ecma-international.org/ecma-262/6.0/#sec-createarrayfromlist', + CreateListFromArrayLike: 'https://ecma-international.org/ecma-262/6.0/#sec-createlistfromarraylike', + Invoke: 'https://ecma-international.org/ecma-262/6.0/#sec-invoke', + OrdinaryHasInstance: 'https://ecma-international.org/ecma-262/6.0/#sec-ordinaryhasinstance', + SpeciesConstructor: 'https://ecma-international.org/ecma-262/6.0/#sec-speciesconstructor', + EnumerableOwnNames: 'https://ecma-international.org/ecma-262/6.0/#sec-enumerableownnames', + GetIterator: 'https://ecma-international.org/ecma-262/6.0/#sec-getiterator', + IteratorNext: 'https://ecma-international.org/ecma-262/6.0/#sec-iteratornext', + IteratorComplete: 'https://ecma-international.org/ecma-262/6.0/#sec-iteratorcomplete', + IteratorValue: 'https://ecma-international.org/ecma-262/6.0/#sec-iteratorvalue', + IteratorStep: 'https://ecma-international.org/ecma-262/6.0/#sec-iteratorstep', + IteratorClose: 'https://ecma-international.org/ecma-262/6.0/#sec-iteratorclose', + CreateIterResultObject: 'https://ecma-international.org/ecma-262/6.0/#sec-createiterresultobject', + CreateListIterator: 'https://ecma-international.org/ecma-262/6.0/#sec-createlistiterator', + Type: 'https://ecma-international.org/ecma-262/6.0/#sec-ecmascript-language-types', + thisNumberValue: 'https://ecma-international.org/ecma-262/6.0/#sec-properties-of-the-number-prototype-object', + thisTimeValue: 'https://ecma-international.org/ecma-262/6.0/#sec-properties-of-the-date-prototype-object', + thisStringValue: 'https://ecma-international.org/ecma-262/6.0/#sec-properties-of-the-string-prototype-object', + RegExpExec: 'https://ecma-international.org/ecma-262/6.0/#sec-regexpexec', + RegExpBuiltinExec: 'https://ecma-international.org/ecma-262/6.0/#sec-regexpbuiltinexec', + IsConcatSpreadable: 'https://ecma-international.org/ecma-262/6.0/#sec-isconcatspreadable', + IsPromise: 'https://ecma-international.org/ecma-262/6.0/#sec-ispromise', + ArraySpeciesCreate: 'https://ecma-international.org/ecma-262/6.0/#sec-arrayspeciescreate', + ObjectCreate: 
'https://ecma-international.org/ecma-262/6.0/#sec-objectcreate', + AdvanceStringIndex: 'https://ecma-international.org/ecma-262/6.0/#sec-advancestringindex', + NormalCompletion: 'https://ecma-international.org/ecma-262/6.0/#sec-normalcompletion' +}; diff --git a/node_modules/es-abstract/operations/2016.js b/node_modules/es-abstract/operations/2016.js new file mode 100644 index 0000000000000..6ac8aae77c28a --- /dev/null +++ b/node_modules/es-abstract/operations/2016.js @@ -0,0 +1,80 @@ +'use strict'; + +module.exports = { + IsPropertyDescriptor: 'https://ecma-international.org/ecma-262/7.0/#sec-property-descriptor-specification-type', + IsAccessorDescriptor: 'https://ecma-international.org/ecma-262/7.0/#sec-isaccessordescriptor', + IsDataDescriptor: 'https://ecma-international.org/ecma-262/7.0/#sec-isdatadescriptor', + IsGenericDescriptor: 'https://ecma-international.org/ecma-262/7.0/#sec-isgenericdescriptor', + FromPropertyDescriptor: 'https://ecma-international.org/ecma-262/7.0/#sec-frompropertydescriptor', + ToPropertyDescriptor: 'https://ecma-international.org/ecma-262/7.0/#sec-topropertydescriptor', + CompletePropertyDescriptor: 'https://ecma-international.org/ecma-262/7.0/#sec-completepropertydescriptor', + ToPrimitive: 'https://ecma-international.org/ecma-262/7.0/#sec-toprimitive', + ToBoolean: 'https://ecma-international.org/ecma-262/7.0/#sec-toboolean', + ToNumber: 'https://ecma-international.org/ecma-262/7.0/#sec-tonumber', + ToInteger: 'https://ecma-international.org/ecma-262/7.0/#sec-tointeger', + ToInt32: 'https://ecma-international.org/ecma-262/7.0/#sec-toint32', + ToUint32: 'https://ecma-international.org/ecma-262/7.0/#sec-touint32', + ToInt16: 'https://ecma-international.org/ecma-262/7.0/#sec-toint16', + ToUint16: 'https://ecma-international.org/ecma-262/7.0/#sec-touint16', + ToInt8: 'https://ecma-international.org/ecma-262/7.0/#sec-toint8', + ToUint8: 'https://ecma-international.org/ecma-262/7.0/#sec-touint8', + ToUint8Clamp: 'https://ecma-international.org/ecma-262/7.0/#sec-touint8clamp', + ToString: 'https://ecma-international.org/ecma-262/7.0/#sec-tostring', + ToObject: 'https://ecma-international.org/ecma-262/7.0/#sec-toobject', + ToPropertyKey: 'https://ecma-international.org/ecma-262/7.0/#sec-topropertykey', + ToLength: 'https://ecma-international.org/ecma-262/7.0/#sec-tolength', + CanonicalNumericIndexString: 'https://ecma-international.org/ecma-262/7.0/#sec-canonicalnumericindexstring', + RequireObjectCoercible: 'https://ecma-international.org/ecma-262/7.0/#sec-requireobjectcoercible', + IsArray: 'https://ecma-international.org/ecma-262/7.0/#sec-isarray', + IsCallable: 'https://ecma-international.org/ecma-262/7.0/#sec-iscallable', + IsConstructor: 'https://ecma-international.org/ecma-262/7.0/#sec-isconstructor', + IsExtensible: 'https://ecma-international.org/ecma-262/7.0/#sec-isextensible-o', + IsInteger: 'https://ecma-international.org/ecma-262/7.0/#sec-isinteger', + IsPropertyKey: 'https://ecma-international.org/ecma-262/7.0/#sec-ispropertykey', + IsRegExp: 'https://ecma-international.org/ecma-262/7.0/#sec-isregexp', + SameValue: 'https://ecma-international.org/ecma-262/7.0/#sec-samevalue', + SameValueZero: 'https://ecma-international.org/ecma-262/7.0/#sec-samevaluezero', + SameValueNonNumber: 'https://ecma-international.org/ecma-262/7.0/#sec-samevaluenonnumber', + Get: 'https://ecma-international.org/ecma-262/7.0/#sec-get-o-p', + GetV: 'https://ecma-international.org/ecma-262/7.0/#sec-getv', + Set: 
'https://ecma-international.org/ecma-262/7.0/#sec-set-o-p-v-throw', + CreateDataProperty: 'https://ecma-international.org/ecma-262/7.0/#sec-createdataproperty', + CreateMethodProperty: 'https://ecma-international.org/ecma-262/7.0/#sec-createmethodproperty', + CreateDataPropertyOrThrow: 'https://ecma-international.org/ecma-262/7.0/#sec-createdatapropertyorthrow', + DefinePropertyOrThrow: 'https://ecma-international.org/ecma-262/7.0/#sec-definepropertyorthrow', + DeletePropertyOrThrow: 'https://ecma-international.org/ecma-262/7.0/#sec-deletepropertyorthrow', + GetMethod: 'https://ecma-international.org/ecma-262/7.0/#sec-getmethod', + HasProperty: 'https://ecma-international.org/ecma-262/7.0/#sec-hasproperty', + HasOwnProperty: 'https://ecma-international.org/ecma-262/7.0/#sec-hasownproperty', + Call: 'https://ecma-international.org/ecma-262/7.0/#sec-call', + Construct: 'https://ecma-international.org/ecma-262/7.0/#sec-construct', + SetIntegrityLevel: 'https://ecma-international.org/ecma-262/7.0/#sec-setintegritylevel', + TestIntegrityLevel: 'https://ecma-international.org/ecma-262/7.0/#sec-testintegritylevel', + CreateArrayFromList: 'https://ecma-international.org/ecma-262/7.0/#sec-createarrayfromlist', + CreateListFromArrayLike: 'https://ecma-international.org/ecma-262/7.0/#sec-createlistfromarraylike', + Invoke: 'https://ecma-international.org/ecma-262/7.0/#sec-invoke', + OrdinaryHasInstance: 'https://ecma-international.org/ecma-262/7.0/#sec-ordinaryhasinstance', + SpeciesConstructor: 'https://ecma-international.org/ecma-262/7.0/#sec-speciesconstructor', + EnumerableOwnNames: 'https://ecma-international.org/ecma-262/7.0/#sec-enumerableownnames', + GetIterator: 'https://ecma-international.org/ecma-262/7.0/#sec-getiterator', + IteratorNext: 'https://ecma-international.org/ecma-262/7.0/#sec-iteratornext', + IteratorComplete: 'https://ecma-international.org/ecma-262/7.0/#sec-iteratorcomplete', + IteratorValue: 'https://ecma-international.org/ecma-262/7.0/#sec-iteratorvalue', + IteratorStep: 'https://ecma-international.org/ecma-262/7.0/#sec-iteratorstep', + IteratorClose: 'https://ecma-international.org/ecma-262/7.0/#sec-iteratorclose', + CreateIterResultObject: 'https://ecma-international.org/ecma-262/7.0/#sec-createiterresultobject', + CreateListIterator: 'https://ecma-international.org/ecma-262/7.0/#sec-createlistiterator', + Type: 'https://ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types', + thisNumberValue: 'https://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-number-prototype-object', + thisTimeValue: 'https://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-date-prototype-object', + thisStringValue: 'https://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-string-prototype-object', + RegExpExec: 'https://ecma-international.org/ecma-262/7.0/#sec-regexpexec', + RegExpBuiltinExec: 'https://ecma-international.org/ecma-262/7.0/#sec-regexpbuiltinexec', + IsConcatSpreadable: 'https://ecma-international.org/ecma-262/7.0/#sec-isconcatspreadable', + IsPromise: 'https://ecma-international.org/ecma-262/7.0/#sec-ispromise', + ArraySpeciesCreate: 'https://ecma-international.org/ecma-262/7.0/#sec-arrayspeciescreate', + ObjectCreate: 'https://ecma-international.org/ecma-262/7.0/#sec-objectcreate', + AdvanceStringIndex: 'https://ecma-international.org/ecma-262/7.0/#sec-advancestringindex', + OrdinarySet: 'https://ecma-international.org/ecma-262/7.0/#sec-ordinaryset', + NormalCompletion: 'https://ecma-international.org/ecma-262/7.0/#sec-normalcompletion' 
+}; diff --git a/node_modules/es-abstract/operations/2017.js b/node_modules/es-abstract/operations/2017.js new file mode 100644 index 0000000000000..c8c3aa499cc3f --- /dev/null +++ b/node_modules/es-abstract/operations/2017.js @@ -0,0 +1,82 @@ +'use strict'; + +module.exports = { + IsPropertyDescriptor: 'https://ecma-international.org/ecma-262/8.0/#sec-property-descriptor-specification-type', + IsAccessorDescriptor: 'https://ecma-international.org/ecma-262/8.0/#sec-isaccessordescriptor', + IsDataDescriptor: 'https://ecma-international.org/ecma-262/8.0/#sec-isdatadescriptor', + IsGenericDescriptor: 'https://ecma-international.org/ecma-262/8.0/#sec-isgenericdescriptor', + FromPropertyDescriptor: 'https://ecma-international.org/ecma-262/8.0/#sec-frompropertydescriptor', + ToPropertyDescriptor: 'https://ecma-international.org/ecma-262/8.0/#sec-topropertydescriptor', + CompletePropertyDescriptor: 'https://ecma-international.org/ecma-262/8.0/#sec-completepropertydescriptor', + ToPrimitive: 'https://ecma-international.org/ecma-262/8.0/#sec-toprimitive', + ToBoolean: 'https://ecma-international.org/ecma-262/8.0/#sec-toboolean', + ToNumber: 'https://ecma-international.org/ecma-262/8.0/#sec-tonumber', + ToInteger: 'https://ecma-international.org/ecma-262/8.0/#sec-tointeger', + ToInt32: 'https://ecma-international.org/ecma-262/8.0/#sec-toint32', + ToUint32: 'https://ecma-international.org/ecma-262/8.0/#sec-touint32', + ToInt16: 'https://ecma-international.org/ecma-262/8.0/#sec-toint16', + ToUint16: 'https://ecma-international.org/ecma-262/8.0/#sec-touint16', + ToInt8: 'https://ecma-international.org/ecma-262/8.0/#sec-toint8', + ToUint8: 'https://ecma-international.org/ecma-262/8.0/#sec-touint8', + ToUint8Clamp: 'https://ecma-international.org/ecma-262/8.0/#sec-touint8clamp', + ToString: 'https://ecma-international.org/ecma-262/8.0/#sec-tostring', + ToObject: 'https://ecma-international.org/ecma-262/8.0/#sec-toobject', + ToPropertyKey: 'https://ecma-international.org/ecma-262/8.0/#sec-topropertykey', + ToLength: 'https://ecma-international.org/ecma-262/8.0/#sec-tolength', + CanonicalNumericIndexString: 'https://ecma-international.org/ecma-262/8.0/#sec-canonicalnumericindexstring', + ToIndex: 'https://ecma-international.org/ecma-262/8.0/#sec-toindex', + RequireObjectCoercible: 'https://ecma-international.org/ecma-262/8.0/#sec-requireobjectcoercible', + IsArray: 'https://ecma-international.org/ecma-262/8.0/#sec-isarray', + IsCallable: 'https://ecma-international.org/ecma-262/8.0/#sec-iscallable', + IsConstructor: 'https://ecma-international.org/ecma-262/8.0/#sec-isconstructor', + IsExtensible: 'https://ecma-international.org/ecma-262/8.0/#sec-isextensible-o', + IsInteger: 'https://ecma-international.org/ecma-262/8.0/#sec-isinteger', + IsPropertyKey: 'https://ecma-international.org/ecma-262/8.0/#sec-ispropertykey', + IsRegExp: 'https://ecma-international.org/ecma-262/8.0/#sec-isregexp', + SameValue: 'https://ecma-international.org/ecma-262/8.0/#sec-samevalue', + SameValueZero: 'https://ecma-international.org/ecma-262/8.0/#sec-samevaluezero', + SameValueNonNumber: 'https://ecma-international.org/ecma-262/8.0/#sec-samevaluenonnumber', + Get: 'https://ecma-international.org/ecma-262/8.0/#sec-get-o-p', + GetV: 'https://ecma-international.org/ecma-262/8.0/#sec-getv', + Set: 'https://ecma-international.org/ecma-262/8.0/#sec-set-o-p-v-throw', + CreateDataProperty: 'https://ecma-international.org/ecma-262/8.0/#sec-createdataproperty', + CreateMethodProperty: 
'https://ecma-international.org/ecma-262/8.0/#sec-createmethodproperty', + CreateDataPropertyOrThrow: 'https://ecma-international.org/ecma-262/8.0/#sec-createdatapropertyorthrow', + DefinePropertyOrThrow: 'https://ecma-international.org/ecma-262/8.0/#sec-definepropertyorthrow', + DeletePropertyOrThrow: 'https://ecma-international.org/ecma-262/8.0/#sec-deletepropertyorthrow', + GetMethod: 'https://ecma-international.org/ecma-262/8.0/#sec-getmethod', + HasProperty: 'https://ecma-international.org/ecma-262/8.0/#sec-hasproperty', + HasOwnProperty: 'https://ecma-international.org/ecma-262/8.0/#sec-hasownproperty', + Call: 'https://ecma-international.org/ecma-262/8.0/#sec-call', + Construct: 'https://ecma-international.org/ecma-262/8.0/#sec-construct', + SetIntegrityLevel: 'https://ecma-international.org/ecma-262/8.0/#sec-setintegritylevel', + TestIntegrityLevel: 'https://ecma-international.org/ecma-262/8.0/#sec-testintegritylevel', + CreateArrayFromList: 'https://ecma-international.org/ecma-262/8.0/#sec-createarrayfromlist', + CreateListFromArrayLike: 'https://ecma-international.org/ecma-262/8.0/#sec-createlistfromarraylike', + Invoke: 'https://ecma-international.org/ecma-262/8.0/#sec-invoke', + OrdinaryHasInstance: 'https://ecma-international.org/ecma-262/8.0/#sec-ordinaryhasinstance', + SpeciesConstructor: 'https://ecma-international.org/ecma-262/8.0/#sec-speciesconstructor', + EnumerableOwnProperties: 'https://ecma-international.org/ecma-262/8.0/#sec-enumerableownproperties', + GetIterator: 'https://ecma-international.org/ecma-262/8.0/#sec-getiterator', + IteratorNext: 'https://ecma-international.org/ecma-262/8.0/#sec-iteratornext', + IteratorComplete: 'https://ecma-international.org/ecma-262/8.0/#sec-iteratorcomplete', + IteratorValue: 'https://ecma-international.org/ecma-262/8.0/#sec-iteratorvalue', + IteratorStep: 'https://ecma-international.org/ecma-262/8.0/#sec-iteratorstep', + IteratorClose: 'https://ecma-international.org/ecma-262/8.0/#sec-iteratorclose', + CreateIterResultObject: 'https://ecma-international.org/ecma-262/8.0/#sec-createiterresultobject', + CreateListIterator: 'https://ecma-international.org/ecma-262/8.0/#sec-createlistiterator', + Type: 'https://ecma-international.org/ecma-262/8.0/#sec-ecmascript-language-types', + thisNumberValue: 'https://ecma-international.org/ecma-262/8.0/#sec-properties-of-the-number-prototype-object', + thisTimeValue: 'https://ecma-international.org/ecma-262/8.0/#sec-properties-of-the-date-prototype-object', + thisStringValue: 'https://ecma-international.org/ecma-262/8.0/#sec-properties-of-the-string-prototype-object', + RegExpExec: 'https://ecma-international.org/ecma-262/8.0/#sec-regexpexec', + RegExpBuiltinExec: 'https://ecma-international.org/ecma-262/8.0/#sec-regexpbuiltinexec', + IsConcatSpreadable: 'https://ecma-international.org/ecma-262/8.0/#sec-isconcatspreadable', + IsPromise: 'https://ecma-international.org/ecma-262/8.0/#sec-ispromise', + ArraySpeciesCreate: 'https://ecma-international.org/ecma-262/8.0/#sec-arrayspeciescreate', + ObjectCreate: 'https://ecma-international.org/ecma-262/8.0/#sec-objectcreate', + AdvanceStringIndex: 'https://ecma-international.org/ecma-262/8.0/#sec-advancestringindex', + OrdinarySet: 'https://ecma-international.org/ecma-262/8.0/#sec-ordinaryset', + NormalCompletion: 'https://ecma-international.org/ecma-262/8.0/#sec-normalcompletion', + IsSharedArrayBuffer: 'https://ecma-international.org/ecma-262/8.0/#sec-issharedarraybuffer', +}; diff --git a/node_modules/es-abstract/operations/es5.js 
b/node_modules/es-abstract/operations/es5.js new file mode 100644 index 0000000000000..205d1e681560e --- /dev/null +++ b/node_modules/es-abstract/operations/es5.js @@ -0,0 +1,10 @@ +'use strict'; + +module.exports = { + IsPropertyDescriptor: 'https://ecma-international.org/ecma-262/5.1/#sec-8.10', + IsAccessorDescriptor: 'https://ecma-international.org/ecma-262/5.1/#sec-8.10.1', + IsDataDescriptor: 'https://ecma-international.org/ecma-262/5.1/#sec-8.10.2', + IsGenericDescriptor: 'https://ecma-international.org/ecma-262/5.1/#sec-8.10.3', + FromPropertyDescriptor: 'https://ecma-international.org/ecma-262/5.1/#sec-8.10.4', + ToPropertyDescriptor: 'https://ecma-international.org/ecma-262/5.1/#sec-8.10.5' +}; diff --git a/node_modules/es-abstract/package.json b/node_modules/es-abstract/package.json new file mode 100644 index 0000000000000..9b5378b8afb7e --- /dev/null +++ b/node_modules/es-abstract/package.json @@ -0,0 +1,102 @@ +{ + "name": "es-abstract", + "version": "1.12.0", + "author": { + "name": "Jordan Harband", + "email": "ljharb@gmail.com", + "url": "http://ljharb.codes" + }, + "contributors": [ + { + "name": "Jordan Harband", + "email": "ljharb@gmail.com", + "url": "http://ljharb.codes" + } + ], + "description": "ECMAScript spec abstract operations.", + "license": "MIT", + "main": "index.js", + "scripts": { + "prepublish": "safe-publish-latest", + "pretest": "npm run --silent lint", + "test": "npm run tests-only", + "posttest": "npm run --silent security", + "tests-only": "node test", + "coverage": "nyc npm run --silent tests-only >/dev/null", + "postcoverage": "nyc report", + "lint": "npm run --silent jscs && npm run --silent eslint", + "jscs": "jscs test/*.js *.js", + "eslint": "eslint test/*.js *.js", + "eccheck": "editorconfig-tools check *.js **/*.js > /dev/null", + "security": "nsp check" + }, + "repository": { + "type": "git", + "url": "git://github.com/ljharb/es-abstract.git" + }, + "keywords": [ + "ECMAScript", + "ES", + "abstract", + "operation", + "abstract operation", + "JavaScript", + "ES5", + "ES6", + "ES7" + ], + "dependencies": { + "es-to-primitive": "^1.1.1", + "function-bind": "^1.1.1", + "has": "^1.0.1", + "is-callable": "^1.1.3", + "is-regex": "^1.0.4" + }, + "devDependencies": { + "@ljharb/eslint-config": "^12.2.1", + "editorconfig-tools": "^0.1.1", + "eslint": "^4.19.1", + "foreach": "^2.0.5", + "jscs": "^3.0.7", + "nsp": "^3.2.1", + "nyc": "^10.3.2", + "object-inspect": "^1.6.0", + "object-is": "^1.0.1", + "object.assign": "^4.1.0", + "replace": "^1.0.0", + "safe-publish-latest": "^1.1.1", + "semver": "^5.5.0", + "tape": "^4.9.0" + }, + "testling": { + "files": "test/index.js", + "browsers": [ + "iexplore/6.0..latest", + "firefox/3.0..6.0", + "firefox/15.0..latest", + "firefox/nightly", + "chrome/4.0..10.0", + "chrome/20.0..latest", + "chrome/canary", + "opera/10.0..latest", + "opera/next", + "safari/4.0..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2" + ] + }, + "engines": { + "node": ">= 0.4" + }, + "greenkeeper": { + "//": "nyc is ignored because it requires node 4+, and we support older than that", + "ignore": [ + "nyc" + ] + } + +,"_resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.12.0.tgz" +,"_integrity": "sha512-C8Fx/0jFmV5IPoMOFPA9P9G5NtqW+4cOPit3MIuvR2t7Ag2K15EJTpxnHAYTzL+aYQJIESYeXZmDBfOBE1HcpA==" +,"_from": "es-abstract@1.12.0" +} \ No newline at end of file diff --git a/node_modules/es-abstract/test/GetIntrinsic.js b/node_modules/es-abstract/test/GetIntrinsic.js new file mode 100644 index 
0000000000000..ed8e7ecc636bd --- /dev/null +++ b/node_modules/es-abstract/test/GetIntrinsic.js @@ -0,0 +1,34 @@ +'use strict'; + +var GetIntrinsic = require('../GetIntrinsic'); + +var test = require('tape'); +var forEach = require('foreach'); +var debug = require('object-inspect'); + +var v = require('./helpers/values'); + +test('export', function (t) { + t.equal(typeof GetIntrinsic, 'function', 'it is a function'); + t.equal(GetIntrinsic.length, 2, 'function has length of 2'); + + t.end(); +}); + +test('throws', function (t) { + t['throws']( + function () { GetIntrinsic('not an intrinsic'); }, + SyntaxError, + 'nonexistent intrinsic throws a syntax error' + ); + + forEach(v.nonBooleans, function (nonBoolean) { + t['throws']( + function () { GetIntrinsic('%', nonBoolean); }, + TypeError, + debug(nonBoolean) + ' is not a Boolean' + ); + }); + + t.end(); +}); diff --git a/node_modules/es-abstract/test/diffOps.js b/node_modules/es-abstract/test/diffOps.js new file mode 100644 index 0000000000000..2d870897c2059 --- /dev/null +++ b/node_modules/es-abstract/test/diffOps.js @@ -0,0 +1,24 @@ +'use strict'; + +var keys = require('object-keys'); +var forEach = require('foreach'); + +module.exports = function diffOperations(actual, expected, expectedMissing) { + var actualKeys = keys(actual); + var expectedKeys = keys(expected); + + var extra = []; + var missing = []; + forEach(actualKeys, function (op) { + if (!(op in expected)) { + extra.push(op); + } + }); + forEach(expectedKeys, function (op) { + if (!(op in actual) && expectedMissing.indexOf(op) === -1) { + missing.push(op); + } + }); + + return { missing: missing, extra: extra }; +}; diff --git a/node_modules/es-abstract/test/es2015.js b/node_modules/es-abstract/test/es2015.js new file mode 100644 index 0000000000000..51b94d2e3eeb0 --- /dev/null +++ b/node_modules/es-abstract/test/es2015.js @@ -0,0 +1,11 @@ +'use strict'; + +var ES = require('../').ES2015; + +var ops = require('../operations/2015'); + +// jscs:disable +var expectedMissing = ['CreateMethodProperty', 'DefinePropertyOrThrow', 'DeletePropertyOrThrow', 'Construct', 'SetIntegrityLevel', 'TestIntegrityLevel', 'CreateArrayFromList', 'CreateListFromArrayLike', 'OrdinaryHasInstance', 'EnumerableOwnNames', 'GetIterator', 'IteratorNext', 'IteratorComplete', 'IteratorValue', 'IteratorStep', 'IteratorClose', 'CreateListIterator', 'thisNumberValue', 'thisTimeValue', 'thisStringValue', 'RegExpBuiltinExec', 'IsPromise', 'NormalCompletion']; +// jscs:enable + +require('./tests').es2015(ES, ops, expectedMissing); diff --git a/node_modules/es-abstract/test/es2016.js b/node_modules/es-abstract/test/es2016.js new file mode 100644 index 0000000000000..4e9dd2af2e8c4 --- /dev/null +++ b/node_modules/es-abstract/test/es2016.js @@ -0,0 +1,11 @@ +'use strict'; + +var ES = require('../').ES2016; + +var ops = require('../operations/2016'); + +// jscs:disable +var expectedMissing = ['CreateMethodProperty', 'DefinePropertyOrThrow', 'DeletePropertyOrThrow', 'Construct', 'SetIntegrityLevel', 'TestIntegrityLevel', 'CreateArrayFromList', 'CreateListFromArrayLike', 'OrdinaryHasInstance', 'EnumerableOwnNames', 'GetIterator', 'IteratorNext', 'IteratorComplete', 'IteratorValue', 'IteratorStep', 'IteratorClose', 'CreateListIterator', 'thisNumberValue', 'thisTimeValue', 'thisStringValue', 'RegExpBuiltinExec', 'IsPromise', 'OrdinarySet', 'NormalCompletion']; +// jscs:enable + +require('./tests').es2016(ES, ops, expectedMissing); diff --git a/node_modules/es-abstract/test/es2017.js 
b/node_modules/es-abstract/test/es2017.js new file mode 100644 index 0000000000000..82533049a56c5 --- /dev/null +++ b/node_modules/es-abstract/test/es2017.js @@ -0,0 +1,11 @@ +'use strict'; + +var ES = require('../').ES2017; + +var ops = require('../operations/2017'); + +// jscs:disable +var expectedMissing = ['CreateMethodProperty', 'DefinePropertyOrThrow', 'DeletePropertyOrThrow', 'Construct', 'SetIntegrityLevel', 'TestIntegrityLevel', 'CreateArrayFromList', 'CreateListFromArrayLike', 'OrdinaryHasInstance', 'EnumerableOwnProperties', 'GetIterator', 'IteratorNext', 'IteratorComplete', 'IteratorValue', 'IteratorStep', 'IteratorClose', 'CreateListIterator', 'thisNumberValue', 'thisTimeValue', 'thisStringValue', 'RegExpBuiltinExec', 'IsPromise', 'OrdinarySet', 'NormalCompletion', 'IsSharedArrayBuffer']; +// jscs:enable + +require('./tests').es2017(ES, ops, expectedMissing); diff --git a/node_modules/es-abstract/test/es5.js b/node_modules/es-abstract/test/es5.js new file mode 100644 index 0000000000000..cca3030407d88 --- /dev/null +++ b/node_modules/es-abstract/test/es5.js @@ -0,0 +1,415 @@ +'use strict'; + +var ES = require('../').ES5; +var test = require('tape'); + +var forEach = require('foreach'); +var is = require('object-is'); + +var coercibleObject = { valueOf: function () { return '3'; }, toString: function () { return 42; } }; +var coercibleFnObject = { + valueOf: function () { return function valueOfFn() {}; }, + toString: function () { return 42; } +}; +var valueOfOnlyObject = { valueOf: function () { return 4; }, toString: function () { return {}; } }; +var toStringOnlyObject = { valueOf: function () { return {}; }, toString: function () { return 7; } }; +var uncoercibleObject = { valueOf: function () { return {}; }, toString: function () { return {}; } }; +var uncoercibleFnObject = { + valueOf: function () { return function valueOfFn() {}; }, + toString: function () { return function toStrFn() {}; } +}; +var objects = [{}, coercibleObject, toStringOnlyObject, valueOfOnlyObject]; +var numbers = [0, -0, Infinity, -Infinity, 42]; +var nonNullPrimitives = [true, false, 'foo', ''].concat(numbers); +var primitives = [undefined, null].concat(nonNullPrimitives); + +test('ToPrimitive', function (t) { + t.test('primitives', function (st) { + var testPrimitive = function (primitive) { + st.ok(is(ES.ToPrimitive(primitive), primitive), primitive + ' is returned correctly'); + }; + forEach(primitives, testPrimitive); + st.end(); + }); + + t.test('objects', function (st) { + st.equal(ES.ToPrimitive(coercibleObject), coercibleObject.valueOf(), 'coercibleObject coerces to valueOf'); + st.equal(ES.ToPrimitive(coercibleObject, Number), coercibleObject.valueOf(), 'coercibleObject with hint Number coerces to valueOf'); + st.equal(ES.ToPrimitive(coercibleObject, String), coercibleObject.toString(), 'coercibleObject with hint String coerces to toString'); + st.equal(ES.ToPrimitive(coercibleFnObject), coercibleFnObject.toString(), 'coercibleFnObject coerces to toString'); + st.equal(ES.ToPrimitive(toStringOnlyObject), toStringOnlyObject.toString(), 'toStringOnlyObject returns toString'); + st.equal(ES.ToPrimitive(valueOfOnlyObject), valueOfOnlyObject.valueOf(), 'valueOfOnlyObject returns valueOf'); + st.equal(ES.ToPrimitive({}), '[object Object]', '{} with no hint coerces to Object#toString'); + st.equal(ES.ToPrimitive({}, String), '[object Object]', '{} with hint String coerces to Object#toString'); + st.equal(ES.ToPrimitive({}, Number), '[object Object]', '{} with hint Number coerces to 
Object#toString'); + st['throws'](function () { return ES.ToPrimitive(uncoercibleObject); }, TypeError, 'uncoercibleObject throws a TypeError'); + st['throws'](function () { return ES.ToPrimitive(uncoercibleFnObject); }, TypeError, 'uncoercibleFnObject throws a TypeError'); + st.end(); + }); + + t.end(); +}); + +test('ToBoolean', function (t) { + t.equal(false, ES.ToBoolean(undefined), 'undefined coerces to false'); + t.equal(false, ES.ToBoolean(null), 'null coerces to false'); + t.equal(false, ES.ToBoolean(false), 'false returns false'); + t.equal(true, ES.ToBoolean(true), 'true returns true'); + forEach([0, -0, NaN], function (falsyNumber) { + t.equal(false, ES.ToBoolean(falsyNumber), 'falsy number ' + falsyNumber + ' coerces to false'); + }); + forEach([Infinity, 42, 1, -Infinity], function (truthyNumber) { + t.equal(true, ES.ToBoolean(truthyNumber), 'truthy number ' + truthyNumber + ' coerces to true'); + }); + t.equal(false, ES.ToBoolean(''), 'empty string coerces to false'); + t.equal(true, ES.ToBoolean('foo'), 'nonempty string coerces to true'); + forEach(objects, function (obj) { + t.equal(true, ES.ToBoolean(obj), 'object coerces to true'); + }); + t.equal(true, ES.ToBoolean(uncoercibleObject), 'uncoercibleObject coerces to true'); + t.end(); +}); + +test('ToNumber', function (t) { + t.ok(is(NaN, ES.ToNumber(undefined)), 'undefined coerces to NaN'); + t.ok(is(ES.ToNumber(null), 0), 'null coerces to +0'); + t.ok(is(ES.ToNumber(false), 0), 'false coerces to +0'); + t.equal(1, ES.ToNumber(true), 'true coerces to 1'); + t.ok(is(NaN, ES.ToNumber(NaN)), 'NaN returns itself'); + forEach([0, -0, 42, Infinity, -Infinity], function (num) { + t.equal(num, ES.ToNumber(num), num + ' returns itself'); + }); + forEach(['foo', '0', '4a', '2.0', 'Infinity', '-Infinity'], function (numString) { + t.ok(is(+numString, ES.ToNumber(numString)), '"' + numString + '" coerces to ' + Number(numString)); + }); + forEach(objects, function (object) { + t.ok(is(ES.ToNumber(object), ES.ToNumber(ES.ToPrimitive(object))), 'object ' + object + ' coerces to same as ToPrimitive of object does'); + }); + t['throws'](function () { return ES.ToNumber(uncoercibleObject); }, TypeError, 'uncoercibleObject throws'); + t.end(); +}); + +test('ToInteger', function (t) { + t.ok(is(0, ES.ToInteger(NaN)), 'NaN coerces to +0'); + forEach([0, Infinity, 42], function (num) { + t.ok(is(num, ES.ToInteger(num)), num + ' returns itself'); + t.ok(is(-num, ES.ToInteger(-num)), '-' + num + ' returns itself'); + }); + t.equal(3, ES.ToInteger(Math.PI), 'pi returns 3'); + t['throws'](function () { return ES.ToInteger(uncoercibleObject); }, TypeError, 'uncoercibleObject throws'); + t.end(); +}); + +test('ToInt32', function (t) { + t.ok(is(0, ES.ToInt32(NaN)), 'NaN coerces to +0'); + forEach([0, Infinity], function (num) { + t.ok(is(0, ES.ToInt32(num)), num + ' returns +0'); + t.ok(is(0, ES.ToInt32(-num)), '-' + num + ' returns +0'); + }); + t['throws'](function () { return ES.ToInt32(uncoercibleObject); }, TypeError, 'uncoercibleObject throws'); + t.ok(is(ES.ToInt32(0x100000000), 0), '2^32 returns +0'); + t.ok(is(ES.ToInt32(0x100000000 - 1), -1), '2^32 - 1 returns -1'); + t.ok(is(ES.ToInt32(0x80000000), -0x80000000), '2^31 returns -2^31'); + t.ok(is(ES.ToInt32(0x80000000 - 1), 0x80000000 - 1), '2^31 - 1 returns 2^31 - 1'); + forEach([0, Infinity, NaN, 0x100000000, 0x80000000, 0x10000, 0x42], function (num) { + t.ok(is(ES.ToInt32(num), ES.ToInt32(ES.ToUint32(num))), 'ToInt32(x) === ToInt32(ToUint32(x)) for 0x' + num.toString(16)); + 
t.ok(is(ES.ToInt32(-num), ES.ToInt32(ES.ToUint32(-num))), 'ToInt32(x) === ToInt32(ToUint32(x)) for -0x' + num.toString(16)); + }); + t.end(); +}); + +test('ToUint32', function (t) { + t.ok(is(0, ES.ToUint32(NaN)), 'NaN coerces to +0'); + forEach([0, Infinity], function (num) { + t.ok(is(0, ES.ToUint32(num)), num + ' returns +0'); + t.ok(is(0, ES.ToUint32(-num)), '-' + num + ' returns +0'); + }); + t['throws'](function () { return ES.ToUint32(uncoercibleObject); }, TypeError, 'uncoercibleObject throws'); + t.ok(is(ES.ToUint32(0x100000000), 0), '2^32 returns +0'); + t.ok(is(ES.ToUint32(0x100000000 - 1), 0x100000000 - 1), '2^32 - 1 returns 2^32 - 1'); + t.ok(is(ES.ToUint32(0x80000000), 0x80000000), '2^31 returns 2^31'); + t.ok(is(ES.ToUint32(0x80000000 - 1), 0x80000000 - 1), '2^31 - 1 returns 2^31 - 1'); + forEach([0, Infinity, NaN, 0x100000000, 0x80000000, 0x10000, 0x42], function (num) { + t.ok(is(ES.ToUint32(num), ES.ToUint32(ES.ToInt32(num))), 'ToUint32(x) === ToUint32(ToInt32(x)) for 0x' + num.toString(16)); + t.ok(is(ES.ToUint32(-num), ES.ToUint32(ES.ToInt32(-num))), 'ToUint32(x) === ToUint32(ToInt32(x)) for -0x' + num.toString(16)); + }); + t.end(); +}); + +test('ToUint16', function (t) { + t.ok(is(0, ES.ToUint16(NaN)), 'NaN coerces to +0'); + forEach([0, Infinity], function (num) { + t.ok(is(0, ES.ToUint16(num)), num + ' returns +0'); + t.ok(is(0, ES.ToUint16(-num)), '-' + num + ' returns +0'); + }); + t['throws'](function () { return ES.ToUint16(uncoercibleObject); }, TypeError, 'uncoercibleObject throws'); + t.ok(is(ES.ToUint16(0x100000000), 0), '2^32 returns +0'); + t.ok(is(ES.ToUint16(0x100000000 - 1), 0x10000 - 1), '2^32 - 1 returns 2^16 - 1'); + t.ok(is(ES.ToUint16(0x80000000), 0), '2^31 returns +0'); + t.ok(is(ES.ToUint16(0x80000000 - 1), 0x10000 - 1), '2^31 - 1 returns 2^16 - 1'); + t.ok(is(ES.ToUint16(0x10000), 0), '2^16 returns +0'); + t.ok(is(ES.ToUint16(0x10000 - 1), 0x10000 - 1), '2^16 - 1 returns 2^16 - 1'); + t.end(); +}); + +test('ToString', function (t) { + t['throws'](function () { return ES.ToString(uncoercibleObject); }, TypeError, 'uncoercibleObject throws'); + t.end(); +}); + +test('ToObject', function (t) { + t['throws'](function () { return ES.ToObject(undefined); }, TypeError, 'undefined throws'); + t['throws'](function () { return ES.ToObject(null); }, TypeError, 'null throws'); + forEach(numbers, function (number) { + var obj = ES.ToObject(number); + t.equal(typeof obj, 'object', 'number ' + number + ' coerces to object'); + t.equal(true, obj instanceof Number, 'object of ' + number + ' is Number object'); + t.ok(is(obj.valueOf(), number), 'object of ' + number + ' coerces to ' + number); + }); + t.end(); +}); + +test('CheckObjectCoercible', function (t) { + t['throws'](function () { return ES.CheckObjectCoercible(undefined); }, TypeError, 'undefined throws'); + t['throws'](function () { return ES.CheckObjectCoercible(null); }, TypeError, 'null throws'); + var checkCoercible = function (value) { + t.doesNotThrow(function () { return ES.CheckObjectCoercible(value); }, '"' + value + '" does not throw'); + }; + forEach(objects.concat(nonNullPrimitives), checkCoercible); + t.end(); +}); + +test('IsCallable', function (t) { + t.equal(true, ES.IsCallable(function () {}), 'function is callable'); + var nonCallables = [/a/g, {}, Object.prototype, NaN].concat(primitives); + forEach(nonCallables, function (nonCallable) { + t.equal(false, ES.IsCallable(nonCallable), nonCallable + ' is not callable'); + }); + t.end(); +}); + +test('SameValue', function (t) { + 
t.equal(true, ES.SameValue(NaN, NaN), 'NaN is SameValue as NaN'); + t.equal(false, ES.SameValue(0, -0), '+0 is not SameValue as -0'); + forEach(objects.concat(primitives), function (val) { + t.equal(val === val, ES.SameValue(val, val), '"' + val + '" is SameValue to itself'); + }); + t.end(); +}); + +test('Type', function (t) { + t.equal(ES.Type(), 'Undefined', 'Type() is Undefined'); + t.equal(ES.Type(undefined), 'Undefined', 'Type(undefined) is Undefined'); + t.equal(ES.Type(null), 'Null', 'Type(null) is Null'); + t.equal(ES.Type(true), 'Boolean', 'Type(true) is Boolean'); + t.equal(ES.Type(false), 'Boolean', 'Type(false) is Boolean'); + t.equal(ES.Type(0), 'Number', 'Type(0) is Number'); + t.equal(ES.Type(NaN), 'Number', 'Type(NaN) is Number'); + t.equal(ES.Type('abc'), 'String', 'Type("abc") is String'); + t.equal(ES.Type(function () {}), 'Object', 'Type(function () {}) is Object'); + t.equal(ES.Type({}), 'Object', 'Type({}) is Object'); + t.end(); +}); + +var bothDescriptor = function () { + return { '[[Get]]': function () {}, '[[Value]]': true }; +}; +var accessorDescriptor = function () { + return { + '[[Get]]': function () {}, + '[[Enumerable]]': true, + '[[Configurable]]': true + }; +}; +var mutatorDescriptor = function () { + return { + '[[Set]]': function () {}, + '[[Enumerable]]': true, + '[[Configurable]]': true + }; +}; +var dataDescriptor = function () { + return { + '[[Value]]': 42, + '[[Writable]]': false, + '[[Configurable]]': false + }; +}; +var genericDescriptor = function () { + return { + '[[Configurable]]': true, + '[[Enumerable]]': false + }; +}; + +test('IsPropertyDescriptor', function (t) { + forEach(primitives, function (primitive) { + t.equal(ES.IsPropertyDescriptor(primitive), false, primitive + ' is not a Property Descriptor'); + }); + + t.equal(ES.IsPropertyDescriptor({ invalid: true }), false, 'invalid keys not allowed on a Property Descriptor'); + + t.equal(ES.IsPropertyDescriptor({}), true, 'empty object is an incomplete Property Descriptor'); + + t.equal(ES.IsPropertyDescriptor(accessorDescriptor()), true, 'accessor descriptor is a Property Descriptor'); + t.equal(ES.IsPropertyDescriptor(mutatorDescriptor()), true, 'mutator descriptor is a Property Descriptor'); + t.equal(ES.IsPropertyDescriptor(dataDescriptor()), true, 'data descriptor is a Property Descriptor'); + t.equal(ES.IsPropertyDescriptor(genericDescriptor()), true, 'generic descriptor is a Property Descriptor'); + + t['throws'](function () { + ES.IsPropertyDescriptor(bothDescriptor()); + }, TypeError, 'a Property Descriptor can not be both a Data and an Accessor Descriptor'); + + t.end(); +}); + +test('IsAccessorDescriptor', function (t) { + forEach(nonNullPrimitives.concat(null), function (primitive) { + t['throws'](function () { ES.IsAccessorDescriptor(primitive); }, TypeError, primitive + ' is not a Property Descriptor'); + }); + + t.equal(ES.IsAccessorDescriptor(), false, 'no value is not an Accessor Descriptor'); + t.equal(ES.IsAccessorDescriptor(undefined), false, 'undefined value is not an Accessor Descriptor'); + + t.equal(ES.IsAccessorDescriptor(accessorDescriptor()), true, 'accessor descriptor is an Accessor Descriptor'); + t.equal(ES.IsAccessorDescriptor(mutatorDescriptor()), true, 'mutator descriptor is an Accessor Descriptor'); + t.equal(ES.IsAccessorDescriptor(dataDescriptor()), false, 'data descriptor is not an Accessor Descriptor'); + t.equal(ES.IsAccessorDescriptor(genericDescriptor()), false, 'generic descriptor is not an Accessor Descriptor'); + + t.end(); +}); + 
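Editorial aside, not part of the vendored patch: the es5.js tests in this hunk exercise the Property Descriptor operations as a group, and a short sketch of how they compose outside the test harness may help readers skimming the diff. It assumes only what the vendored `node_modules/es-abstract/es5.js` added above actually exports (`ToPropertyDescriptor`, `IsDataDescriptor`, `IsAccessorDescriptor`, `IsGenericDescriptor`, `FromPropertyDescriptor`); the commented values mirror the spec-style field names used throughout these tests.

```js
// Illustrative sketch only -- not part of the files added by this diff.
// Round-trips a plain options object through the ES5 descriptor operations
// defined in node_modules/es-abstract/es5.js and tested above.
var ES5 = require('es-abstract/es5');

// ToPropertyDescriptor turns an ordinary object into a spec-style record
// keyed by the internal field names ('[[Value]]', '[[Writable]]', ...).
var desc = ES5.ToPropertyDescriptor({
	value: 42,
	writable: false,
	enumerable: true,
	configurable: true
});

ES5.IsDataDescriptor(desc);     // true  -- the record carries [[Value]]/[[Writable]]
ES5.IsAccessorDescriptor(desc); // false -- no [[Get]]/[[Set]] fields
ES5.IsGenericDescriptor(desc);  // false -- already classified as a data descriptor

// FromPropertyDescriptor maps the record back to the plain shape that
// Object.defineProperty accepts.
var obj = {};
Object.defineProperty(obj, 'answer', ES5.FromPropertyDescriptor(desc));
console.log(obj.answer); // 42
```

Mixing the two families is rejected, as the tests below verify: a record (or input object) that carries both a value/writable field and a get/set field makes `IsPropertyDescriptor` and `ToPropertyDescriptor` throw a `TypeError`.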
+test('IsDataDescriptor', function (t) { + forEach(nonNullPrimitives.concat(null), function (primitive) { + t['throws'](function () { ES.IsDataDescriptor(primitive); }, TypeError, primitive + ' is not a Property Descriptor'); + }); + + t.equal(ES.IsDataDescriptor(), false, 'no value is not a Data Descriptor'); + t.equal(ES.IsDataDescriptor(undefined), false, 'undefined value is not a Data Descriptor'); + + t.equal(ES.IsDataDescriptor(accessorDescriptor()), false, 'accessor descriptor is not a Data Descriptor'); + t.equal(ES.IsDataDescriptor(mutatorDescriptor()), false, 'mutator descriptor is not a Data Descriptor'); + t.equal(ES.IsDataDescriptor(dataDescriptor()), true, 'data descriptor is a Data Descriptor'); + t.equal(ES.IsDataDescriptor(genericDescriptor()), false, 'generic descriptor is not a Data Descriptor'); + + t.end(); +}); + +test('IsGenericDescriptor', function (t) { + forEach(nonNullPrimitives.concat(null), function (primitive) { + t['throws']( + function () { ES.IsGenericDescriptor(primitive); }, + TypeError, + primitive + ' is not a Property Descriptor' + ); + }); + + t.equal(ES.IsGenericDescriptor(), false, 'no value is not a Data Descriptor'); + t.equal(ES.IsGenericDescriptor(undefined), false, 'undefined value is not a Data Descriptor'); + + t.equal(ES.IsGenericDescriptor(accessorDescriptor()), false, 'accessor descriptor is not a generic Descriptor'); + t.equal(ES.IsGenericDescriptor(mutatorDescriptor()), false, 'mutator descriptor is not a generic Descriptor'); + t.equal(ES.IsGenericDescriptor(dataDescriptor()), false, 'data descriptor is not a generic Descriptor'); + + t.equal(ES.IsGenericDescriptor(genericDescriptor()), true, 'generic descriptor is a generic Descriptor'); + + t.end(); +}); + +test('FromPropertyDescriptor', function (t) { + t.equal(ES.FromPropertyDescriptor(), undefined, 'no value begets undefined'); + t.equal(ES.FromPropertyDescriptor(undefined), undefined, 'undefined value begets undefined'); + + forEach(nonNullPrimitives.concat(null), function (primitive) { + t['throws']( + function () { ES.FromPropertyDescriptor(primitive); }, + TypeError, + primitive + ' is not a Property Descriptor' + ); + }); + + var accessor = accessorDescriptor(); + t.deepEqual(ES.FromPropertyDescriptor(accessor), { + get: accessor['[[Get]]'], + set: accessor['[[Set]]'], + enumerable: !!accessor['[[Enumerable]]'], + configurable: !!accessor['[[Configurable]]'] + }); + + var mutator = mutatorDescriptor(); + t.deepEqual(ES.FromPropertyDescriptor(mutator), { + get: mutator['[[Get]]'], + set: mutator['[[Set]]'], + enumerable: !!mutator['[[Enumerable]]'], + configurable: !!mutator['[[Configurable]]'] + }); + var data = dataDescriptor(); + t.deepEqual(ES.FromPropertyDescriptor(data), { + value: data['[[Value]]'], + writable: data['[[Writable]]'], + enumerable: !!data['[[Enumerable]]'], + configurable: !!data['[[Configurable]]'] + }); + + t['throws']( + function () { ES.FromPropertyDescriptor(genericDescriptor()); }, + TypeError, + 'a complete Property Descriptor is required' + ); + + t.end(); +}); + +test('ToPropertyDescriptor', function (t) { + forEach(nonNullPrimitives.concat(null), function (primitive) { + t['throws']( + function () { ES.ToPropertyDescriptor(primitive); }, + TypeError, + primitive + ' is not an Object' + ); + }); + + var accessor = accessorDescriptor(); + t.deepEqual(ES.ToPropertyDescriptor({ + get: accessor['[[Get]]'], + enumerable: !!accessor['[[Enumerable]]'], + configurable: !!accessor['[[Configurable]]'] + }), accessor); + + var mutator = mutatorDescriptor(); 
+ t.deepEqual(ES.ToPropertyDescriptor({ + set: mutator['[[Set]]'], + enumerable: !!mutator['[[Enumerable]]'], + configurable: !!mutator['[[Configurable]]'] + }), mutator); + + var data = dataDescriptor(); + t.deepEqual(ES.ToPropertyDescriptor({ + value: data['[[Value]]'], + writable: data['[[Writable]]'], + configurable: !!data['[[Configurable]]'] + }), data); + + var both = bothDescriptor(); + t['throws']( + function () { + ES.ToPropertyDescriptor({ get: both['[[Get]]'], value: both['[[Value]]'] }); + }, + TypeError, + 'data and accessor descriptors are mutually exclusive' + ); + + t['throws']( + function () { ES.ToPropertyDescriptor({ get: 'not callable' }); }, + TypeError, + '"get" must be undefined or callable' + ); + + t['throws']( + function () { ES.ToPropertyDescriptor({ set: 'not callable' }); }, + TypeError, + '"set" must be undefined or callable' + ); + + t.end(); +}); diff --git a/node_modules/es-abstract/test/es6.js b/node_modules/es-abstract/test/es6.js new file mode 100644 index 0000000000000..e7c9d98a243fd --- /dev/null +++ b/node_modules/es-abstract/test/es6.js @@ -0,0 +1,18 @@ +'use strict'; + +var test = require('tape'); + +var ES = require('../'); +var ES6 = ES.ES6; +var ES2015 = ES.ES2015; +var ES6entry = require('../es6'); + +test('legacy es6 export', function (t) { + t.equal(ES6, ES2015, 'main ES6 === main ES2015'); + t.end(); +}); + +test('legacy es6 entry point', function (t) { + t.equal(ES6, ES6entry, 'main ES6 === ES6 entry point'); + t.end(); +}); diff --git a/node_modules/es-abstract/test/es7.js b/node_modules/es-abstract/test/es7.js new file mode 100644 index 0000000000000..ee57e153b05b8 --- /dev/null +++ b/node_modules/es-abstract/test/es7.js @@ -0,0 +1,18 @@ +'use strict'; + +var test = require('tape'); + +var ES = require('../'); +var ES7 = ES.ES7; +var ES2016 = ES.ES2016; +var ES7entry = require('../es7'); + +test('legacy es7 export', function (t) { + t.equal(ES7, ES2016, 'main ES7 === main ES2016'); + t.end(); +}); + +test('legacy es7 entry point', function (t) { + t.equal(ES7, ES7entry, 'main ES7 === ES7 entry point'); + t.end(); +}); diff --git a/node_modules/es-abstract/test/helpers/values.js b/node_modules/es-abstract/test/helpers/values.js new file mode 100644 index 0000000000000..02408dd6bde59 --- /dev/null +++ b/node_modules/es-abstract/test/helpers/values.js @@ -0,0 +1,53 @@ +'use strict'; + +var hasSymbols = typeof Symbol === 'function' && typeof Symbol.iterator === 'symbol'; + +var coercibleObject = { valueOf: function () { return 3; }, toString: function () { return 42; } }; +var valueOfOnlyObject = { valueOf: function () { return 4; }, toString: function () { return {}; } }; +var toStringOnlyObject = { valueOf: function () { return {}; }, toString: function () { return 7; } }; +var uncoercibleObject = { valueOf: function () { return {}; }, toString: function () { return {}; } }; +var objects = [{}, coercibleObject, toStringOnlyObject, valueOfOnlyObject]; +var nullPrimitives = [undefined, null]; +var nonIntegerNumbers = [-1.3, 0.2, 1.8, 1 / 3]; +var numbers = [0, -0, Infinity, -Infinity, 42]; +var strings = ['', 'foo']; +var booleans = [true, false]; +var symbols = hasSymbols ? 
[Symbol.iterator, Symbol('foo')] : []; +var nonSymbolPrimitives = [].concat(nullPrimitives, booleans, strings, numbers); +var nonNumberPrimitives = [].concat(nullPrimitives, booleans, strings, symbols); +var nonNullPrimitives = [].concat(booleans, strings, numbers, symbols); +var nonUndefinedPrimitives = [].concat(null, nonNullPrimitives); +var nonStrings = [].concat(nullPrimitives, booleans, numbers, symbols, objects); +var primitives = [].concat(nullPrimitives, nonNullPrimitives); +var nonPropertyKeys = [].concat(nullPrimitives, booleans, numbers, objects); +var propertyKeys = [].concat(strings, symbols); +var nonBooleans = [].concat(nullPrimitives, strings, symbols, numbers, objects); +var falsies = [].concat(nullPrimitives, false, '', 0, -0, NaN); +var truthies = [].concat(true, 'foo', 42, symbols, objects); + +module.exports = { + coercibleObject: coercibleObject, + valueOfOnlyObject: valueOfOnlyObject, + toStringOnlyObject: toStringOnlyObject, + uncoercibleObject: uncoercibleObject, + objects: objects, + nullPrimitives: nullPrimitives, + numbers: numbers, + strings: strings, + booleans: booleans, + symbols: symbols, + hasSymbols: hasSymbols, + nonSymbolPrimitives: nonSymbolPrimitives, + nonNumberPrimitives: nonNumberPrimitives, + nonNullPrimitives: nonNullPrimitives, + nonUndefinedPrimitives: nonUndefinedPrimitives, + nonStrings: nonStrings, + nonNumbers: nonNumberPrimitives.concat(objects), + nonIntegerNumbers: nonIntegerNumbers, + primitives: primitives, + nonPropertyKeys: nonPropertyKeys, + propertyKeys: propertyKeys, + nonBooleans: nonBooleans, + falsies: falsies, + truthies: truthies +}; diff --git a/node_modules/es-abstract/test/index.js b/node_modules/es-abstract/test/index.js new file mode 100644 index 0000000000000..63271ac2006c6 --- /dev/null +++ b/node_modules/es-abstract/test/index.js @@ -0,0 +1,28 @@ +'use strict'; + +var ES = require('../'); +var test = require('tape'); + +var ESkeys = Object.keys(ES).sort(); +var ES6keys = Object.keys(ES.ES6).sort(); + +test('exposed properties', function (t) { + t.deepEqual(ESkeys, ES6keys.concat(['ES2017', 'ES7', 'ES2016', 'ES6', 'ES2015', 'ES5']).sort(), 'main ES object keys match ES6 keys'); + t.end(); +}); + +test('methods match', function (t) { + ES6keys.forEach(function (key) { + t.equal(ES.ES6[key], ES[key], 'method ' + key + ' on main ES object is ES6 method'); + }); + t.end(); +}); + +require('./GetIntrinsic'); + +require('./es5'); +require('./es6'); +require('./es2015'); +require('./es7'); +require('./es2016'); +require('./es2017'); diff --git a/node_modules/es-abstract/test/tests.js b/node_modules/es-abstract/test/tests.js new file mode 100644 index 0000000000000..df52c82c7aafd --- /dev/null +++ b/node_modules/es-abstract/test/tests.js @@ -0,0 +1,1610 @@ +'use strict'; + +var test = require('tape'); + +var forEach = require('foreach'); +var is = require('object-is'); +var debug = require('object-inspect'); +var assign = require('object.assign'); + +var v = require('./helpers/values'); +var diffOps = require('./diffOps'); + +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || Math.pow(2, 53) - 1; + +var getArraySubclassWithSpeciesConstructor = function getArraySubclass(speciesConstructor) { + var Bar = function Bar() { + var inst = []; + Object.setPrototypeOf(inst, Bar.prototype); + Object.defineProperty(inst, 'constructor', { value: Bar }); + return inst; + }; + Bar.prototype = Object.create(Array.prototype); + Object.setPrototypeOf(Bar, Array); + Object.defineProperty(Bar, Symbol.species, { value: speciesConstructor }); + + 
return Bar; +}; + +var hasSpecies = v.hasSymbols && Symbol.species; + +var hasGroups = 'groups' in (/a/).exec('a'); +var groups = function groups(matchObject) { + return hasGroups ? assign(matchObject, { groups: matchObject.groups }) : matchObject; +}; + +var es2015 = function ES2015(ES, ops, expectedMissing) { + test('has expected operations', function (t) { + var diff = diffOps(ES, ops, expectedMissing); + + t.deepEqual(diff.extra, [], 'no extra ops'); + + t.deepEqual(diff.missing, [], 'no unexpected missing ops'); + + t.end(); + }); + + test('ToPrimitive', function (t) { + t.test('primitives', function (st) { + var testPrimitive = function (primitive) { + st.ok(is(ES.ToPrimitive(primitive), primitive), debug(primitive) + ' is returned correctly'); + }; + forEach(v.primitives, testPrimitive); + st.end(); + }); + + t.test('objects', function (st) { + st.equal(ES.ToPrimitive(v.coercibleObject), 3, 'coercibleObject with no hint coerces to valueOf'); + st.ok(is(ES.ToPrimitive({}), '[object Object]'), '{} with no hint coerces to Object#toString'); + st.equal(ES.ToPrimitive(v.coercibleObject, Number), 3, 'coercibleObject with hint Number coerces to valueOf'); + st.ok(is(ES.ToPrimitive({}, Number), '[object Object]'), '{} with hint Number coerces to NaN'); + st.equal(ES.ToPrimitive(v.coercibleObject, String), 42, 'coercibleObject with hint String coerces to nonstringified toString'); + st.equal(ES.ToPrimitive({}, String), '[object Object]', '{} with hint String coerces to Object#toString'); + st.equal(ES.ToPrimitive(v.toStringOnlyObject), 7, 'toStringOnlyObject returns non-stringified toString'); + st.equal(ES.ToPrimitive(v.valueOfOnlyObject), 4, 'valueOfOnlyObject returns valueOf'); + st['throws'](function () { return ES.ToPrimitive(v.uncoercibleObject); }, TypeError, 'uncoercibleObject throws a TypeError'); + st.end(); + }); + + t.test('dates', function (st) { + var invalid = new Date(NaN); + st.equal(ES.ToPrimitive(invalid), Date.prototype.toString.call(invalid), 'invalid Date coerces to Date#toString'); + var now = new Date(); + st.equal(ES.ToPrimitive(now), Date.prototype.toString.call(now), 'Date coerces to Date#toString'); + st.end(); + }); + + t.end(); + }); + + test('ToBoolean', function (t) { + t.equal(false, ES.ToBoolean(undefined), 'undefined coerces to false'); + t.equal(false, ES.ToBoolean(null), 'null coerces to false'); + t.equal(false, ES.ToBoolean(false), 'false returns false'); + t.equal(true, ES.ToBoolean(true), 'true returns true'); + + t.test('numbers', function (st) { + forEach([0, -0, NaN], function (falsyNumber) { + st.equal(false, ES.ToBoolean(falsyNumber), 'falsy number ' + falsyNumber + ' coerces to false'); + }); + forEach([Infinity, 42, 1, -Infinity], function (truthyNumber) { + st.equal(true, ES.ToBoolean(truthyNumber), 'truthy number ' + truthyNumber + ' coerces to true'); + }); + + st.end(); + }); + + t.equal(false, ES.ToBoolean(''), 'empty string coerces to false'); + t.equal(true, ES.ToBoolean('foo'), 'nonempty string coerces to true'); + + t.test('objects', function (st) { + forEach(v.objects, function (obj) { + st.equal(true, ES.ToBoolean(obj), 'object coerces to true'); + }); + st.equal(true, ES.ToBoolean(v.uncoercibleObject), 'uncoercibleObject coerces to true'); + + st.end(); + }); + + t.end(); + }); + + test('ToNumber', function (t) { + t.ok(is(NaN, ES.ToNumber(undefined)), 'undefined coerces to NaN'); + t.ok(is(ES.ToNumber(null), 0), 'null coerces to +0'); + t.ok(is(ES.ToNumber(false), 0), 'false coerces to +0'); + t.equal(1, ES.ToNumber(true), 'true 
coerces to 1'); + + t.test('numbers', function (st) { + st.ok(is(NaN, ES.ToNumber(NaN)), 'NaN returns itself'); + forEach([0, -0, 42, Infinity, -Infinity], function (num) { + st.equal(num, ES.ToNumber(num), num + ' returns itself'); + }); + forEach(['foo', '0', '4a', '2.0', 'Infinity', '-Infinity'], function (numString) { + st.ok(is(+numString, ES.ToNumber(numString)), '"' + numString + '" coerces to ' + Number(numString)); + }); + st.end(); + }); + + t.test('objects', function (st) { + forEach(v.objects, function (object) { + st.ok(is(ES.ToNumber(object), ES.ToNumber(ES.ToPrimitive(object))), 'object ' + object + ' coerces to same as ToPrimitive of object does'); + }); + st['throws'](function () { return ES.ToNumber(v.uncoercibleObject); }, TypeError, 'uncoercibleObject throws'); + st.end(); + }); + + t.test('binary literals', function (st) { + st.equal(ES.ToNumber('0b10'), 2, '0b10 is 2'); + st.equal(ES.ToNumber({ toString: function () { return '0b11'; } }), 3, 'Object that toStrings to 0b11 is 3'); + + st.equal(true, is(ES.ToNumber('0b12'), NaN), '0b12 is NaN'); + st.equal(true, is(ES.ToNumber({ toString: function () { return '0b112'; } }), NaN), 'Object that toStrings to 0b112 is NaN'); + st.end(); + }); + + t.test('octal literals', function (st) { + st.equal(ES.ToNumber('0o10'), 8, '0o10 is 8'); + st.equal(ES.ToNumber({ toString: function () { return '0o11'; } }), 9, 'Object that toStrings to 0o11 is 9'); + + st.equal(true, is(ES.ToNumber('0o18'), NaN), '0o18 is NaN'); + st.equal(true, is(ES.ToNumber({ toString: function () { return '0o118'; } }), NaN), 'Object that toStrings to 0o118 is NaN'); + st.end(); + }); + + t.test('signed hex numbers', function (st) { + st.equal(true, is(ES.ToNumber('-0xF'), NaN), '-0xF is NaN'); + st.equal(true, is(ES.ToNumber(' -0xF '), NaN), 'space-padded -0xF is NaN'); + st.equal(true, is(ES.ToNumber('+0xF'), NaN), '+0xF is NaN'); + st.equal(true, is(ES.ToNumber(' +0xF '), NaN), 'space-padded +0xF is NaN'); + + st.end(); + }); + + t.test('trimming of whitespace and non-whitespace characters', function (st) { + var whitespace = ' \t\x0b\f\xa0\ufeff\n\r\u2028\u2029\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000'; + st.equal(0, ES.ToNumber(whitespace + 0 + whitespace), 'whitespace is trimmed'); + + // Zero-width space (zws), next line character (nel), and non-character (bom) are not whitespace. 
+ var nonWhitespaces = { + '\\u0085': '\u0085', + '\\u200b': '\u200b', + '\\ufffe': '\ufffe' + }; + + forEach(nonWhitespaces, function (desc, nonWS) { + st.equal(true, is(ES.ToNumber(nonWS + 0 + nonWS), NaN), 'non-whitespace ' + desc + ' not trimmed'); + }); + + st.end(); + }); + + forEach(v.symbols, function (symbol) { + t['throws']( + function () { ES.ToNumber(symbol); }, + TypeError, + 'Symbols can’t be converted to a Number: ' + debug(symbol) + ); + }); + + t.test('dates', function (st) { + var invalid = new Date(NaN); + st.ok(is(ES.ToNumber(invalid), NaN), 'invalid Date coerces to NaN'); + var now = Date.now(); + st.equal(ES.ToNumber(new Date(now)), now, 'Date coerces to timestamp'); + st.end(); + }); + + t.end(); + }); + + test('ToInteger', function (t) { + t.ok(is(0, ES.ToInteger(NaN)), 'NaN coerces to +0'); + forEach([0, Infinity, 42], function (num) { + t.ok(is(num, ES.ToInteger(num)), num + ' returns itself'); + t.ok(is(-num, ES.ToInteger(-num)), '-' + num + ' returns itself'); + }); + t.equal(3, ES.ToInteger(Math.PI), 'pi returns 3'); + t['throws'](function () { return ES.ToInteger(v.uncoercibleObject); }, TypeError, 'uncoercibleObject throws'); + t.end(); + }); + + test('ToInt32', function (t) { + t.ok(is(0, ES.ToInt32(NaN)), 'NaN coerces to +0'); + forEach([0, Infinity], function (num) { + t.ok(is(0, ES.ToInt32(num)), num + ' returns +0'); + t.ok(is(0, ES.ToInt32(-num)), '-' + num + ' returns +0'); + }); + t['throws'](function () { return ES.ToInt32(v.uncoercibleObject); }, TypeError, 'uncoercibleObject throws'); + t.ok(is(ES.ToInt32(0x100000000), 0), '2^32 returns +0'); + t.ok(is(ES.ToInt32(0x100000000 - 1), -1), '2^32 - 1 returns -1'); + t.ok(is(ES.ToInt32(0x80000000), -0x80000000), '2^31 returns -2^31'); + t.ok(is(ES.ToInt32(0x80000000 - 1), 0x80000000 - 1), '2^31 - 1 returns 2^31 - 1'); + forEach([0, Infinity, NaN, 0x100000000, 0x80000000, 0x10000, 0x42], function (num) { + t.ok(is(ES.ToInt32(num), ES.ToInt32(ES.ToUint32(num))), 'ToInt32(x) === ToInt32(ToUint32(x)) for 0x' + num.toString(16)); + t.ok(is(ES.ToInt32(-num), ES.ToInt32(ES.ToUint32(-num))), 'ToInt32(x) === ToInt32(ToUint32(x)) for -0x' + num.toString(16)); + }); + t.end(); + }); + + test('ToUint32', function (t) { + t.ok(is(0, ES.ToUint32(NaN)), 'NaN coerces to +0'); + forEach([0, Infinity], function (num) { + t.ok(is(0, ES.ToUint32(num)), num + ' returns +0'); + t.ok(is(0, ES.ToUint32(-num)), '-' + num + ' returns +0'); + }); + t['throws'](function () { return ES.ToUint32(v.uncoercibleObject); }, TypeError, 'uncoercibleObject throws'); + t.ok(is(ES.ToUint32(0x100000000), 0), '2^32 returns +0'); + t.ok(is(ES.ToUint32(0x100000000 - 1), 0x100000000 - 1), '2^32 - 1 returns 2^32 - 1'); + t.ok(is(ES.ToUint32(0x80000000), 0x80000000), '2^31 returns 2^31'); + t.ok(is(ES.ToUint32(0x80000000 - 1), 0x80000000 - 1), '2^31 - 1 returns 2^31 - 1'); + forEach([0, Infinity, NaN, 0x100000000, 0x80000000, 0x10000, 0x42], function (num) { + t.ok(is(ES.ToUint32(num), ES.ToUint32(ES.ToInt32(num))), 'ToUint32(x) === ToUint32(ToInt32(x)) for 0x' + num.toString(16)); + t.ok(is(ES.ToUint32(-num), ES.ToUint32(ES.ToInt32(-num))), 'ToUint32(x) === ToUint32(ToInt32(x)) for -0x' + num.toString(16)); + }); + t.end(); + }); + + test('ToInt16', function (t) { + t.ok(is(0, ES.ToInt16(NaN)), 'NaN coerces to +0'); + forEach([0, Infinity], function (num) { + t.ok(is(0, ES.ToInt16(num)), num + ' returns +0'); + t.ok(is(0, ES.ToInt16(-num)), '-' + num + ' returns +0'); + }); + t['throws'](function () { return ES.ToInt16(v.uncoercibleObject); }, 
TypeError, 'uncoercibleObject throws'); + t.ok(is(ES.ToInt16(0x100000000), 0), '2^32 returns +0'); + t.ok(is(ES.ToInt16(0x100000000 - 1), -1), '2^32 - 1 returns -1'); + t.ok(is(ES.ToInt16(0x80000000), 0), '2^31 returns +0'); + t.ok(is(ES.ToInt16(0x80000000 - 1), -1), '2^31 - 1 returns -1'); + t.ok(is(ES.ToInt16(0x10000), 0), '2^16 returns +0'); + t.ok(is(ES.ToInt16(0x10000 - 1), -1), '2^16 - 1 returns -1'); + t.end(); + }); + + test('ToUint16', function (t) { + t.ok(is(0, ES.ToUint16(NaN)), 'NaN coerces to +0'); + forEach([0, Infinity], function (num) { + t.ok(is(0, ES.ToUint16(num)), num + ' returns +0'); + t.ok(is(0, ES.ToUint16(-num)), '-' + num + ' returns +0'); + }); + t['throws'](function () { return ES.ToUint16(v.uncoercibleObject); }, TypeError, 'uncoercibleObject throws'); + t.ok(is(ES.ToUint16(0x100000000), 0), '2^32 returns +0'); + t.ok(is(ES.ToUint16(0x100000000 - 1), 0x10000 - 1), '2^32 - 1 returns 2^16 - 1'); + t.ok(is(ES.ToUint16(0x80000000), 0), '2^31 returns +0'); + t.ok(is(ES.ToUint16(0x80000000 - 1), 0x10000 - 1), '2^31 - 1 returns 2^16 - 1'); + t.ok(is(ES.ToUint16(0x10000), 0), '2^16 returns +0'); + t.ok(is(ES.ToUint16(0x10000 - 1), 0x10000 - 1), '2^16 - 1 returns 2^16 - 1'); + t.end(); + }); + + test('ToInt8', function (t) { + t.ok(is(0, ES.ToInt8(NaN)), 'NaN coerces to +0'); + forEach([0, Infinity], function (num) { + t.ok(is(0, ES.ToInt8(num)), num + ' returns +0'); + t.ok(is(0, ES.ToInt8(-num)), '-' + num + ' returns +0'); + }); + t['throws'](function () { return ES.ToInt8(v.uncoercibleObject); }, TypeError, 'uncoercibleObject throws'); + t.ok(is(ES.ToInt8(0x100000000), 0), '2^32 returns +0'); + t.ok(is(ES.ToInt8(0x100000000 - 1), -1), '2^32 - 1 returns -1'); + t.ok(is(ES.ToInt8(0x80000000), 0), '2^31 returns +0'); + t.ok(is(ES.ToInt8(0x80000000 - 1), -1), '2^31 - 1 returns -1'); + t.ok(is(ES.ToInt8(0x10000), 0), '2^16 returns +0'); + t.ok(is(ES.ToInt8(0x10000 - 1), -1), '2^16 - 1 returns -1'); + t.ok(is(ES.ToInt8(0x100), 0), '2^8 returns +0'); + t.ok(is(ES.ToInt8(0x100 - 1), -1), '2^8 - 1 returns -1'); + t.ok(is(ES.ToInt8(0x10), 0x10), '2^4 returns 2^4'); + t.end(); + }); + + test('ToUint8', function (t) { + t.ok(is(0, ES.ToUint8(NaN)), 'NaN coerces to +0'); + forEach([0, Infinity], function (num) { + t.ok(is(0, ES.ToUint8(num)), num + ' returns +0'); + t.ok(is(0, ES.ToUint8(-num)), '-' + num + ' returns +0'); + }); + t['throws'](function () { return ES.ToUint8(v.uncoercibleObject); }, TypeError, 'uncoercibleObject throws'); + t.ok(is(ES.ToUint8(0x100000000), 0), '2^32 returns +0'); + t.ok(is(ES.ToUint8(0x100000000 - 1), 0x100 - 1), '2^32 - 1 returns 2^8 - 1'); + t.ok(is(ES.ToUint8(0x80000000), 0), '2^31 returns +0'); + t.ok(is(ES.ToUint8(0x80000000 - 1), 0x100 - 1), '2^31 - 1 returns 2^8 - 1'); + t.ok(is(ES.ToUint8(0x10000), 0), '2^16 returns +0'); + t.ok(is(ES.ToUint8(0x10000 - 1), 0x100 - 1), '2^16 - 1 returns 2^8 - 1'); + t.ok(is(ES.ToUint8(0x100), 0), '2^8 returns +0'); + t.ok(is(ES.ToUint8(0x100 - 1), 0x100 - 1), '2^8 - 1 returns 2^16 - 1'); + t.ok(is(ES.ToUint8(0x10), 0x10), '2^4 returns 2^4'); + t.ok(is(ES.ToUint8(0x10 - 1), 0x10 - 1), '2^4 - 1 returns 2^4 - 1'); + t.end(); + }); + + test('ToUint8Clamp', function (t) { + t.ok(is(0, ES.ToUint8Clamp(NaN)), 'NaN coerces to +0'); + t.ok(is(0, ES.ToUint8Clamp(0)), '+0 returns +0'); + t.ok(is(0, ES.ToUint8Clamp(-0)), '-0 returns +0'); + t.ok(is(0, ES.ToUint8Clamp(-Infinity)), '-Infinity returns +0'); + t['throws'](function () { return ES.ToUint8Clamp(v.uncoercibleObject); }, TypeError, 'uncoercibleObject 
throws'); + forEach([255, 256, 0x100000, Infinity], function (number) { + t.ok(is(255, ES.ToUint8Clamp(number)), number + ' coerces to 255'); + }); + t.equal(1, ES.ToUint8Clamp(1.49), '1.49 coerces to 1'); + t.equal(2, ES.ToUint8Clamp(1.5), '1.5 coerces to 2, because 2 is even'); + t.equal(2, ES.ToUint8Clamp(1.51), '1.51 coerces to 2'); + + t.equal(2, ES.ToUint8Clamp(2.49), '2.49 coerces to 2'); + t.equal(2, ES.ToUint8Clamp(2.5), '2.5 coerces to 2, because 2 is even'); + t.equal(3, ES.ToUint8Clamp(2.51), '2.51 coerces to 3'); + t.end(); + }); + + test('ToString', function (t) { + forEach(v.objects.concat(v.nonSymbolPrimitives), function (item) { + t.equal(ES.ToString(item), String(item), 'ES.ToString(' + debug(item) + ') ToStrings to String(' + debug(item) + ')'); + }); + + t['throws'](function () { return ES.ToString(v.uncoercibleObject); }, TypeError, 'uncoercibleObject throws'); + + forEach(v.symbols, function (symbol) { + t['throws'](function () { return ES.ToString(symbol); }, TypeError, debug(symbol) + ' throws'); + }); + t.end(); + }); + + test('ToObject', function (t) { + t['throws'](function () { return ES.ToObject(undefined); }, TypeError, 'undefined throws'); + t['throws'](function () { return ES.ToObject(null); }, TypeError, 'null throws'); + forEach(v.numbers, function (number) { + var obj = ES.ToObject(number); + t.equal(typeof obj, 'object', 'number ' + number + ' coerces to object'); + t.equal(true, obj instanceof Number, 'object of ' + number + ' is Number object'); + t.ok(is(obj.valueOf(), number), 'object of ' + number + ' coerces to ' + number); + }); + t.end(); + }); + + test('RequireObjectCoercible', function (t) { + t.equal(false, 'CheckObjectCoercible' in ES, 'CheckObjectCoercible -> RequireObjectCoercible in ES6'); + t['throws'](function () { return ES.RequireObjectCoercible(undefined); }, TypeError, 'undefined throws'); + t['throws'](function () { return ES.RequireObjectCoercible(null); }, TypeError, 'null throws'); + var isCoercible = function (value) { + t.doesNotThrow(function () { return ES.RequireObjectCoercible(value); }, debug(value) + ' does not throw'); + }; + forEach(v.objects.concat(v.nonNullPrimitives), isCoercible); + t.end(); + }); + + test('IsCallable', function (t) { + t.equal(true, ES.IsCallable(function () {}), 'function is callable'); + var nonCallables = [/a/g, {}, Object.prototype, NaN].concat(v.primitives); + forEach(nonCallables, function (nonCallable) { + t.equal(false, ES.IsCallable(nonCallable), debug(nonCallable) + ' is not callable'); + }); + t.end(); + }); + + test('SameValue', function (t) { + t.equal(true, ES.SameValue(NaN, NaN), 'NaN is SameValue as NaN'); + t.equal(false, ES.SameValue(0, -0), '+0 is not SameValue as -0'); + forEach(v.objects.concat(v.primitives), function (val) { + t.equal(val === val, ES.SameValue(val, val), debug(val) + ' is SameValue to itself'); + }); + t.end(); + }); + + test('SameValueZero', function (t) { + t.equal(true, ES.SameValueZero(NaN, NaN), 'NaN is SameValueZero as NaN'); + t.equal(true, ES.SameValueZero(0, -0), '+0 is SameValueZero as -0'); + forEach(v.objects.concat(v.primitives), function (val) { + t.equal(val === val, ES.SameValueZero(val, val), debug(val) + ' is SameValueZero to itself'); + }); + t.end(); + }); + + test('ToPropertyKey', function (t) { + forEach(v.objects.concat(v.nonSymbolPrimitives), function (value) { + t.equal(ES.ToPropertyKey(value), String(value), 'ToPropertyKey(value) === String(value) for non-Symbols'); + }); + + forEach(v.symbols, function (symbol) { + t.equal( + 
ES.ToPropertyKey(symbol), + symbol, + 'ToPropertyKey(' + debug(symbol) + ') === ' + debug(symbol) + ); + t.equal( + ES.ToPropertyKey(Object(symbol)), + symbol, + 'ToPropertyKey(' + debug(Object(symbol)) + ') === ' + debug(symbol) + ); + }); + + t.end(); + }); + + test('ToLength', function (t) { + t['throws'](function () { return ES.ToLength(v.uncoercibleObject); }, TypeError, 'uncoercibleObject throws a TypeError'); + t.equal(3, ES.ToLength(v.coercibleObject), 'coercibleObject coerces to 3'); + t.equal(42, ES.ToLength('42.5'), '"42.5" coerces to 42'); + t.equal(7, ES.ToLength(7.3), '7.3 coerces to 7'); + forEach([-0, -1, -42, -Infinity], function (negative) { + t.ok(is(0, ES.ToLength(negative)), negative + ' coerces to +0'); + }); + t.equal(MAX_SAFE_INTEGER, ES.ToLength(MAX_SAFE_INTEGER + 1), '2^53 coerces to 2^53 - 1'); + t.equal(MAX_SAFE_INTEGER, ES.ToLength(MAX_SAFE_INTEGER + 3), '2^53 + 2 coerces to 2^53 - 1'); + t.end(); + }); + + test('IsArray', function (t) { + t.equal(true, ES.IsArray([]), '[] is array'); + t.equal(false, ES.IsArray({}), '{} is not array'); + t.equal(false, ES.IsArray({ length: 1, 0: true }), 'arraylike object is not array'); + forEach(v.objects.concat(v.primitives), function (value) { + t.equal(false, ES.IsArray(value), debug(value) + ' is not array'); + }); + t.end(); + }); + + test('IsRegExp', function (t) { + forEach([/a/g, new RegExp('a', 'g')], function (regex) { + t.equal(true, ES.IsRegExp(regex), regex + ' is regex'); + }); + + forEach(v.objects.concat(v.primitives), function (nonRegex) { + t.equal(false, ES.IsRegExp(nonRegex), debug(nonRegex) + ' is not regex'); + }); + + t.test('Symbol.match', { skip: !v.hasSymbols || !Symbol.match }, function (st) { + var obj = {}; + obj[Symbol.match] = true; + st.equal(true, ES.IsRegExp(obj), 'object with truthy Symbol.match is regex'); + + var regex = /a/; + regex[Symbol.match] = false; + st.equal(false, ES.IsRegExp(regex), 'regex with falsy Symbol.match is not regex'); + + st.end(); + }); + + t.end(); + }); + + test('IsPropertyKey', function (t) { + forEach(v.numbers.concat(v.objects), function (notKey) { + t.equal(false, ES.IsPropertyKey(notKey), debug(notKey) + ' is not property key'); + }); + + t.equal(true, ES.IsPropertyKey('foo'), 'string is property key'); + + forEach(v.symbols, function (symbol) { + t.equal(true, ES.IsPropertyKey(symbol), debug(symbol) + ' is property key'); + }); + t.end(); + }); + + test('IsInteger', function (t) { + for (var i = -100; i < 100; i += 10) { + t.equal(true, ES.IsInteger(i), i + ' is integer'); + t.equal(false, ES.IsInteger(i + 0.2), (i + 0.2) + ' is not integer'); + } + t.equal(true, ES.IsInteger(-0), '-0 is integer'); + var notInts = v.nonNumbers.concat(v.nonIntegerNumbers, [Infinity, -Infinity, NaN, [], new Date()]); + forEach(notInts, function (notInt) { + t.equal(false, ES.IsInteger(notInt), debug(notInt) + ' is not integer'); + }); + t.equal(false, ES.IsInteger(v.uncoercibleObject), 'uncoercibleObject is not integer'); + t.end(); + }); + + test('IsExtensible', function (t) { + forEach(v.objects, function (object) { + t.equal(true, ES.IsExtensible(object), debug(object) + ' object is extensible'); + }); + forEach(v.primitives, function (primitive) { + t.equal(false, ES.IsExtensible(primitive), debug(primitive) + ' is not extensible'); + }); + if (Object.preventExtensions) { + t.equal(false, ES.IsExtensible(Object.preventExtensions({})), 'object with extensions prevented is not extensible'); + } + t.end(); + }); + + test('CanonicalNumericIndexString', function (t) { + var 
throwsOnNonString = function (notString) { + t['throws']( + function () { return ES.CanonicalNumericIndexString(notString); }, + TypeError, + debug(notString) + ' is not a string' + ); + }; + forEach(v.objects.concat(v.numbers), throwsOnNonString); + t.ok(is(-0, ES.CanonicalNumericIndexString('-0')), '"-0" returns -0'); + for (var i = -50; i < 50; i += 10) { + t.equal(i, ES.CanonicalNumericIndexString(String(i)), '"' + i + '" returns ' + i); + t.equal(undefined, ES.CanonicalNumericIndexString(String(i) + 'a'), '"' + i + 'a" returns undefined'); + } + t.end(); + }); + + test('IsConstructor', function (t) { + t.equal(true, ES.IsConstructor(function () {}), 'function is constructor'); + t.equal(false, ES.IsConstructor(/a/g), 'regex is not constructor'); + forEach(v.objects, function (object) { + t.equal(false, ES.IsConstructor(object), object + ' object is not constructor'); + }); + + try { + var foo = Function('return class Foo {}')(); // eslint-disable-line no-new-func + t.equal(ES.IsConstructor(foo), true, 'class is constructor'); + } catch (e) { + t.comment('SKIP: class syntax not supported.'); + } + t.end(); + }); + + test('Call', function (t) { + var receiver = {}; + var notFuncs = v.objects.concat(v.primitives).concat([/a/g, new RegExp('a', 'g')]); + t.plan(notFuncs.length + 4); + var throwsIfNotCallable = function (notFunc) { + t['throws']( + function () { return ES.Call(notFunc, receiver); }, + TypeError, + debug(notFunc) + ' (' + typeof notFunc + ') is not callable' + ); + }; + forEach(notFuncs, throwsIfNotCallable); + ES.Call(function (a, b) { + t.equal(this, receiver, 'context matches expected'); + t.deepEqual([a, b], [1, 2], 'named args are correct'); + t.equal(arguments.length, 3, 'extra argument was passed'); + t.equal(arguments[2], 3, 'extra argument was correct'); + }, receiver, [1, 2, 3]); + t.end(); + }); + + test('GetV', function (t) { + t['throws'](function () { return ES.GetV({ 7: 7 }, 7); }, TypeError, 'Throws a TypeError if `P` is not a property key'); + var obj = { a: function () {} }; + t.equal(ES.GetV(obj, 'a'), obj.a, 'returns property if it exists'); + t.equal(ES.GetV(obj, 'b'), undefined, 'returns undefiend if property does not exist'); + t.end(); + }); + + test('GetMethod', function (t) { + t['throws'](function () { return ES.GetMethod({ 7: 7 }, 7); }, TypeError, 'Throws a TypeError if `P` is not a property key'); + t.equal(ES.GetMethod({}, 'a'), undefined, 'returns undefined in property is undefined'); + t.equal(ES.GetMethod({ a: null }, 'a'), undefined, 'returns undefined if property is null'); + t.equal(ES.GetMethod({ a: undefined }, 'a'), undefined, 'returns undefined if property is undefined'); + var obj = { a: function () {} }; + t['throws'](function () { ES.GetMethod({ a: 'b' }, 'a'); }, TypeError, 'throws TypeError if property exists and is not callable'); + t.equal(ES.GetMethod(obj, 'a'), obj.a, 'returns property if it is callable'); + t.end(); + }); + + test('Get', function (t) { + t['throws'](function () { return ES.Get('a', 'a'); }, TypeError, 'Throws a TypeError if `O` is not an Object'); + t['throws'](function () { return ES.Get({ 7: 7 }, 7); }, TypeError, 'Throws a TypeError if `P` is not a property key'); + + var value = {}; + t.test('Symbols', { skip: !v.hasSymbols }, function (st) { + var sym = Symbol('sym'); + var obj = {}; + obj[sym] = value; + st.equal(ES.Get(obj, sym), value, 'returns property `P` if it exists on object `O`'); + st.end(); + }); + t.equal(ES.Get({ a: value }, 'a'), value, 'returns property `P` if it exists on object `O`'); + 
t.end(); + }); + + test('Type', { skip: !v.hasSymbols }, function (t) { + t.equal(ES.Type(Symbol.iterator), 'Symbol', 'Type(Symbol.iterator) is Symbol'); + t.end(); + }); + + test('SpeciesConstructor', function (t) { + t['throws'](function () { ES.SpeciesConstructor(null); }, TypeError); + t['throws'](function () { ES.SpeciesConstructor(undefined); }, TypeError); + + var defaultConstructor = function Foo() {}; + + t.equal( + ES.SpeciesConstructor({ constructor: undefined }, defaultConstructor), + defaultConstructor, + 'undefined constructor returns defaultConstructor' + ); + + t['throws']( + function () { return ES.SpeciesConstructor({ constructor: null }, defaultConstructor); }, + TypeError, + 'non-undefined non-object constructor throws' + ); + + t.test('with Symbol.species', { skip: !hasSpecies }, function (st) { + var Bar = function Bar() {}; + Bar[Symbol.species] = null; + + st.equal( + ES.SpeciesConstructor(new Bar(), defaultConstructor), + defaultConstructor, + 'undefined/null Symbol.species returns default constructor' + ); + + var Baz = function Baz() {}; + Baz[Symbol.species] = Bar; + st.equal( + ES.SpeciesConstructor(new Baz(), defaultConstructor), + Bar, + 'returns Symbol.species constructor value' + ); + + Baz[Symbol.species] = {}; + st['throws']( + function () { ES.SpeciesConstructor(new Baz(), defaultConstructor); }, + TypeError, + 'throws when non-constructor non-null non-undefined species value found' + ); + + st.end(); + }); + + t.end(); + }); + + var bothDescriptor = function () { + return { '[[Get]]': function () {}, '[[Value]]': true }; + }; + var accessorDescriptor = function () { + return { + '[[Get]]': function () {}, + '[[Enumerable]]': true, + '[[Configurable]]': true + }; + }; + var mutatorDescriptor = function () { + return { + '[[Set]]': function () {}, + '[[Enumerable]]': true, + '[[Configurable]]': true + }; + }; + var dataDescriptor = function () { + return { + '[[Value]]': 42, + '[[Writable]]': false + }; + }; + var genericDescriptor = function () { + return { + '[[Configurable]]': true, + '[[Enumerable]]': false + }; + }; + + test('IsPropertyDescriptor', function (t) { + forEach(v.nonUndefinedPrimitives, function (primitive) { + t.equal( + ES.IsPropertyDescriptor(primitive), + false, + debug(primitive) + ' is not a Property Descriptor' + ); + }); + + t.equal(ES.IsPropertyDescriptor({ invalid: true }), false, 'invalid keys not allowed on a Property Descriptor'); + + t.equal(ES.IsPropertyDescriptor({}), true, 'empty object is an incomplete Property Descriptor'); + + t.equal(ES.IsPropertyDescriptor(accessorDescriptor()), true, 'accessor descriptor is a Property Descriptor'); + t.equal(ES.IsPropertyDescriptor(mutatorDescriptor()), true, 'mutator descriptor is a Property Descriptor'); + t.equal(ES.IsPropertyDescriptor(dataDescriptor()), true, 'data descriptor is a Property Descriptor'); + t.equal(ES.IsPropertyDescriptor(genericDescriptor()), true, 'generic descriptor is a Property Descriptor'); + + t['throws'](function () { + ES.IsPropertyDescriptor(bothDescriptor()); + }, TypeError, 'a Property Descriptor can not be both a Data and an Accessor Descriptor'); + + t.end(); + }); + + test('IsAccessorDescriptor', function (t) { + forEach(v.nonUndefinedPrimitives, function (primitive) { + t['throws']( + function () { ES.IsAccessorDescriptor(primitive); }, + TypeError, + debug(primitive) + ' is not a Property Descriptor' + ); + }); + + t.equal(ES.IsAccessorDescriptor(), false, 'no value is not an Accessor Descriptor'); + t.equal(ES.IsAccessorDescriptor(undefined), 
false, 'undefined value is not an Accessor Descriptor'); + + t.equal(ES.IsAccessorDescriptor(accessorDescriptor()), true, 'accessor descriptor is an Accessor Descriptor'); + t.equal(ES.IsAccessorDescriptor(mutatorDescriptor()), true, 'mutator descriptor is an Accessor Descriptor'); + t.equal(ES.IsAccessorDescriptor(dataDescriptor()), false, 'data descriptor is not an Accessor Descriptor'); + t.equal(ES.IsAccessorDescriptor(genericDescriptor()), false, 'generic descriptor is not an Accessor Descriptor'); + + t.end(); + }); + + test('IsDataDescriptor', function (t) { + forEach(v.nonUndefinedPrimitives, function (primitive) { + t['throws']( + function () { ES.IsDataDescriptor(primitive); }, + TypeError, + debug(primitive) + ' is not a Property Descriptor' + ); + }); + + t.equal(ES.IsDataDescriptor(), false, 'no value is not a Data Descriptor'); + t.equal(ES.IsDataDescriptor(undefined), false, 'undefined value is not a Data Descriptor'); + + t.equal(ES.IsDataDescriptor(accessorDescriptor()), false, 'accessor descriptor is not a Data Descriptor'); + t.equal(ES.IsDataDescriptor(mutatorDescriptor()), false, 'mutator descriptor is not a Data Descriptor'); + t.equal(ES.IsDataDescriptor(dataDescriptor()), true, 'data descriptor is a Data Descriptor'); + t.equal(ES.IsDataDescriptor(genericDescriptor()), false, 'generic descriptor is not a Data Descriptor'); + + t.end(); + }); + + test('IsGenericDescriptor', function (t) { + forEach(v.nonUndefinedPrimitives, function (primitive) { + t['throws']( + function () { ES.IsGenericDescriptor(primitive); }, + TypeError, + debug(primitive) + ' is not a Property Descriptor' + ); + }); + + t.equal(ES.IsGenericDescriptor(), false, 'no value is not a Data Descriptor'); + t.equal(ES.IsGenericDescriptor(undefined), false, 'undefined value is not a Data Descriptor'); + + t.equal(ES.IsGenericDescriptor(accessorDescriptor()), false, 'accessor descriptor is not a generic Descriptor'); + t.equal(ES.IsGenericDescriptor(mutatorDescriptor()), false, 'mutator descriptor is not a generic Descriptor'); + t.equal(ES.IsGenericDescriptor(dataDescriptor()), false, 'data descriptor is not a generic Descriptor'); + + t.equal(ES.IsGenericDescriptor(genericDescriptor()), true, 'generic descriptor is a generic Descriptor'); + + t.end(); + }); + + test('FromPropertyDescriptor', function (t) { + t.equal(ES.FromPropertyDescriptor(), undefined, 'no value begets undefined'); + t.equal(ES.FromPropertyDescriptor(undefined), undefined, 'undefined value begets undefined'); + + forEach(v.nonUndefinedPrimitives, function (primitive) { + t['throws']( + function () { ES.FromPropertyDescriptor(primitive); }, + TypeError, + debug(primitive) + ' is not a Property Descriptor' + ); + }); + + var accessor = accessorDescriptor(); + t.deepEqual(ES.FromPropertyDescriptor(accessor), { + get: accessor['[[Get]]'], + set: accessor['[[Set]]'], + enumerable: !!accessor['[[Enumerable]]'], + configurable: !!accessor['[[Configurable]]'] + }); + + var mutator = mutatorDescriptor(); + t.deepEqual(ES.FromPropertyDescriptor(mutator), { + get: mutator['[[Get]]'], + set: mutator['[[Set]]'], + enumerable: !!mutator['[[Enumerable]]'], + configurable: !!mutator['[[Configurable]]'] + }); + var data = dataDescriptor(); + t.deepEqual(ES.FromPropertyDescriptor(data), { + value: data['[[Value]]'], + writable: data['[[Writable]]'], + enumerable: !!data['[[Enumerable]]'], + configurable: !!data['[[Configurable]]'] + }); + + t['throws']( + function () { ES.FromPropertyDescriptor(genericDescriptor()); }, + TypeError, + 'a complete 
Property Descriptor is required' + ); + + t.end(); + }); + + test('ToPropertyDescriptor', function (t) { + forEach(v.nonUndefinedPrimitives, function (primitive) { + t['throws']( + function () { ES.ToPropertyDescriptor(primitive); }, + TypeError, + debug(primitive) + ' is not an Object' + ); + }); + + var accessor = accessorDescriptor(); + t.deepEqual(ES.ToPropertyDescriptor({ + get: accessor['[[Get]]'], + enumerable: !!accessor['[[Enumerable]]'], + configurable: !!accessor['[[Configurable]]'] + }), accessor); + + var mutator = mutatorDescriptor(); + t.deepEqual(ES.ToPropertyDescriptor({ + set: mutator['[[Set]]'], + enumerable: !!mutator['[[Enumerable]]'], + configurable: !!mutator['[[Configurable]]'] + }), mutator); + + var data = dataDescriptor(); + t.deepEqual(ES.ToPropertyDescriptor({ + value: data['[[Value]]'], + writable: data['[[Writable]]'], + configurable: !!data['[[Configurable]]'] + }), assign(data, { '[[Configurable]]': false })); + + var both = bothDescriptor(); + t['throws']( + function () { + ES.FromPropertyDescriptor({ get: both['[[Get]]'], value: both['[[Value]]'] }); + }, + TypeError, + 'data and accessor descriptors are mutually exclusive' + ); + + t.end(); + }); + + test('CompletePropertyDescriptor', function (t) { + forEach(v.nonUndefinedPrimitives, function (primitive) { + t['throws']( + function () { ES.CompletePropertyDescriptor(primitive); }, + TypeError, + debug(primitive) + ' is not a Property Descriptor' + ); + }); + + var generic = genericDescriptor(); + t.deepEqual(ES.CompletePropertyDescriptor(generic), { + '[[Configurable]]': !!generic['[[Configurable]]'], + '[[Enumerable]]': !!generic['[[Enumerable]]'], + '[[Value]]': undefined, + '[[Writable]]': false + }, 'completes a Generic Descriptor'); + + var data = dataDescriptor(); + t.deepEqual(ES.CompletePropertyDescriptor(data), { + '[[Configurable]]': !!data['[[Configurable]]'], + '[[Enumerable]]': false, + '[[Value]]': data['[[Value]]'], + '[[Writable]]': !!data['[[Writable]]'] + }, 'completes a Data Descriptor'); + + var accessor = accessorDescriptor(); + t.deepEqual(ES.CompletePropertyDescriptor(accessor), { + '[[Get]]': accessor['[[Get]]'], + '[[Enumerable]]': !!accessor['[[Enumerable]]'], + '[[Configurable]]': !!accessor['[[Configurable]]'], + '[[Set]]': undefined + }, 'completes an Accessor Descriptor'); + + var mutator = mutatorDescriptor(); + t.deepEqual(ES.CompletePropertyDescriptor(mutator), { + '[[Set]]': mutator['[[Set]]'], + '[[Enumerable]]': !!mutator['[[Enumerable]]'], + '[[Configurable]]': !!mutator['[[Configurable]]'], + '[[Get]]': undefined + }, 'completes a mutator Descriptor'); + + t['throws']( + function () { ES.CompletePropertyDescriptor(bothDescriptor()); }, + TypeError, + 'data and accessor descriptors are mutually exclusive' + ); + + t.end(); + }); + + test('Set', function (t) { + forEach(v.primitives, function (primitive) { + t['throws']( + function () { ES.Set(primitive, '', null, false); }, + TypeError, + debug(primitive) + ' is not an Object' + ); + }); + + forEach(v.nonPropertyKeys, function (nonKey) { + t['throws']( + function () { ES.Set({}, nonKey, null, false); }, + TypeError, + debug(nonKey) + ' is not a Property Key' + ); + }); + + forEach(v.nonBooleans, function (nonBoolean) { + t['throws']( + function () { ES.Set({}, '', null, nonBoolean); }, + TypeError, + debug(nonBoolean) + ' is not a Boolean' + ); + }); + + var o = {}; + var value = {}; + ES.Set(o, 'key', value, true); + t.deepEqual(o, { key: value }, 'key is set'); + + t.test('nonwritable', { skip: 
!Object.defineProperty }, function (st) { + var obj = { a: value }; + Object.defineProperty(obj, 'a', { writable: false }); + + st['throws']( + function () { ES.Set(obj, 'a', value, true); }, + TypeError, + 'can not Set nonwritable property' + ); + + st.doesNotThrow( + function () { ES.Set(obj, 'a', value, false); }, + 'setting Throw to false prevents an exception' + ); + + st.end(); + }); + + t.test('nonconfigurable', { skip: !Object.defineProperty }, function (st) { + var obj = { a: value }; + Object.defineProperty(obj, 'a', { configurable: false }); + + ES.Set(obj, 'a', value, true); + st.deepEqual(obj, { a: value }, 'key is set'); + + st.end(); + }); + + t.end(); + }); + + test('HasOwnProperty', function (t) { + forEach(v.primitives, function (primitive) { + t['throws']( + function () { ES.HasOwnProperty(primitive, 'key'); }, + TypeError, + debug(primitive) + ' is not an Object' + ); + }); + + forEach(v.nonPropertyKeys, function (nonKey) { + t['throws']( + function () { ES.HasOwnProperty({}, nonKey); }, + TypeError, + debug(nonKey) + ' is not a Property Key' + ); + }); + + t.equal(ES.HasOwnProperty({}, 'toString'), false, 'inherited properties are not own'); + t.equal( + ES.HasOwnProperty({ toString: 1 }, 'toString'), + true, + 'shadowed inherited own properties are own' + ); + t.equal(ES.HasOwnProperty({ a: 1 }, 'a'), true, 'own properties are own'); + + t.end(); + }); + + test('HasProperty', function (t) { + forEach(v.primitives, function (primitive) { + t['throws']( + function () { ES.HasProperty(primitive, 'key'); }, + TypeError, + debug(primitive) + ' is not an Object' + ); + }); + + forEach(v.nonPropertyKeys, function (nonKey) { + t['throws']( + function () { ES.HasProperty({}, nonKey); }, + TypeError, + debug(nonKey) + ' is not a Property Key' + ); + }); + + t.equal(ES.HasProperty({}, 'nope'), false, 'object does not have nonexistent properties'); + t.equal(ES.HasProperty({}, 'toString'), true, 'object has inherited properties'); + t.equal( + ES.HasProperty({ toString: 1 }, 'toString'), + true, + 'object has shadowed inherited own properties' + ); + t.equal(ES.HasProperty({ a: 1 }, 'a'), true, 'object has own properties'); + + t.end(); + }); + + test('IsConcatSpreadable', function (t) { + forEach(v.primitives, function (primitive) { + t.equal(ES.IsConcatSpreadable(primitive), false, debug(primitive) + ' is not an Object'); + }); + + var hasSymbolConcatSpreadable = v.hasSymbols && Symbol.isConcatSpreadable; + t.test('Symbol.isConcatSpreadable', { skip: !hasSymbolConcatSpreadable }, function (st) { + forEach(v.falsies, function (falsy) { + var obj = {}; + obj[Symbol.isConcatSpreadable] = falsy; + st.equal( + ES.IsConcatSpreadable(obj), + false, + 'an object with ' + debug(falsy) + ' as Symbol.isConcatSpreadable is not concat spreadable' + ); + }); + + forEach(v.truthies, function (truthy) { + var obj = {}; + obj[Symbol.isConcatSpreadable] = truthy; + st.equal( + ES.IsConcatSpreadable(obj), + true, + 'an object with ' + debug(truthy) + ' as Symbol.isConcatSpreadable is concat spreadable' + ); + }); + + st.end(); + }); + + forEach(v.objects, function (object) { + t.equal( + ES.IsConcatSpreadable(object), + false, + 'non-array without Symbol.isConcatSpreadable is not concat spreadable' + ); + }); + + t.equal(ES.IsConcatSpreadable([]), true, 'arrays are concat spreadable'); + + t.end(); + }); + + test('Invoke', function (t) { + forEach(v.nonPropertyKeys, function (nonKey) { + t['throws']( + function () { ES.Invoke({}, nonKey); }, + TypeError, + debug(nonKey) + ' is not a Property Key' 
+ ); + }); + + t['throws'](function () { ES.Invoke({ o: false }, 'o'); }, TypeError, 'fails on a non-function'); + + t.test('invoked callback', function (st) { + var aValue = {}; + var bValue = {}; + var obj = { + f: function (a) { + st.equal(arguments.length, 2, '2 args passed'); + st.equal(a, aValue, 'first arg is correct'); + st.equal(arguments[1], bValue, 'second arg is correct'); + } + }; + st.plan(3); + ES.Invoke(obj, 'f', aValue, bValue); + }); + + t.end(); + }); + + test('GetIterator', { skip: true }); + + test('IteratorNext', { skip: true }); + + test('IteratorComplete', { skip: true }); + + test('IteratorValue', { skip: true }); + + test('IteratorStep', { skip: true }); + + test('IteratorClose', { skip: true }); + + test('CreateIterResultObject', function (t) { + forEach(v.nonBooleans, function (nonBoolean) { + t['throws']( + function () { ES.CreateIterResultObject({}, nonBoolean); }, + TypeError, + '"done" argument must be a boolean; ' + debug(nonBoolean) + ' is not' + ); + }); + + var value = {}; + t.deepEqual(ES.CreateIterResultObject(value, true), { + value: value, + done: true + }, 'creates a "done" iteration result'); + t.deepEqual(ES.CreateIterResultObject(value, false), { + value: value, + done: false + }, 'creates a "not done" iteration result'); + + t.end(); + }); + + test('RegExpExec', function (t) { + forEach(v.primitives, function (primitive) { + t['throws']( + function () { ES.RegExpExec(primitive); }, + TypeError, + '"R" argument must be an object; ' + debug(primitive) + ' is not' + ); + }); + + forEach(v.nonStrings, function (nonString) { + t['throws']( + function () { ES.RegExpExec({}, nonString); }, + TypeError, + '"S" argument must be a String; ' + debug(nonString) + ' is not' + ); + }); + + t.test('gets and calls a callable "exec"', function (st) { + var str = '123'; + var o = { + exec: function (S) { + st.equal(this, o, '"exec" receiver is R'); + st.equal(S, str, '"exec" argument is S'); + + return null; + } + }; + st.plan(2); + ES.RegExpExec(o, str); + st.end(); + }); + + t.test('throws if a callable "exec" returns a non-null non-object', function (st) { + var str = '123'; + st.plan(v.nonNullPrimitives.length); + forEach(v.nonNullPrimitives, function (nonNullPrimitive) { + st['throws']( + function () { ES.RegExpExec({ exec: function () { return nonNullPrimitive; } }, str); }, + TypeError, + '"exec" method must return `null` or an Object; ' + debug(nonNullPrimitive) + ' is not' + ); + }); + st.end(); + }); + + t.test('actual regex that should match against a string', function (st) { + var S = 'aabc'; + var R = /a/g; + var match1 = ES.RegExpExec(R, S); + var match2 = ES.RegExpExec(R, S); + var match3 = ES.RegExpExec(R, S); + st.deepEqual(match1, assign(['a'], groups({ index: 0, input: S })), 'match object 1 is as expected'); + st.deepEqual(match2, assign(['a'], groups({ index: 1, input: S })), 'match object 2 is as expected'); + st.equal(match3, null, 'match 3 is null as expected'); + st.end(); + }); + + t.test('actual regex that should match against a string, with shadowed "exec"', function (st) { + var S = 'aabc'; + var R = /a/g; + R.exec = undefined; + var match1 = ES.RegExpExec(R, S); + var match2 = ES.RegExpExec(R, S); + var match3 = ES.RegExpExec(R, S); + st.deepEqual(match1, assign(['a'], groups({ index: 0, input: S })), 'match object 1 is as expected'); + st.deepEqual(match2, assign(['a'], groups({ index: 1, input: S })), 'match object 2 is as expected'); + st.equal(match3, null, 'match 3 is null as expected'); + st.end(); + }); + t.end(); + }); + + 
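Editor's note: the RegExpExec cases above exercise the ES2015 "RegExpExec" abstract operation. As a hedged illustration of what those assertions are checking — a sketch based on the spec's steps, not the vendored es-abstract implementation, with an invented helper name — the operation prefers a user-supplied callable `exec` and only then falls back to the built-in:

// Hedged sketch of the RegExpExec abstract operation the tests above exercise.
// Not the vendored code; it only mirrors the spec steps in simplified form.
function regExpExecSketch(R, S) {
	if (R === null || (typeof R !== 'object' && typeof R !== 'function')) {
		throw new TypeError('"R" argument must be an Object');
	}
	if (typeof S !== 'string') {
		throw new TypeError('"S" argument must be a String');
	}
	var exec = R.exec;
	if (typeof exec === 'function') {
		// Call the (possibly user-supplied) exec with R as the receiver and S as the argument.
		var result = exec.call(R, S);
		if (result !== null && typeof result !== 'object') {
			throw new TypeError('"exec" method must return `null` or an Object');
		}
		return result;
	}
	// Otherwise fall back to the built-in exec; R is expected to be an actual RegExp here,
	// which is why the "shadowed exec" test above still matches when R.exec is undefined.
	return RegExp.prototype.exec.call(R, S);
}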
test('ArraySpeciesCreate', function (t) { + t.test('errors', function (st) { + var testNonNumber = function (nonNumber) { + st['throws']( + function () { ES.ArraySpeciesCreate([], nonNumber); }, + TypeError, + debug(nonNumber) + ' is not a number' + ); + }; + forEach(v.nonNumbers, testNonNumber); + + st['throws']( + function () { ES.ArraySpeciesCreate([], -1); }, + TypeError, + '-1 is not >= 0' + ); + st['throws']( + function () { ES.ArraySpeciesCreate([], -Infinity); }, + TypeError, + '-Infinity is not >= 0' + ); + + var testNonIntegers = function (nonInteger) { + st['throws']( + function () { ES.ArraySpeciesCreate([], nonInteger); }, + TypeError, + debug(nonInteger) + ' is not an integer' + ); + }; + forEach(v.nonIntegerNumbers, testNonIntegers); + + st.end(); + }); + + t.test('works with a non-array', function (st) { + forEach(v.objects.concat(v.primitives), function (nonArray) { + var arr = ES.ArraySpeciesCreate(nonArray, 0); + st.ok(ES.IsArray(arr), 'is an array'); + st.equal(arr.length, 0, 'length is correct'); + st.equal(arr.constructor, Array, 'constructor is correct'); + }); + + st.end(); + }); + + t.test('works with a normal array', function (st) { + var len = 2; + var orig = [1, 2, 3]; + var arr = ES.ArraySpeciesCreate(orig, len); + + st.ok(ES.IsArray(arr), 'is an array'); + st.equal(arr.length, len, 'length is correct'); + st.equal(arr.constructor, orig.constructor, 'constructor is correct'); + + st.end(); + }); + + t.test('-0 length produces +0 length', function (st) { + var len = -0; + st.ok(is(len, -0), '-0 is negative zero'); + st.notOk(is(len, 0), '-0 is not positive zero'); + + var orig = [1, 2, 3]; + var arr = ES.ArraySpeciesCreate(orig, len); + + st.equal(ES.IsArray(arr), true); + st.ok(is(arr.length, 0)); + st.equal(arr.constructor, orig.constructor); + + st.end(); + }); + + t.test('works with species construtor', { skip: !hasSpecies }, function (st) { + var sentinel = {}; + var Foo = function Foo(len) { + this.length = len; + this.sentinel = sentinel; + }; + var Bar = getArraySubclassWithSpeciesConstructor(Foo); + var bar = new Bar(); + + t.equal(ES.IsArray(bar), true, 'Bar instance is an array'); + + var arr = ES.ArraySpeciesCreate(bar, 3); + st.equal(arr.constructor, Foo, 'result used species constructor'); + st.equal(arr.length, 3, 'length property is correct'); + st.equal(arr.sentinel, sentinel, 'Foo constructor was exercised'); + + st.end(); + }); + + t.test('works with null species constructor', { skip: !hasSpecies }, function (st) { + var Bar = getArraySubclassWithSpeciesConstructor(null); + var bar = new Bar(); + + t.equal(ES.IsArray(bar), true, 'Bar instance is an array'); + + var arr = ES.ArraySpeciesCreate(bar, 3); + st.equal(arr.constructor, Array, 'result used default constructor'); + st.equal(arr.length, 3, 'length property is correct'); + + st.end(); + }); + + t.test('works with undefined species constructor', { skip: !hasSpecies }, function (st) { + var Bar = getArraySubclassWithSpeciesConstructor(); + var bar = new Bar(); + + t.equal(ES.IsArray(bar), true, 'Bar instance is an array'); + + var arr = ES.ArraySpeciesCreate(bar, 3); + st.equal(arr.constructor, Array, 'result used default constructor'); + st.equal(arr.length, 3, 'length property is correct'); + + st.end(); + }); + + t.test('throws with object non-construtor species constructor', { skip: !hasSpecies }, function (st) { + forEach(v.objects, function (obj) { + var Bar = getArraySubclassWithSpeciesConstructor(obj); + var bar = new Bar(); + + st.equal(ES.IsArray(bar), true, 'Bar instance is an 
array'); + + st['throws']( + function () { ES.ArraySpeciesCreate(bar, 3); }, + TypeError, + debug(obj) + ' is not a constructor' + ); + }); + + st.end(); + }); + + t.end(); + }); + + test('CreateDataProperty', function (t) { + forEach(v.primitives, function (primitive) { + t['throws']( + function () { ES.CreateDataProperty(primitive); }, + TypeError, + debug(primitive) + ' is not an object' + ); + }); + + forEach(v.nonPropertyKeys, function (nonPropertyKey) { + t['throws']( + function () { ES.CreateDataProperty({}, nonPropertyKey); }, + TypeError, + debug(nonPropertyKey) + ' is not a property key' + ); + }); + + var sentinel = {}; + forEach(v.propertyKeys, function (propertyKey) { + var obj = {}; + var status = ES.CreateDataProperty(obj, propertyKey, sentinel); + t.equal(status, true, 'status is true'); + t.equal( + obj[propertyKey], + sentinel, + debug(sentinel) + ' is installed on "' + debug(propertyKey) + '" on the object' + ); + + if (typeof Object.defineProperty === 'function') { + var nonWritable = Object.defineProperty({}, propertyKey, { configurable: true, writable: false }); + + var nonWritableStatus = ES.CreateDataProperty(nonWritable, propertyKey, sentinel); + t.equal(nonWritableStatus, false, 'create data property failed'); + t.notEqual( + nonWritable[propertyKey], + sentinel, + debug(sentinel) + ' is not installed on "' + debug(propertyKey) + '" on the object when key is nonwritable' + ); + + var nonConfigurable = Object.defineProperty({}, propertyKey, { configurable: false, writable: true }); + + var nonConfigurableStatus = ES.CreateDataProperty(nonConfigurable, propertyKey, sentinel); + t.equal(nonConfigurableStatus, false, 'create data property failed'); + t.notEqual( + nonConfigurable[propertyKey], + sentinel, + debug(sentinel) + ' is not installed on "' + debug(propertyKey) + '" on the object when key is nonconfigurable' + ); + } + }); + + t.end(); + }); + + test('CreateDataPropertyOrThrow', function (t) { + forEach(v.primitives, function (primitive) { + t['throws']( + function () { ES.CreateDataPropertyOrThrow(primitive); }, + TypeError, + debug(primitive) + ' is not an object' + ); + }); + + forEach(v.nonPropertyKeys, function (nonPropertyKey) { + t['throws']( + function () { ES.CreateDataPropertyOrThrow({}, nonPropertyKey); }, + TypeError, + debug(nonPropertyKey) + ' is not a property key' + ); + }); + + var sentinel = {}; + forEach(v.propertyKeys, function (propertyKey) { + var obj = {}; + var status = ES.CreateDataPropertyOrThrow(obj, propertyKey, sentinel); + t.equal(status, true, 'status is true'); + t.equal( + obj[propertyKey], + sentinel, + debug(sentinel) + ' is installed on "' + debug(propertyKey) + '" on the object' + ); + + if (typeof Object.preventExtensions === 'function') { + var notExtensible = {}; + Object.preventExtensions(notExtensible); + + t['throws']( + function () { ES.CreateDataPropertyOrThrow(notExtensible, propertyKey, sentinel); }, + TypeError, + 'can not install ' + debug(propertyKey) + ' on non-extensible object' + ); + t.notEqual( + notExtensible[propertyKey], + sentinel, + debug(sentinel) + ' is not installed on "' + debug(propertyKey) + '" on the object' + ); + } + }); + + t.end(); + }); + + test('ObjectCreate', function (t) { + forEach(v.nonNullPrimitives, function (value) { + t['throws']( + function () { ES.ObjectCreate(value); }, + TypeError, + debug(value) + ' is not null, or an object' + ); + }); + + t.test('proto arg', function (st) { + var Parent = function Parent() {}; + Parent.prototype.foo = {}; + var child = 
ES.ObjectCreate(Parent.prototype); + st.equal(child instanceof Parent, true, 'child is instanceof Parent'); + st.equal(child.foo, Parent.prototype.foo, 'child inherits properties from Parent.prototype'); + + st.end(); + }); + + t.test('internal slots arg', function (st) { + st.doesNotThrow(function () { ES.ObjectCreate(null, []); }, 'an empty slot list is valid'); + + st['throws']( + function () { ES.ObjectCreate(null, ['a']); }, + SyntaxError, + 'internal slots are not supported' + ); + + st.end(); + }); + + t.test('null proto', { skip: !Object.create }, function (st) { + st.equal('toString' in ({}), true, 'normal objects have toString'); + st.equal('toString' in ES.ObjectCreate(null), false, 'makes a null object'); + + st.end(); + }); + + t.test('null proto when no native Object.create', { skip: Object.create }, function (st) { + st['throws']( + function () { ES.ObjectCreate(null); }, + SyntaxError, + 'without a native Object.create, can not create null objects' + ); + + st.end(); + }); + + t.end(); + }); + + test('AdvanceStringIndex', function (t) { + forEach(v.nonStrings, function (nonString) { + t['throws']( + function () { ES.AdvanceStringIndex(nonString); }, + TypeError, + '"S" argument must be a String; ' + debug(nonString) + ' is not' + ); + }); + + var notInts = v.nonNumbers.concat( + v.nonIntegerNumbers, + [Infinity, -Infinity, NaN, [], new Date(), Math.pow(2, 53), -1] + ); + forEach(notInts, function (nonInt) { + t['throws']( + function () { ES.AdvanceStringIndex('abc', nonInt); }, + TypeError, + '"index" argument must be an integer, ' + debug(nonInt) + ' is not.' + ); + }); + + forEach(v.nonBooleans, function (nonBoolean) { + t['throws']( + function () { ES.AdvanceStringIndex('abc', 0, nonBoolean); }, + TypeError, + debug(nonBoolean) + ' is not a Boolean' + ); + }); + + var str = 'a\uD83D\uDCA9c'; + + t.test('non-unicode mode', function (st) { + for (var i = 0; i < str.length + 2; i += 1) { + st.equal(ES.AdvanceStringIndex(str, i, false), i + 1, i + ' advances to ' + (i + 1)); + } + + st.end(); + }); + + t.test('unicode mode', function (st) { + st.equal(ES.AdvanceStringIndex(str, 0, true), 1, '0 advances to 1'); + st.equal(ES.AdvanceStringIndex(str, 1, true), 3, '1 advances to 3'); + st.equal(ES.AdvanceStringIndex(str, 2, true), 3, '2 advances to 3'); + st.equal(ES.AdvanceStringIndex(str, 3, true), 4, '3 advances to 4'); + st.equal(ES.AdvanceStringIndex(str, 4, true), 5, '4 advances to 5'); + + st.end(); + }); + + t.test('lone surrogates', function (st) { + var halfPoo = 'a\uD83Dc'; + + st.equal(ES.AdvanceStringIndex(halfPoo, 0, true), 1, '0 advances to 1'); + st.equal(ES.AdvanceStringIndex(halfPoo, 1, true), 2, '1 advances to 2'); + st.equal(ES.AdvanceStringIndex(halfPoo, 2, true), 3, '2 advances to 3'); + st.equal(ES.AdvanceStringIndex(halfPoo, 3, true), 4, '3 advances to 4'); + + st.end(); + }); + + t.test('surrogate pairs', function (st) { + var lowestPair = String.fromCharCode('0xD800') + String.fromCharCode('0xDC00'); + var highestPair = String.fromCharCode('0xDBFF') + String.fromCharCode('0xDFFF'); + var poop = String.fromCharCode('0xD83D') + String.fromCharCode('0xDCA9'); + + st.equal(ES.AdvanceStringIndex(lowestPair, 0, true), 2, 'lowest surrogate pair, 0 -> 2'); + st.equal(ES.AdvanceStringIndex(highestPair, 0, true), 2, 'highest surrogate pair, 0 -> 2'); + st.equal(ES.AdvanceStringIndex(poop, 0, true), 2, 'poop, 0 -> 2'); + + st.end(); + }); + + t.end(); + }); +}; + +var es2016 = function ES2016(ES, ops, expectedMissing) { + es2015(ES, ops, expectedMissing); + + 
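Editor's note: the SameValueNonNumber test that follows is the one case this file's ES2016 suite adds on top of the ES2015 suite. A hedged sketch of the operation, based on the spec definition rather than the vendored code (the function name is made up, and `typeof` is only a rough stand-in for the spec's Type operation): both arguments must be the same non-Number type, after which SameValue collapses to strict equality.

// Illustrative sketch of ES2016's SameValueNonNumber; not the vendored implementation.
function sameValueNonNumberSketch(x, y) {
	if (typeof x === 'number' || typeof y === 'number') {
		throw new TypeError('SameValueNonNumber does not accept Number values');
	}
	if (typeof x !== typeof y) {
		throw new TypeError('both values must be of the same type');
	}
	// With NaN and -0 excluded (they are Numbers), SameValue(x, y) is simply x === y.
	return x === y;
}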
test('SameValueNonNumber', function (t) { + var willThrow = [ + [3, 4], + [NaN, 4], + [4, ''], + ['abc', true], + [{}, false] + ]; + forEach(willThrow, function (nums) { + t['throws'](function () { return ES.SameValueNonNumber.apply(ES, nums); }, TypeError, 'value must be same type and non-number'); + }); + + forEach(v.objects.concat(v.nonNumberPrimitives), function (val) { + t.equal(val === val, ES.SameValueNonNumber(val, val), debug(val) + ' is SameValueNonNumber to itself'); + }); + + t.end(); + }); +}; + +var es2017 = function E2017(ES, ops, expectedMissing) { + es2016(ES, ops, expectedMissing); + + test('ToIndex', function (t) { + t.ok(is(ES.ToIndex(), 0), 'no value gives 0'); + t.ok(is(ES.ToIndex(undefined), 0), 'undefined value gives 0'); + + t['throws'](function () { ES.ToIndex(-1); }, RangeError, 'negative numbers throw'); + + t['throws'](function () { ES.ToIndex(MAX_SAFE_INTEGER + 1); }, RangeError, 'too large numbers throw'); + + t.equal(ES.ToIndex(3), 3, 'numbers work'); + t.equal(ES.ToIndex(v.valueOfOnlyObject), 4, 'coercible objects are coerced'); + + t.end(); + }); +}; + +module.exports = { + es2015: es2015, + es2016: es2016, + es2017: es2017 +}; diff --git a/node_modules/es-to-primitive/.editorconfig b/node_modules/es-to-primitive/.editorconfig new file mode 100644 index 0000000000000..bc228f8269443 --- /dev/null +++ b/node_modules/es-to-primitive/.editorconfig @@ -0,0 +1,20 @@ +root = true + +[*] +indent_style = tab +indent_size = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true +max_line_length = 150 + +[CHANGELOG.md] +indent_style = space +indent_size = 2 + +[*.json] +max_line_length = off + +[Makefile] +max_line_length = off diff --git a/node_modules/es-to-primitive/.jscs.json b/node_modules/es-to-primitive/.jscs.json new file mode 100644 index 0000000000000..8666c750db96c --- /dev/null +++ b/node_modules/es-to-primitive/.jscs.json @@ -0,0 +1,176 @@ +{ + "es3": true, + + "additionalRules": [], + + "requireSemicolons": true, + + "disallowMultipleSpaces": true, + + "disallowIdentifierNames": [], + + "requireCurlyBraces": { + "allExcept": [], + "keywords": ["if", "else", "for", "while", "do", "try", "catch"] + }, + + "requireSpaceAfterKeywords": ["if", "else", "for", "while", "do", "switch", "return", "try", "catch", "function"], + + "disallowSpaceAfterKeywords": [], + + "disallowSpaceBeforeComma": true, + "disallowSpaceAfterComma": false, + "disallowSpaceBeforeSemicolon": true, + + "disallowNodeTypes": [ + "DebuggerStatement", + "ForInStatement", + "LabeledStatement", + "SwitchCase", + "SwitchStatement", + "WithStatement" + ], + + "requireObjectKeysOnNewLine": { "allExcept": ["sameLine"] }, + + "requireSpacesInAnonymousFunctionExpression": { "beforeOpeningRoundBrace": true, "beforeOpeningCurlyBrace": true }, + "requireSpacesInNamedFunctionExpression": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInNamedFunctionExpression": { "beforeOpeningRoundBrace": true }, + "requireSpacesInFunctionDeclaration": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInFunctionDeclaration": { "beforeOpeningRoundBrace": true }, + + "requireSpaceBetweenArguments": true, + + "disallowSpacesInsideParentheses": true, + + "disallowSpacesInsideArrayBrackets": true, + + "disallowQuotedKeysInObjects": { "allExcept": ["reserved"] }, + + "disallowSpaceAfterObjectKeys": true, + + "requireCommaBeforeLineBreak": true, + + "disallowSpaceAfterPrefixUnaryOperators": ["++", "--", "+", "-", "~", "!"], + "requireSpaceAfterPrefixUnaryOperators": [], 
+ + "disallowSpaceBeforePostfixUnaryOperators": ["++", "--"], + "requireSpaceBeforePostfixUnaryOperators": [], + + "disallowSpaceBeforeBinaryOperators": [], + "requireSpaceBeforeBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + + "requireSpaceAfterBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + "disallowSpaceAfterBinaryOperators": [], + + "disallowImplicitTypeConversion": ["binary", "string"], + + "disallowKeywords": ["with", "eval"], + + "requireKeywordsOnNewLine": [], + "disallowKeywordsOnNewLine": ["else"], + + "requireLineFeedAtFileEnd": true, + + "disallowTrailingWhitespace": true, + + "disallowTrailingComma": true, + + "excludeFiles": ["node_modules/**", "vendor/**"], + + "disallowMultipleLineStrings": true, + + "requireDotNotation": { "allExcept": ["keywords"] }, + + "requireParenthesesAroundIIFE": true, + + "validateLineBreaks": "LF", + + "validateQuoteMarks": { + "escape": true, + "mark": "'" + }, + + "disallowOperatorBeforeLineBreak": [], + + "requireSpaceBeforeKeywords": [ + "do", + "for", + "if", + "else", + "switch", + "case", + "try", + "catch", + "finally", + "while", + "with", + "return" + ], + + "validateAlignedFunctionParameters": { + "lineBreakAfterOpeningBraces": true, + "lineBreakBeforeClosingBraces": true + }, + + "requirePaddingNewLinesBeforeExport": true, + + "validateNewlineAfterArrayElements": { + "maximum": 12 + }, + + "requirePaddingNewLinesAfterUseStrict": true, + + "disallowArrowFunctions": true, + + "disallowMultiLineTernary": true, + + "validateOrderInObjectKeys": false, + + "disallowIdenticalDestructuringNames": true, + + "disallowNestedTernaries": { "maxLevel": 1 }, + + "requireSpaceAfterComma": { "allExcept": ["trailing"] }, + "requireAlignedMultilineParams": false, + + "requireSpacesInGenerator": { + "afterStar": true + }, + + "disallowSpacesInGenerator": { + "beforeStar": true + }, + + "disallowVar": false, + + "requireArrayDestructuring": false, + + "requireEnhancedObjectLiterals": false, + + "requireObjectDestructuring": false, + + "requireEarlyReturn": false, + + "requireCapitalizedConstructorsNew": { + "allExcept": ["Function", "String", "Object", "Symbol", "Number", "Date", "RegExp", "Error", "Boolean", "Array", "GetMethod"] + }, + + "requireImportAlphabetized": false, + + "requireSpaceBeforeObjectValues": true, + "requireSpaceBeforeDestructuredValues": true, + + "disallowSpacesInsideTemplateStringPlaceholders": true, + + "disallowArrayDestructuringReturn": false, + + "requireNewlineBeforeSingleStatementsInIf": false, + + "disallowUnusedVariables": true, + + "requireSpacesInsideImportedObjectBraces": true, + + "requireUseStrict": true +} + diff --git a/node_modules/es-to-primitive/.travis.yml b/node_modules/es-to-primitive/.travis.yml new file mode 100644 index 0000000000000..c9ee1ece78cb6 --- /dev/null +++ b/node_modules/es-to-primitive/.travis.yml @@ -0,0 +1,243 @@ +language: node_js +cache: + directories: + - "$(nvm cache dir)" +os: + - linux +node_js: + - "10.11" + - "9.11" + - "8.12" + - "7.10" + - "6.14" + - "5.12" + - "4.9" + - "iojs-v3.3" + - "iojs-v2.5" + - "iojs-v1.8" + - "0.12" + - "0.11" + - "0.10" + - "0.8" + - "0.6" +before_install: + - 'case "${TRAVIS_NODE_VERSION}" in 0.*) export NPM_CONFIG_STRICT_SSL=false ;; esac' + - 'nvm install-latest-npm' +install: + - 'if [ "${TRAVIS_NODE_VERSION}" = "0.6" ] || [ "${TRAVIS_NODE_VERSION}" = "0.9" ]; then nvm install --latest-npm 0.8 && npm install && nvm use "${TRAVIS_NODE_VERSION}"; else npm install; fi;' +script: + - 'if [ -n "${PRETEST-}" ]; 
then npm run pretest ; fi' + - 'if [ -n "${POSTTEST-}" ]; then npm run posttest ; fi' + - 'if [ -n "${COVERAGE-}" ]; then npm run coverage ; fi' + - 'if [ -n "${TEST-}" ]; then npm run tests-only ; fi' +sudo: false +env: + - TEST=true +matrix: + fast_finish: true + include: + - node_js: "lts/*" + env: PRETEST=true + - node_js: "lts/*" + env: POSTTEST=true + - node_js: "4" + env: COVERAGE=true + - node_js: "10.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.13" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.12" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.1" + env: TEST=true ALLOW_FAILURE=true + - 
node_js: "6.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.4" + env: TEST=true ALLOW_FAILURE=true + allow_failures: + - os: osx + - env: TEST=true ALLOW_FAILURE=true + - env: COVERAGE=true + - node_js: "0.6" diff --git a/node_modules/es-to-primitive/CHANGELOG.md b/node_modules/es-to-primitive/CHANGELOG.md new file mode 100644 index 0000000000000..962986969d1f7 --- /dev/null +++ b/node_modules/es-to-primitive/CHANGELOG.md @@ -0,0 +1,38 @@ +1.2.0 / 2018-09-27 +================= + * [New] create ES2015 entry point/property, to replace ES6 + * [Fix] Ensure optional arguments are not part of the length (#29) + * [Deps] update `is-callable` + * [Dev Deps] update `tape`, `jscs`, `nsp`, `eslint`, `@ljharb/eslint-config`, `semver`, `object-inspect`, `replace` + * [Tests] avoid util.inspect bug with `new Date(NaN)` on node v6.0 and v6.1. 
+ * [Tests] up to `node` `v10.11`, `v9.11`, `v8.12`, `v6.14`, `v4.9` + +1.1.1 / 2016-01-03 +================= + * [Fix: ES5] fix coercion logic: ES5’s ToPrimitive does not coerce any primitive value, regardless of hint (#2) + +1.1.0 / 2015-12-27 +================= + * [New] add `Symbol.toPrimitive` support + * [Deps] update `is-callable`, `is-date-object` + * [Dev Deps] update `eslint`, `tape`, `semver`, `jscs`, `covert`, `nsp`, `@ljharb/eslint-config` + * [Dev Deps] remove unused deps + * [Tests] up to `node` `v5.3` + * [Tests] fix npm upgrades on older node versions + * [Tests] fix testling + * [Docs] Switch from vb.teelaun.ch to versionbadg.es for the npm version badge SVG + +1.0.1 / 2016-01-03 +================= + * [Fix: ES5] fix coercion logic: ES5’s ToPrimitive does not coerce any primitive value, regardless of hint (#2) + * [Deps] update `is-callable`, `is-date-object` + * [Dev Deps] update `eslint`, `tape`, `semver`, `jscs`, `covert`, `nsp`, `@ljharb/eslint-config` + * [Dev Deps] remove unused deps + * [Tests] up to `node` `v5.3` + * [Tests] fix npm upgrades on older node versions + * [Tests] fix testling + * [Docs] Switch from vb.teelaun.ch to versionbadg.es for the npm version badge SVG + +1.0.0 / 2015-03-19 +================= + * Initial release. diff --git a/node_modules/es-to-primitive/LICENSE b/node_modules/es-to-primitive/LICENSE new file mode 100644 index 0000000000000..b43df444e5182 --- /dev/null +++ b/node_modules/es-to-primitive/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Jordan Harband + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/node_modules/es-to-primitive/Makefile b/node_modules/es-to-primitive/Makefile new file mode 100644 index 0000000000000..b9e4fe1aab3dd --- /dev/null +++ b/node_modules/es-to-primitive/Makefile @@ -0,0 +1,61 @@ +# Since we rely on paths relative to the makefile location, abort if make isn't being run from there. +$(if $(findstring /,$(MAKEFILE_LIST)),$(error Please only invoke this makefile from the directory it resides in)) + + # The files that need updating when incrementing the version number. +VERSIONED_FILES := *.js *.json README* + + +# Add the local npm packages' bin folder to the PATH, so that `make` can find them, when invoked directly. 
+# Note that rather than using `$(npm bin)` the 'node_modules/.bin' path component is hard-coded, so that invocation works even from an environment +# where npm is (temporarily) unavailable due to having deactivated an nvm instance loaded into the calling shell in order to avoid interference with tests. +export PATH := $(shell printf '%s' "$$PWD/node_modules/.bin:$$PATH") +UTILS := semver +# Make sure that all required utilities can be located. +UTIL_CHECK := $(or $(shell PATH="$(PATH)" which $(UTILS) >/dev/null && echo 'ok'),$(error Did you forget to run `npm install` after cloning the repo? At least one of the required supporting utilities not found: $(UTILS))) + +# Default target (by virtue of being the first non '.'-prefixed in the file). +.PHONY: _no-target-specified +_no-target-specified: + $(error Please specify the target to make - `make list` shows targets. Alternatively, use `npm test` to run the default tests; `npm run` shows all tests) + +# Lists all targets defined in this makefile. +.PHONY: list +list: + @$(MAKE) -pRrn : -f $(MAKEFILE_LIST) 2>/dev/null | awk -v RS= -F: '/^# File/,/^# Finished Make data base/ {if ($$1 !~ "^[#.]") {print $$1}}' | command grep -v -e '^[^[:alnum:]]' -e '^$@$$command ' | sort + +# All-tests target: invokes the specified test suites for ALL shells defined in $(SHELLS). +.PHONY: test +test: + @npm test + +.PHONY: _ensure-tag +_ensure-tag: +ifndef TAG + $(error Please invoke with `make TAG=<tag> release`, where <tag> is either an increment specifier (patch, minor, major, prepatch, preminor, premajor, prerelease), or an explicit major.minor.patch version number) +endif + +CHANGELOG_ERROR = $(error No CHANGELOG specified) +.PHONY: _ensure-changelog +_ensure-changelog: + @ (git status -sb --porcelain | command grep -E '^( M|[MA] ) CHANGELOG.md' > /dev/null) || (echo no CHANGELOG.md specified && exit 2) + +# Ensures that the git workspace is clean. +.PHONY: _ensure-clean +_ensure-clean: + @[ -z "$$((git status --porcelain --untracked-files=no || echo err) | command grep -v 'CHANGELOG.md')" ] || { echo "Workspace is not clean; please commit changes first." >&2; exit 2; } + +# Makes a release; invoke with `make TAG=<tag> release`. +.PHONY: release +release: _ensure-tag _ensure-changelog _ensure-clean + @old_ver=`git describe --abbrev=0 --tags --match 'v[0-9]*.[0-9]*.[0-9]*'` || { echo "Failed to determine current version." >&2; exit 1; }; old_ver=$${old_ver#v}; \ + new_ver=`echo "$(TAG)" | sed 's/^v//'`; new_ver=$${new_ver:-patch}; \ + if printf "$$new_ver" | command grep -q '^[0-9]'; then \ + semver "$$new_ver" >/dev/null || { echo 'Invalid version number specified: $(TAG) - must be major.minor.patch' >&2; exit 2; }; \ + semver -r "> $$old_ver" "$$new_ver" >/dev/null || { echo 'Invalid version number specified: $(TAG) - must be HIGHER than current one.' >&2; exit 2; } \ + else \ + new_ver=`semver -i "$$new_ver" "$$old_ver"` || { echo 'Invalid version-increment specifier: $(TAG)' >&2; exit 2; } \ + fi; \ + printf "=== Bumping version **$$old_ver** to **$$new_ver** before committing and tagging:\n=== TYPE 'proceed' TO PROCEED, anything else to abort: " && read response && [ "$$response" = 'proceed' ] || { echo 'Aborted.'
>&2; exit 2; }; \ + replace "$$old_ver" "$$new_ver" -- $(VERSIONED_FILES) && \ + git commit -m "v$$new_ver" $(VERSIONED_FILES) CHANGELOG.md && \ + git tag -a -m "v$$new_ver" "v$$new_ver" diff --git a/node_modules/es-to-primitive/README.md b/node_modules/es-to-primitive/README.md new file mode 100644 index 0000000000000..1831ecf39564f --- /dev/null +++ b/node_modules/es-to-primitive/README.md @@ -0,0 +1,51 @@ +# es-to-primitive [![Version Badge][npm-version-svg]][package-url] + +[![Build Status][travis-svg]][travis-url] +[![dependency status][deps-svg]][deps-url] +[![dev dependency status][dev-deps-svg]][dev-deps-url] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][npm-badge-png]][package-url] + +ECMAScript “ToPrimitive” algorithm. Provides ES5 and ES2015 versions. +When different versions of the spec conflict, the default export will be the latest version of the abstract operation. +Alternative versions will also be available under an `es5`/`es2015` exported property if you require a specific version. + +## Example + +```js +var toPrimitive = require('es-to-primitive'); +var assert = require('assert'); + +assert(toPrimitive(function () {}) === String(function () {})); + +var date = new Date(); +assert(toPrimitive(date) === String(date)); + +assert(toPrimitive({ valueOf: function () { return 3; } }) === 3); + +assert(toPrimitive(['a', 'b', 3]) === String(['a', 'b', 3])); + +var sym = Symbol(); +assert(toPrimitive(Object(sym)) === sym); +``` + +## Tests +Simply clone the repo, `npm install`, and run `npm test` + +[package-url]: https://npmjs.org/package/es-to-primitive +[npm-version-svg]: http://versionbadg.es/ljharb/es-to-primitive.svg +[travis-svg]: https://travis-ci.org/ljharb/es-to-primitive.svg +[travis-url]: https://travis-ci.org/ljharb/es-to-primitive +[deps-svg]: https://david-dm.org/ljharb/es-to-primitive.svg +[deps-url]: https://david-dm.org/ljharb/es-to-primitive +[dev-deps-svg]: https://david-dm.org/ljharb/es-to-primitive/dev-status.svg +[dev-deps-url]: https://david-dm.org/ljharb/es-to-primitive#info=devDependencies +[testling-svg]: https://ci.testling.com/ljharb/es-to-primitive.png +[testling-url]: https://ci.testling.com/ljharb/es-to-primitive +[npm-badge-png]: https://nodei.co/npm/es-to-primitive.png?downloads=true&stars=true +[license-image]: http://img.shields.io/npm/l/es-to-primitive.svg +[license-url]: LICENSE +[downloads-image]: http://img.shields.io/npm/dm/es-to-primitive.svg +[downloads-url]: http://npm-stat.com/charts.html?package=es-to-primitive diff --git a/node_modules/es-to-primitive/es2015.js b/node_modules/es-to-primitive/es2015.js new file mode 100644 index 0000000000000..4a11a346c608c --- /dev/null +++ b/node_modules/es-to-primitive/es2015.js @@ -0,0 +1,75 @@ +'use strict'; + +var hasSymbols = typeof Symbol === 'function' && typeof Symbol.iterator === 'symbol'; + +var isPrimitive = require('./helpers/isPrimitive'); +var isCallable = require('is-callable'); +var isDate = require('is-date-object'); +var isSymbol = require('is-symbol'); + +var ordinaryToPrimitive = function OrdinaryToPrimitive(O, hint) { + if (typeof O === 'undefined' || O === null) { + throw new TypeError('Cannot call method on ' + O); + } + if (typeof hint !== 'string' || (hint !== 'number' && hint !== 'string')) { + throw new TypeError('hint must be "string" or "number"'); + } + var methodNames = hint === 'string' ? 
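+ // A 'string' hint tries toString() before valueOf(); any other hint tries valueOf() first. The loop below returns the first primitive produced by a callable method.
+ // For example (as in the README above), toPrimitive({ valueOf: function () { return 3; } }) === 3.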
['toString', 'valueOf'] : ['valueOf', 'toString']; + var method, result, i; + for (i = 0; i < methodNames.length; ++i) { + method = O[methodNames[i]]; + if (isCallable(method)) { + result = method.call(O); + if (isPrimitive(result)) { + return result; + } + } + } + throw new TypeError('No default value'); +}; + +var GetMethod = function GetMethod(O, P) { + var func = O[P]; + if (func !== null && typeof func !== 'undefined') { + if (!isCallable(func)) { + throw new TypeError(func + ' returned for property ' + P + ' of object ' + O + ' is not a function'); + } + return func; + } + return void 0; +}; + +// http://www.ecma-international.org/ecma-262/6.0/#sec-toprimitive +module.exports = function ToPrimitive(input) { + if (isPrimitive(input)) { + return input; + } + var hint = 'default'; + if (arguments.length > 1) { + if (arguments[1] === String) { + hint = 'string'; + } else if (arguments[1] === Number) { + hint = 'number'; + } + } + + var exoticToPrim; + if (hasSymbols) { + if (Symbol.toPrimitive) { + exoticToPrim = GetMethod(input, Symbol.toPrimitive); + } else if (isSymbol(input)) { + exoticToPrim = Symbol.prototype.valueOf; + } + } + if (typeof exoticToPrim !== 'undefined') { + var result = exoticToPrim.call(input, hint); + if (isPrimitive(result)) { + return result; + } + throw new TypeError('unable to convert exotic object to primitive'); + } + if (hint === 'default' && (isDate(input) || isSymbol(input))) { + hint = 'string'; + } + return ordinaryToPrimitive(input, hint === 'default' ? 'number' : hint); +}; diff --git a/node_modules/es-to-primitive/es5.js b/node_modules/es-to-primitive/es5.js new file mode 100644 index 0000000000000..602aa362c7e3e --- /dev/null +++ b/node_modules/es-to-primitive/es5.js @@ -0,0 +1,45 @@ +'use strict'; + +var toStr = Object.prototype.toString; + +var isPrimitive = require('./helpers/isPrimitive'); + +var isCallable = require('is-callable'); + +// http://ecma-international.org/ecma-262/5.1/#sec-8.12.8 +var ES5internalSlots = { + '[[DefaultValue]]': function (O) { + var actualHint; + if (arguments.length > 1) { + actualHint = arguments[1]; + } else { + actualHint = toStr.call(O) === '[object Date]' ? String : Number; + } + + if (actualHint === String || actualHint === Number) { + var methods = actualHint === String ? 
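+ // Same method ordering as the ES2015 version above: a String hint tries toString() first, otherwise valueOf() first.
+ // Note the ES5 default hint is Number for everything except Date objects, which default to String (see actualHint above).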
['toString', 'valueOf'] : ['valueOf', 'toString']; + var value, i; + for (i = 0; i < methods.length; ++i) { + if (isCallable(O[methods[i]])) { + value = O[methods[i]](); + if (isPrimitive(value)) { + return value; + } + } + } + throw new TypeError('No default value'); + } + throw new TypeError('invalid [[DefaultValue]] hint supplied'); + } +}; + +// http://ecma-international.org/ecma-262/5.1/#sec-9.1 +module.exports = function ToPrimitive(input) { + if (isPrimitive(input)) { + return input; + } + if (arguments.length > 1) { + return ES5internalSlots['[[DefaultValue]]'](input, arguments[1]); + } + return ES5internalSlots['[[DefaultValue]]'](input); +}; diff --git a/node_modules/es-to-primitive/es6.js b/node_modules/es-to-primitive/es6.js new file mode 100644 index 0000000000000..2d1f4dc927a90 --- /dev/null +++ b/node_modules/es-to-primitive/es6.js @@ -0,0 +1,3 @@ +'use strict'; + +module.exports = require('./es2015'); diff --git a/node_modules/es-to-primitive/helpers/isPrimitive.js b/node_modules/es-to-primitive/helpers/isPrimitive.js new file mode 100644 index 0000000000000..3669156452759 --- /dev/null +++ b/node_modules/es-to-primitive/helpers/isPrimitive.js @@ -0,0 +1,3 @@ +module.exports = function isPrimitive(value) { + return value === null || (typeof value !== 'function' && typeof value !== 'object'); +}; diff --git a/node_modules/es-to-primitive/index.js b/node_modules/es-to-primitive/index.js new file mode 100644 index 0000000000000..e60d912e11332 --- /dev/null +++ b/node_modules/es-to-primitive/index.js @@ -0,0 +1,17 @@ +'use strict'; + +var ES5 = require('./es5'); +var ES6 = require('./es6'); +var ES2015 = require('./es2015'); + +if (Object.defineProperty) { + Object.defineProperty(ES2015, 'ES5', { enumerable: false, value: ES5 }); + Object.defineProperty(ES2015, 'ES6', { enumerable: false, value: ES6 }); + Object.defineProperty(ES2015, 'ES2015', { enumerable: false, value: ES2015 }); +} else { + ES6.ES5 = ES5; + ES6.ES6 = ES6; + ES6.ES2015 = ES2015; +} + +module.exports = ES2015; diff --git a/node_modules/es-to-primitive/package.json b/node_modules/es-to-primitive/package.json new file mode 100644 index 0000000000000..c3191f047e29c --- /dev/null +++ b/node_modules/es-to-primitive/package.json @@ -0,0 +1,86 @@ +{ + "name": "es-to-primitive", + "version": "1.2.0", + "author": "Jordan Harband", + "description": "ECMAScript “ToPrimitive” algorithm. 
Provides ES5 and ES2015 versions.", + "license": "MIT", + "main": "index.js", + "scripts": { + "pretest": "npm run --silent lint", + "test": "npm run --silent tests-only", + "posttest": "npm run --silent security", + "tests-only": "node --es-staging test", + "coverage": "covert test/*.js", + "coverage-quiet": "covert test/*.js --quiet", + "lint": "npm run --silent jscs && npm run --silent eslint", + "jscs": "jscs test/*.js *.js", + "eslint": "eslint test/*.js *.js", + "security": "nsp check" + }, + "repository": { + "type": "git", + "url": "git://github.com/ljharb/es-to-primitive.git" + }, + "keywords": [ + "primitive", + "abstract", + "ecmascript", + "es5", + "es6", + "es2015", + "toPrimitive", + "coerce", + "type", + "object", + "string", + "number", + "boolean", + "symbol", + "null", + "undefined" + ], + "dependencies": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + }, + "devDependencies": { + "@ljharb/eslint-config": "^13.0.0", + "covert": "^1.1.0", + "eslint": "^5.6.0", + "foreach": "^2.0.5", + "function.prototype.name": "^1.1.0", + "jscs": "^3.0.7", + "nsp": "^3.2.1", + "object-inspect": "^1.6.0", + "object-is": "^1.0.1", + "replace": "^1.0.0", + "semver": "^5.5.1", + "tape": "^4.9.1" + }, + "testling": { + "files": "test", + "browsers": [ + "iexplore/6.0..latest", + "firefox/3.0..6.0", + "firefox/15.0..latest", + "firefox/nightly", + "chrome/4.0..10.0", + "chrome/20.0..latest", + "chrome/canary", + "opera/10.0..latest", + "opera/next", + "safari/4.0..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2" + ] + }, + "engines": { + "node": ">= 0.4" + } + +,"_resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.0.tgz" +,"_integrity": "sha512-qZryBOJjV//LaxLTV6UC//WewneB3LcXOL9NP++ozKVXsIIIpm/2c13UDiD9Jp2eThsecw9m3jPqDwTyobcdbg==" +,"_from": "es-to-primitive@1.2.0" +} \ No newline at end of file diff --git a/node_modules/es-to-primitive/test/es2015.js b/node_modules/es-to-primitive/test/es2015.js new file mode 100644 index 0000000000000..80f4083dd9315 --- /dev/null +++ b/node_modules/es-to-primitive/test/es2015.js @@ -0,0 +1,151 @@ +'use strict'; + +var test = require('tape'); +var toPrimitive = require('../es2015'); +var is = require('object-is'); +var forEach = require('foreach'); +var functionName = require('function.prototype.name'); +var debug = require('object-inspect'); + +var hasSymbols = typeof Symbol === 'function' && typeof Symbol.iterator === 'symbol'; +var hasSymbolToPrimitive = hasSymbols && typeof Symbol.toPrimitive === 'symbol'; + +test('function properties', function (t) { + t.equal(toPrimitive.length, 1, 'length is 1'); + t.equal(functionName(toPrimitive), 'ToPrimitive', 'name is ToPrimitive'); + + t.end(); +}); + +var primitives = [null, undefined, true, false, 0, -0, 42, NaN, Infinity, -Infinity, '', 'abc']; + +test('primitives', function (t) { + forEach(primitives, function (i) { + t.ok(is(toPrimitive(i), i), 'toPrimitive(' + debug(i) + ') returns the same value'); + t.ok(is(toPrimitive(i, String), i), 'toPrimitive(' + debug(i) + ', String) returns the same value'); + t.ok(is(toPrimitive(i, Number), i), 'toPrimitive(' + debug(i) + ', Number) returns the same value'); + }); + t.end(); +}); + +test('Symbols', { skip: !hasSymbols }, function (t) { + var symbols = [ + Symbol('foo'), + Symbol.iterator, + Symbol['for']('foo') // eslint-disable-line no-restricted-properties + ]; + forEach(symbols, function (sym) { + t.equal(toPrimitive(sym), sym, 'toPrimitive(' + debug(sym) + ') returns 
the same value'); + t.equal(toPrimitive(sym, String), sym, 'toPrimitive(' + debug(sym) + ', String) returns the same value'); + t.equal(toPrimitive(sym, Number), sym, 'toPrimitive(' + debug(sym) + ', Number) returns the same value'); + }); + + var primitiveSym = Symbol('primitiveSym'); + var objectSym = Object(primitiveSym); + t.equal(toPrimitive(objectSym), primitiveSym, 'toPrimitive(' + debug(objectSym) + ') returns ' + debug(primitiveSym)); + t.equal(toPrimitive(objectSym, String), primitiveSym, 'toPrimitive(' + debug(objectSym) + ', String) returns ' + debug(primitiveSym)); + t.equal(toPrimitive(objectSym, Number), primitiveSym, 'toPrimitive(' + debug(objectSym) + ', Number) returns ' + debug(primitiveSym)); + t.end(); +}); + +test('Arrays', function (t) { + var arrays = [[], ['a', 'b'], [1, 2]]; + forEach(arrays, function (arr) { + t.equal(toPrimitive(arr), String(arr), 'toPrimitive(' + debug(arr) + ') returns the string version of the array'); + t.equal(toPrimitive(arr, String), String(arr), 'toPrimitive(' + debug(arr) + ') returns the string version of the array'); + t.equal(toPrimitive(arr, Number), String(arr), 'toPrimitive(' + debug(arr) + ') returns the string version of the array'); + }); + t.end(); +}); + +test('Dates', function (t) { + var dates = [new Date(), new Date(0), new Date(NaN)]; + forEach(dates, function (date) { + t.equal(toPrimitive(date), String(date), 'toPrimitive(' + debug(date) + ') returns the string version of the date'); + t.equal(toPrimitive(date, String), String(date), 'toPrimitive(' + debug(date) + ') returns the string version of the date'); + t.ok(is(toPrimitive(date, Number), Number(date)), 'toPrimitive(' + debug(date) + ') returns the number version of the date'); + }); + t.end(); +}); + +var coercibleObject = { valueOf: function () { return 3; }, toString: function () { return 42; } }; +var valueOfOnlyObject = { valueOf: function () { return 4; }, toString: function () { return {}; } }; +var toStringOnlyObject = { valueOf: function () { return {}; }, toString: function () { return 7; } }; +var coercibleFnObject = { + valueOf: function () { return function valueOfFn() {}; }, + toString: function () { return 42; } +}; +var uncoercibleObject = { valueOf: function () { return {}; }, toString: function () { return {}; } }; +var uncoercibleFnObject = { + valueOf: function () { return function valueOfFn() {}; }, + toString: function () { return function toStrFn() {}; } +}; + +test('Objects', function (t) { + t.equal(toPrimitive(coercibleObject), coercibleObject.valueOf(), 'coercibleObject with no hint coerces to valueOf'); + t.equal(toPrimitive(coercibleObject, Number), coercibleObject.valueOf(), 'coercibleObject with hint Number coerces to valueOf'); + t.equal(toPrimitive(coercibleObject, String), coercibleObject.toString(), 'coercibleObject with hint String coerces to non-stringified toString'); + + t.equal(toPrimitive(coercibleFnObject), coercibleFnObject.toString(), 'coercibleFnObject coerces to non-stringified toString'); + t.equal(toPrimitive(coercibleFnObject, Number), coercibleFnObject.toString(), 'coercibleFnObject with hint Number coerces to non-stringified toString'); + t.equal(toPrimitive(coercibleFnObject, String), coercibleFnObject.toString(), 'coercibleFnObject with hint String coerces to non-stringified toString'); + + t.equal(toPrimitive({}), '[object Object]', '{} with no hint coerces to Object#toString'); + t.equal(toPrimitive({}, Number), '[object Object]', '{} with hint Number coerces to Object#toString'); + t.equal(toPrimitive({}, 
String), '[object Object]', '{} with hint String coerces to Object#toString'); + + t.equal(toPrimitive(toStringOnlyObject), toStringOnlyObject.toString(), 'toStringOnlyObject returns non-stringified toString'); + t.equal(toPrimitive(toStringOnlyObject, Number), toStringOnlyObject.toString(), 'toStringOnlyObject with hint Number returns non-stringified toString'); + t.equal(toPrimitive(toStringOnlyObject, String), toStringOnlyObject.toString(), 'toStringOnlyObject with hint String returns non-stringified toString'); + + t.equal(toPrimitive(valueOfOnlyObject), valueOfOnlyObject.valueOf(), 'valueOfOnlyObject returns valueOf'); + t.equal(toPrimitive(valueOfOnlyObject, Number), valueOfOnlyObject.valueOf(), 'valueOfOnlyObject with hint Number returns valueOf'); + t.equal(toPrimitive(valueOfOnlyObject, String), valueOfOnlyObject.valueOf(), 'valueOfOnlyObject with hint String returns non-stringified valueOf'); + + t.test('Symbol.toPrimitive', { skip: !hasSymbolToPrimitive }, function (st) { + var overriddenObject = { toString: st.fail, valueOf: st.fail }; + overriddenObject[Symbol.toPrimitive] = function (hint) { return String(hint); }; + + st.equal(toPrimitive(overriddenObject), 'default', 'object with Symbol.toPrimitive + no hint invokes that'); + st.equal(toPrimitive(overriddenObject, Number), 'number', 'object with Symbol.toPrimitive + hint Number invokes that'); + st.equal(toPrimitive(overriddenObject, String), 'string', 'object with Symbol.toPrimitive + hint String invokes that'); + + var nullToPrimitive = { toString: coercibleObject.toString, valueOf: coercibleObject.valueOf }; + nullToPrimitive[Symbol.toPrimitive] = null; + st.equal(toPrimitive(nullToPrimitive), toPrimitive(coercibleObject), 'object with no hint + null Symbol.toPrimitive ignores it'); + st.equal(toPrimitive(nullToPrimitive, Number), toPrimitive(coercibleObject, Number), 'object with hint Number + null Symbol.toPrimitive ignores it'); + st.equal(toPrimitive(nullToPrimitive, String), toPrimitive(coercibleObject, String), 'object with hint String + null Symbol.toPrimitive ignores it'); + + st.test('exceptions', function (sst) { + var nonFunctionToPrimitive = { toString: sst.fail, valueOf: sst.fail }; + nonFunctionToPrimitive[Symbol.toPrimitive] = {}; + sst['throws'](toPrimitive.bind(null, nonFunctionToPrimitive), TypeError, 'Symbol.toPrimitive returning a non-function throws'); + + var uncoercibleToPrimitive = { toString: sst.fail, valueOf: sst.fail }; + uncoercibleToPrimitive[Symbol.toPrimitive] = function (hint) { + return { toString: function () { return hint; } }; + }; + sst['throws'](toPrimitive.bind(null, uncoercibleToPrimitive), TypeError, 'Symbol.toPrimitive returning an object throws'); + + var throwingToPrimitive = { toString: sst.fail, valueOf: sst.fail }; + throwingToPrimitive[Symbol.toPrimitive] = function (hint) { throw new RangeError(hint); }; + sst['throws'](toPrimitive.bind(null, throwingToPrimitive), RangeError, 'Symbol.toPrimitive throwing throws'); + + sst.end(); + }); + + st.end(); + }); + + t.test('exceptions', function (st) { + st['throws'](toPrimitive.bind(null, uncoercibleObject), TypeError, 'uncoercibleObject throws a TypeError'); + st['throws'](toPrimitive.bind(null, uncoercibleObject, Number), TypeError, 'uncoercibleObject with hint Number throws a TypeError'); + st['throws'](toPrimitive.bind(null, uncoercibleObject, String), TypeError, 'uncoercibleObject with hint String throws a TypeError'); + + st['throws'](toPrimitive.bind(null, uncoercibleFnObject), TypeError, 'uncoercibleFnObject throws a 
TypeError'); + st['throws'](toPrimitive.bind(null, uncoercibleFnObject, Number), TypeError, 'uncoercibleFnObject with hint Number throws a TypeError'); + st['throws'](toPrimitive.bind(null, uncoercibleFnObject, String), TypeError, 'uncoercibleFnObject with hint String throws a TypeError'); + st.end(); + }); + t.end(); +}); diff --git a/node_modules/es-to-primitive/test/es5.js b/node_modules/es-to-primitive/test/es5.js new file mode 100644 index 0000000000000..8b80ff5bd968a --- /dev/null +++ b/node_modules/es-to-primitive/test/es5.js @@ -0,0 +1,94 @@ +'use strict'; + +var test = require('tape'); +var toPrimitive = require('../es5'); +var is = require('object-is'); +var forEach = require('foreach'); +var functionName = require('function.prototype.name'); +var debug = require('object-inspect'); + +test('function properties', function (t) { + t.equal(toPrimitive.length, 1, 'length is 1'); + t.equal(functionName(toPrimitive), 'ToPrimitive', 'name is ToPrimitive'); + + t.end(); +}); + +var primitives = [null, undefined, true, false, 0, -0, 42, NaN, Infinity, -Infinity, '', 'abc']; + +test('primitives', function (t) { + forEach(primitives, function (i) { + t.ok(is(toPrimitive(i), i), 'toPrimitive(' + debug(i) + ') returns the same value'); + t.ok(is(toPrimitive(i, String), i), 'toPrimitive(' + debug(i) + ', String) returns the same value'); + t.ok(is(toPrimitive(i, Number), i), 'toPrimitive(' + debug(i) + ', Number) returns the same value'); + }); + t.end(); +}); + +test('Arrays', function (t) { + var arrays = [[], ['a', 'b'], [1, 2]]; + forEach(arrays, function (arr) { + t.ok(is(toPrimitive(arr), arr.toString()), 'toPrimitive(' + debug(arr) + ') returns toString of the array'); + t.equal(toPrimitive(arr, String), arr.toString(), 'toPrimitive(' + debug(arr) + ') returns toString of the array'); + t.ok(is(toPrimitive(arr, Number), arr.toString()), 'toPrimitive(' + debug(arr) + ') returns toString of the array'); + }); + t.end(); +}); + +test('Dates', function (t) { + var dates = [new Date(), new Date(0), new Date(NaN)]; + forEach(dates, function (date) { + t.equal(toPrimitive(date), date.toString(), 'toPrimitive(' + debug(date) + ') returns toString of the date'); + t.equal(toPrimitive(date, String), date.toString(), 'toPrimitive(' + debug(date) + ') returns toString of the date'); + t.ok(is(toPrimitive(date, Number), date.valueOf()), 'toPrimitive(' + debug(date) + ') returns valueOf of the date'); + }); + t.end(); +}); + +var coercibleObject = { valueOf: function () { return 3; }, toString: function () { return 42; } }; +var valueOfOnlyObject = { valueOf: function () { return 4; }, toString: function () { return {}; } }; +var toStringOnlyObject = { valueOf: function () { return {}; }, toString: function () { return 7; } }; +var coercibleFnObject = { + valueOf: function () { return function valueOfFn() {}; }, + toString: function () { return 42; } +}; +var uncoercibleObject = { valueOf: function () { return {}; }, toString: function () { return {}; } }; +var uncoercibleFnObject = { + valueOf: function () { return function valueOfFn() {}; }, + toString: function () { return function toStrFn() {}; } +}; + +test('Objects', function (t) { + t.equal(toPrimitive(coercibleObject), coercibleObject.valueOf(), 'coercibleObject with no hint coerces to valueOf'); + t.equal(toPrimitive(coercibleObject, String), coercibleObject.toString(), 'coercibleObject with hint String coerces to toString'); + t.equal(toPrimitive(coercibleObject, Number), coercibleObject.valueOf(), 'coercibleObject with hint Number coerces 
to valueOf'); + + t.equal(toPrimitive(coercibleFnObject), coercibleFnObject.toString(), 'coercibleFnObject coerces to toString'); + t.equal(toPrimitive(coercibleFnObject, String), coercibleFnObject.toString(), 'coercibleFnObject with hint String coerces to toString'); + t.equal(toPrimitive(coercibleFnObject, Number), coercibleFnObject.toString(), 'coercibleFnObject with hint Number coerces to toString'); + + t.ok(is(toPrimitive({}), '[object Object]'), '{} with no hint coerces to Object#toString'); + t.equal(toPrimitive({}, String), '[object Object]', '{} with hint String coerces to Object#toString'); + t.ok(is(toPrimitive({}, Number), '[object Object]'), '{} with hint Number coerces to Object#toString'); + + t.equal(toPrimitive(toStringOnlyObject), toStringOnlyObject.toString(), 'toStringOnlyObject returns toString'); + t.equal(toPrimitive(toStringOnlyObject, String), toStringOnlyObject.toString(), 'toStringOnlyObject with hint String returns toString'); + t.equal(toPrimitive(toStringOnlyObject, Number), toStringOnlyObject.toString(), 'toStringOnlyObject with hint Number returns toString'); + + t.equal(toPrimitive(valueOfOnlyObject), valueOfOnlyObject.valueOf(), 'valueOfOnlyObject returns valueOf'); + t.equal(toPrimitive(valueOfOnlyObject, String), valueOfOnlyObject.valueOf(), 'valueOfOnlyObject with hint String returns valueOf'); + t.equal(toPrimitive(valueOfOnlyObject, Number), valueOfOnlyObject.valueOf(), 'valueOfOnlyObject with hint Number returns valueOf'); + + t.test('exceptions', function (st) { + st['throws'](toPrimitive.bind(null, uncoercibleObject), TypeError, 'uncoercibleObject throws a TypeError'); + st['throws'](toPrimitive.bind(null, uncoercibleObject, String), TypeError, 'uncoercibleObject with hint String throws a TypeError'); + st['throws'](toPrimitive.bind(null, uncoercibleObject, Number), TypeError, 'uncoercibleObject with hint Number throws a TypeError'); + + st['throws'](toPrimitive.bind(null, uncoercibleFnObject), TypeError, 'uncoercibleFnObject throws a TypeError'); + st['throws'](toPrimitive.bind(null, uncoercibleFnObject, String), TypeError, 'uncoercibleFnObject with hint String throws a TypeError'); + st['throws'](toPrimitive.bind(null, uncoercibleFnObject, Number), TypeError, 'uncoercibleFnObject with hint Number throws a TypeError'); + st.end(); + }); + + t.end(); +}); diff --git a/node_modules/es-to-primitive/test/es6.js b/node_modules/es-to-primitive/test/es6.js new file mode 100644 index 0000000000000..c6df63fb6dc08 --- /dev/null +++ b/node_modules/es-to-primitive/test/es6.js @@ -0,0 +1,151 @@ +'use strict'; + +var test = require('tape'); +var toPrimitive = require('../es6'); +var is = require('object-is'); +var forEach = require('foreach'); +var functionName = require('function.prototype.name'); +var debug = require('object-inspect'); + +var hasSymbols = typeof Symbol === 'function' && typeof Symbol.iterator === 'symbol'; +var hasSymbolToPrimitive = hasSymbols && typeof Symbol.toPrimitive === 'symbol'; + +test('function properties', function (t) { + t.equal(toPrimitive.length, 1, 'length is 1'); + t.equal(functionName(toPrimitive), 'ToPrimitive', 'name is ToPrimitive'); + + t.end(); +}); + +var primitives = [null, undefined, true, false, 0, -0, 42, NaN, Infinity, -Infinity, '', 'abc']; + +test('primitives', function (t) { + forEach(primitives, function (i) { + t.ok(is(toPrimitive(i), i), 'toPrimitive(' + debug(i) + ') returns the same value'); + t.ok(is(toPrimitive(i, String), i), 'toPrimitive(' + debug(i) + ', String) returns the same value'); + 
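+ // `is` (object-is) rather than `===` lets the -0 and NaN entries in `primitives` compare correctly (Object.is semantics).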
t.ok(is(toPrimitive(i, Number), i), 'toPrimitive(' + debug(i) + ', Number) returns the same value'); + }); + t.end(); +}); + +test('Symbols', { skip: !hasSymbols }, function (t) { + var symbols = [ + Symbol('foo'), + Symbol.iterator, + Symbol['for']('foo') // eslint-disable-line no-restricted-properties + ]; + forEach(symbols, function (sym) { + t.equal(toPrimitive(sym), sym, 'toPrimitive(' + debug(sym) + ') returns the same value'); + t.equal(toPrimitive(sym, String), sym, 'toPrimitive(' + debug(sym) + ', String) returns the same value'); + t.equal(toPrimitive(sym, Number), sym, 'toPrimitive(' + debug(sym) + ', Number) returns the same value'); + }); + + var primitiveSym = Symbol('primitiveSym'); + var objectSym = Object(primitiveSym); + t.equal(toPrimitive(objectSym), primitiveSym, 'toPrimitive(' + debug(objectSym) + ') returns ' + debug(primitiveSym)); + t.equal(toPrimitive(objectSym, String), primitiveSym, 'toPrimitive(' + debug(objectSym) + ', String) returns ' + debug(primitiveSym)); + t.equal(toPrimitive(objectSym, Number), primitiveSym, 'toPrimitive(' + debug(objectSym) + ', Number) returns ' + debug(primitiveSym)); + t.end(); +}); + +test('Arrays', function (t) { + var arrays = [[], ['a', 'b'], [1, 2]]; + forEach(arrays, function (arr) { + t.equal(toPrimitive(arr), String(arr), 'toPrimitive(' + debug(arr) + ') returns the string version of the array'); + t.equal(toPrimitive(arr, String), String(arr), 'toPrimitive(' + debug(arr) + ') returns the string version of the array'); + t.equal(toPrimitive(arr, Number), String(arr), 'toPrimitive(' + debug(arr) + ') returns the string version of the array'); + }); + t.end(); +}); + +test('Dates', function (t) { + var dates = [new Date(), new Date(0), new Date(NaN)]; + forEach(dates, function (date) { + t.equal(toPrimitive(date), String(date), 'toPrimitive(' + debug(date) + ') returns the string version of the date'); + t.equal(toPrimitive(date, String), String(date), 'toPrimitive(' + debug(date) + ') returns the string version of the date'); + t.ok(is(toPrimitive(date, Number), Number(date)), 'toPrimitive(' + debug(date) + ') returns the number version of the date'); + }); + t.end(); +}); + +var coercibleObject = { valueOf: function () { return 3; }, toString: function () { return 42; } }; +var valueOfOnlyObject = { valueOf: function () { return 4; }, toString: function () { return {}; } }; +var toStringOnlyObject = { valueOf: function () { return {}; }, toString: function () { return 7; } }; +var coercibleFnObject = { + valueOf: function () { return function valueOfFn() {}; }, + toString: function () { return 42; } +}; +var uncoercibleObject = { valueOf: function () { return {}; }, toString: function () { return {}; } }; +var uncoercibleFnObject = { + valueOf: function () { return function valueOfFn() {}; }, + toString: function () { return function toStrFn() {}; } +}; + +test('Objects', function (t) { + t.equal(toPrimitive(coercibleObject), coercibleObject.valueOf(), 'coercibleObject with no hint coerces to valueOf'); + t.equal(toPrimitive(coercibleObject, Number), coercibleObject.valueOf(), 'coercibleObject with hint Number coerces to valueOf'); + t.equal(toPrimitive(coercibleObject, String), coercibleObject.toString(), 'coercibleObject with hint String coerces to non-stringified toString'); + + t.equal(toPrimitive(coercibleFnObject), coercibleFnObject.toString(), 'coercibleFnObject coerces to non-stringified toString'); + t.equal(toPrimitive(coercibleFnObject, Number), coercibleFnObject.toString(), 'coercibleFnObject with hint Number 
coerces to non-stringified toString'); + t.equal(toPrimitive(coercibleFnObject, String), coercibleFnObject.toString(), 'coercibleFnObject with hint String coerces to non-stringified toString'); + + t.equal(toPrimitive({}), '[object Object]', '{} with no hint coerces to Object#toString'); + t.equal(toPrimitive({}, Number), '[object Object]', '{} with hint Number coerces to Object#toString'); + t.equal(toPrimitive({}, String), '[object Object]', '{} with hint String coerces to Object#toString'); + + t.equal(toPrimitive(toStringOnlyObject), toStringOnlyObject.toString(), 'toStringOnlyObject returns non-stringified toString'); + t.equal(toPrimitive(toStringOnlyObject, Number), toStringOnlyObject.toString(), 'toStringOnlyObject with hint Number returns non-stringified toString'); + t.equal(toPrimitive(toStringOnlyObject, String), toStringOnlyObject.toString(), 'toStringOnlyObject with hint String returns non-stringified toString'); + + t.equal(toPrimitive(valueOfOnlyObject), valueOfOnlyObject.valueOf(), 'valueOfOnlyObject returns valueOf'); + t.equal(toPrimitive(valueOfOnlyObject, Number), valueOfOnlyObject.valueOf(), 'valueOfOnlyObject with hint Number returns valueOf'); + t.equal(toPrimitive(valueOfOnlyObject, String), valueOfOnlyObject.valueOf(), 'valueOfOnlyObject with hint String returns non-stringified valueOf'); + + t.test('Symbol.toPrimitive', { skip: !hasSymbolToPrimitive }, function (st) { + var overriddenObject = { toString: st.fail, valueOf: st.fail }; + overriddenObject[Symbol.toPrimitive] = function (hint) { return String(hint); }; + + st.equal(toPrimitive(overriddenObject), 'default', 'object with Symbol.toPrimitive + no hint invokes that'); + st.equal(toPrimitive(overriddenObject, Number), 'number', 'object with Symbol.toPrimitive + hint Number invokes that'); + st.equal(toPrimitive(overriddenObject, String), 'string', 'object with Symbol.toPrimitive + hint String invokes that'); + + var nullToPrimitive = { toString: coercibleObject.toString, valueOf: coercibleObject.valueOf }; + nullToPrimitive[Symbol.toPrimitive] = null; + st.equal(toPrimitive(nullToPrimitive), toPrimitive(coercibleObject), 'object with no hint + null Symbol.toPrimitive ignores it'); + st.equal(toPrimitive(nullToPrimitive, Number), toPrimitive(coercibleObject, Number), 'object with hint Number + null Symbol.toPrimitive ignores it'); + st.equal(toPrimitive(nullToPrimitive, String), toPrimitive(coercibleObject, String), 'object with hint String + null Symbol.toPrimitive ignores it'); + + st.test('exceptions', function (sst) { + var nonFunctionToPrimitive = { toString: sst.fail, valueOf: sst.fail }; + nonFunctionToPrimitive[Symbol.toPrimitive] = {}; + sst['throws'](toPrimitive.bind(null, nonFunctionToPrimitive), TypeError, 'Symbol.toPrimitive returning a non-function throws'); + + var uncoercibleToPrimitive = { toString: sst.fail, valueOf: sst.fail }; + uncoercibleToPrimitive[Symbol.toPrimitive] = function (hint) { + return { toString: function () { return hint; } }; + }; + sst['throws'](toPrimitive.bind(null, uncoercibleToPrimitive), TypeError, 'Symbol.toPrimitive returning an object throws'); + + var throwingToPrimitive = { toString: sst.fail, valueOf: sst.fail }; + throwingToPrimitive[Symbol.toPrimitive] = function (hint) { throw new RangeError(hint); }; + sst['throws'](toPrimitive.bind(null, throwingToPrimitive), RangeError, 'Symbol.toPrimitive throwing throws'); + + sst.end(); + }); + + st.end(); + }); + + t.test('exceptions', function (st) { + st['throws'](toPrimitive.bind(null, uncoercibleObject), 
TypeError, 'uncoercibleObject throws a TypeError'); + st['throws'](toPrimitive.bind(null, uncoercibleObject, Number), TypeError, 'uncoercibleObject with hint Number throws a TypeError'); + st['throws'](toPrimitive.bind(null, uncoercibleObject, String), TypeError, 'uncoercibleObject with hint String throws a TypeError'); + + st['throws'](toPrimitive.bind(null, uncoercibleFnObject), TypeError, 'uncoercibleFnObject throws a TypeError'); + st['throws'](toPrimitive.bind(null, uncoercibleFnObject, Number), TypeError, 'uncoercibleFnObject with hint Number throws a TypeError'); + st['throws'](toPrimitive.bind(null, uncoercibleFnObject, String), TypeError, 'uncoercibleFnObject with hint String throws a TypeError'); + st.end(); + }); + t.end(); +}); diff --git a/node_modules/es-to-primitive/test/index.js b/node_modules/es-to-primitive/test/index.js new file mode 100644 index 0000000000000..ad71f39e2581e --- /dev/null +++ b/node_modules/es-to-primitive/test/index.js @@ -0,0 +1,20 @@ +'use strict'; + +var toPrimitive = require('../'); +var ES5 = require('../es5'); +var ES6 = require('../es6'); +var ES2015 = require('../es2015'); + +var test = require('tape'); + +test('default export', function (t) { + t.equal(toPrimitive, ES2015, 'default export is ES2015'); + t.equal(toPrimitive.ES5, ES5, 'ES5 property has ES5 method'); + t.equal(toPrimitive.ES6, ES6, 'ES6 property has ES6 method'); + t.equal(toPrimitive.ES2015, ES2015, 'ES2015 property has ES2015 method'); + t.end(); +}); + +require('./es5'); +require('./es6'); +require('./es2015'); diff --git a/node_modules/es6-promise/CHANGELOG.md b/node_modules/es6-promise/CHANGELOG.md index 51582059f9046..d630cc0dc06c0 100644 --- a/node_modules/es6-promise/CHANGELOG.md +++ b/node_modules/es6-promise/CHANGELOG.md @@ -1,5 +1,9 @@ # Master +# 4.2.5 + +* remove old try/catch performance hacks, modern runtimes do not require these tricks + # 4.2.4 * [Fixes #305] Confuse webpack diff --git a/node_modules/es6-promise/dist/es6-promise.auto.js b/node_modules/es6-promise/dist/es6-promise.auto.js index 30d01304f0a4a..7ad1de569011e 100644 --- a/node_modules/es6-promise/dist/es6-promise.auto.js +++ b/node_modules/es6-promise/dist/es6-promise.auto.js @@ -3,7 +3,7 @@ * @copyright Copyright (c) 2014 Yehuda Katz, Tom Dale, Stefan Penner and contributors (Conversion to ES6 API by Jake Archibald) * @license Licensed under MIT license * See https://raw.githubusercontent.com/stefanpenner/es6-promise/master/LICENSE - * @version v4.2.4+314e4831 + * @version v4.2.8+1e68dce6 */ (function (global, factory) { @@ -234,8 +234,6 @@ var PENDING = void 0; var FULFILLED = 1; var REJECTED = 2; -var TRY_CATCH_ERROR = { error: null }; - function selfFulfillment() { return new TypeError("You cannot resolve a promise with itself"); } @@ -244,15 +242,6 @@ function cannotReturnOwn() { return new TypeError('A promises callback cannot return that same promise.'); } -function getThen(promise) { - try { - return promise.then; - } catch (error) { - TRY_CATCH_ERROR.error = error; - return TRY_CATCH_ERROR; - } -} - function tryThen(then$$1, value, fulfillmentHandler, rejectionHandler) { try { then$$1.call(value, fulfillmentHandler, rejectionHandler); @@ -308,10 +297,7 @@ function handleMaybeThenable(promise, maybeThenable, then$$1) { if (maybeThenable.constructor === promise.constructor && then$$1 === then && maybeThenable.constructor.resolve === resolve$1) { handleOwnThenable(promise, maybeThenable); } else { - if (then$$1 === TRY_CATCH_ERROR) { - reject(promise, TRY_CATCH_ERROR.error); - 
TRY_CATCH_ERROR.error = null; - } else if (then$$1 === undefined) { + if (then$$1 === undefined) { fulfill(promise, maybeThenable); } else if (isFunction(then$$1)) { handleForeignThenable(promise, maybeThenable, then$$1); @@ -325,7 +311,14 @@ function resolve(promise, value) { if (promise === value) { reject(promise, selfFulfillment()); } else if (objectOrFunction(value)) { - handleMaybeThenable(promise, value, getThen(value)); + var then$$1 = void 0; + try { + then$$1 = value.then; + } catch (error) { + reject(promise, error); + return; + } + handleMaybeThenable(promise, value, then$$1); } else { fulfill(promise, value); } @@ -404,31 +397,18 @@ function publish(promise) { promise._subscribers.length = 0; } -function tryCatch(callback, detail) { - try { - return callback(detail); - } catch (e) { - TRY_CATCH_ERROR.error = e; - return TRY_CATCH_ERROR; - } -} - function invokeCallback(settled, promise, callback, detail) { var hasCallback = isFunction(callback), value = void 0, error = void 0, - succeeded = void 0, - failed = void 0; + succeeded = true; if (hasCallback) { - value = tryCatch(callback, detail); - - if (value === TRY_CATCH_ERROR) { - failed = true; - error = value.error; - value.error = null; - } else { - succeeded = true; + try { + value = callback(detail); + } catch (e) { + succeeded = false; + error = e; } if (promise === value) { @@ -437,14 +417,13 @@ function invokeCallback(settled, promise, callback, detail) { } } else { value = detail; - succeeded = true; } if (promise._state !== PENDING) { // noop } else if (hasCallback && succeeded) { resolve(promise, value); - } else if (failed) { + } else if (succeeded === false) { reject(promise, error); } else if (settled === FULFILLED) { fulfill(promise, value); @@ -522,7 +501,15 @@ var Enumerator = function () { if (resolve$$1 === resolve$1) { - var _then = getThen(entry); + var _then = void 0; + var error = void 0; + var didError = false; + try { + _then = entry.then; + } catch (e) { + didError = true; + error = e; + } if (_then === then && entry._state !== PENDING) { this._settledAt(entry._state, i, entry._result); @@ -531,7 +518,11 @@ var Enumerator = function () { this._result[i] = entry; } else if (c === Promise$2) { var promise = new c(noop); - handleMaybeThenable(promise, entry, _then); + if (didError) { + reject(promise, error); + } else { + handleMaybeThenable(promise, entry, _then); + } this._willSettleAt(promise, i); } else { this._willSettleAt(new c(function (resolve$$1) { @@ -1109,15 +1100,19 @@ var Promise$2 = function () { var promise = this; var constructor = promise.constructor; - return promise.then(function (value) { - return constructor.resolve(callback()).then(function () { - return value; - }); - }, function (reason) { - return constructor.resolve(callback()).then(function () { - throw reason; + if (isFunction(callback)) { + return promise.then(function (value) { + return constructor.resolve(callback()).then(function () { + return value; + }); + }, function (reason) { + return constructor.resolve(callback()).then(function () { + throw reason; + }); }); - }); + } + + return promise.then(callback, callback); }; return Promise; diff --git a/node_modules/es6-promise/dist/es6-promise.auto.map b/node_modules/es6-promise/dist/es6-promise.auto.map index bff203c5faabf..a5abce99f4105 100644 --- a/node_modules/es6-promise/dist/es6-promise.auto.map +++ b/node_modules/es6-promise/dist/es6-promise.auto.map @@ -1 +1 @@ 
-{"version":3,"sources":["config/versionTemplate.txt","lib/es6-promise/utils.js","lib/es6-promise/asap.js","lib/es6-promise/then.js","lib/es6-promise/promise/resolve.js","lib/es6-promise/-internal.js","lib/es6-promise/enumerator.js","lib/es6-promise/promise/all.js","lib/es6-promise/promise/race.js","lib/es6-promise/promise/reject.js","lib/es6-promise/promise.js","lib/es6-promise/polyfill.js","lib/es6-promise.js","lib/es6-promise.auto.js"],"sourcesContent":["/*!\n * @overview es6-promise - a tiny implementation of Promises/A+.\n * @copyright Copyright (c) 2014 Yehuda Katz, Tom Dale, Stefan Penner and contributors (Conversion to ES6 API by Jake Archibald)\n * @license Licensed under MIT license\n * See https://raw.githubusercontent.com/stefanpenner/es6-promise/master/LICENSE\n * @version v4.2.4+314e4831\n */\n","export function objectOrFunction(x) {\n var type = typeof x;\n return x !== null && (type === 'object' || type === 'function');\n}\n\nexport function isFunction(x) {\n return typeof x === 'function';\n}\n\nexport function isMaybeThenable(x) {\n return x !== null && typeof x === 'object';\n}\n\nvar _isArray = void 0;\nif (Array.isArray) {\n _isArray = Array.isArray;\n} else {\n _isArray = function (x) {\n return Object.prototype.toString.call(x) === '[object Array]';\n };\n}\n\nexport var isArray = _isArray;","var len = 0;\nvar vertxNext = void 0;\nvar customSchedulerFn = void 0;\n\nexport var asap = function asap(callback, arg) {\n queue[len] = callback;\n queue[len + 1] = arg;\n len += 2;\n if (len === 2) {\n // If len is 2, that means that we need to schedule an async flush.\n // If additional callbacks are queued before the queue is flushed, they\n // will be processed by this flush that we are scheduling.\n if (customSchedulerFn) {\n customSchedulerFn(flush);\n } else {\n scheduleFlush();\n }\n }\n};\n\nexport function setScheduler(scheduleFn) {\n customSchedulerFn = scheduleFn;\n}\n\nexport function setAsap(asapFn) {\n asap = asapFn;\n}\n\nvar browserWindow = typeof window !== 'undefined' ? 
window : undefined;\nvar browserGlobal = browserWindow || {};\nvar BrowserMutationObserver = browserGlobal.MutationObserver || browserGlobal.WebKitMutationObserver;\nvar isNode = typeof self === 'undefined' && typeof process !== 'undefined' && {}.toString.call(process) === '[object process]';\n\n// test for web worker but not in IE10\nvar isWorker = typeof Uint8ClampedArray !== 'undefined' && typeof importScripts !== 'undefined' && typeof MessageChannel !== 'undefined';\n\n// node\nfunction useNextTick() {\n // node version 0.10.x displays a deprecation warning when nextTick is used recursively\n // see https://github.com/cujojs/when/issues/410 for details\n return function () {\n return process.nextTick(flush);\n };\n}\n\n// vertx\nfunction useVertxTimer() {\n if (typeof vertxNext !== 'undefined') {\n return function () {\n vertxNext(flush);\n };\n }\n\n return useSetTimeout();\n}\n\nfunction useMutationObserver() {\n var iterations = 0;\n var observer = new BrowserMutationObserver(flush);\n var node = document.createTextNode('');\n observer.observe(node, { characterData: true });\n\n return function () {\n node.data = iterations = ++iterations % 2;\n };\n}\n\n// web worker\nfunction useMessageChannel() {\n var channel = new MessageChannel();\n channel.port1.onmessage = flush;\n return function () {\n return channel.port2.postMessage(0);\n };\n}\n\nfunction useSetTimeout() {\n // Store setTimeout reference so es6-promise will be unaffected by\n // other code modifying setTimeout (like sinon.useFakeTimers())\n var globalSetTimeout = setTimeout;\n return function () {\n return globalSetTimeout(flush, 1);\n };\n}\n\nvar queue = new Array(1000);\nfunction flush() {\n for (var i = 0; i < len; i += 2) {\n var callback = queue[i];\n var arg = queue[i + 1];\n\n callback(arg);\n\n queue[i] = undefined;\n queue[i + 1] = undefined;\n }\n\n len = 0;\n}\n\nfunction attemptVertx() {\n try {\n var vertx = Function('return this')().require('vertx');\n vertxNext = vertx.runOnLoop || vertx.runOnContext;\n return useVertxTimer();\n } catch (e) {\n return useSetTimeout();\n }\n}\n\nvar scheduleFlush = void 0;\n// Decide what async method to use to triggering processing of queued callbacks:\nif (isNode) {\n scheduleFlush = useNextTick();\n} else if (BrowserMutationObserver) {\n scheduleFlush = useMutationObserver();\n} else if (isWorker) {\n scheduleFlush = useMessageChannel();\n} else if (browserWindow === undefined && typeof require === 'function') {\n scheduleFlush = attemptVertx();\n} else {\n scheduleFlush = useSetTimeout();\n}","import { invokeCallback, subscribe, FULFILLED, REJECTED, noop, makePromise, PROMISE_ID } from './-internal';\n\nimport { asap } from './asap';\n\nexport default function then(onFulfillment, onRejection) {\n var parent = this;\n\n var child = new this.constructor(noop);\n\n if (child[PROMISE_ID] === undefined) {\n makePromise(child);\n }\n\n var _state = parent._state;\n\n\n if (_state) {\n var callback = arguments[_state - 1];\n asap(function () {\n return invokeCallback(_state, child, callback, parent._result);\n });\n } else {\n subscribe(parent, child, onFulfillment, onRejection);\n }\n\n return child;\n}","import { noop, resolve as _resolve } from '../-internal';\n\n/**\n `Promise.resolve` returns a promise that will become resolved with the\n passed `value`. 
It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n resolve(1);\n });\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n ```javascript\n let promise = Promise.resolve(1);\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n @method resolve\n @static\n @param {Any} value value that the returned promise will be resolved with\n Useful for tooling.\n @return {Promise} a promise that will become fulfilled with the given\n `value`\n*/\nexport default function resolve(object) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (object && typeof object === 'object' && object.constructor === Constructor) {\n return object;\n }\n\n var promise = new Constructor(noop);\n _resolve(promise, object);\n return promise;\n}","import { objectOrFunction, isFunction } from './utils';\n\nimport { asap } from './asap';\n\nimport originalThen from './then';\nimport originalResolve from './promise/resolve';\n\nexport var PROMISE_ID = Math.random().toString(36).substring(2);\n\nfunction noop() {}\n\nvar PENDING = void 0;\nvar FULFILLED = 1;\nvar REJECTED = 2;\n\nvar TRY_CATCH_ERROR = { error: null };\n\nfunction selfFulfillment() {\n return new TypeError(\"You cannot resolve a promise with itself\");\n}\n\nfunction cannotReturnOwn() {\n return new TypeError('A promises callback cannot return that same promise.');\n}\n\nfunction getThen(promise) {\n try {\n return promise.then;\n } catch (error) {\n TRY_CATCH_ERROR.error = error;\n return TRY_CATCH_ERROR;\n }\n}\n\nfunction tryThen(then, value, fulfillmentHandler, rejectionHandler) {\n try {\n then.call(value, fulfillmentHandler, rejectionHandler);\n } catch (e) {\n return e;\n }\n}\n\nfunction handleForeignThenable(promise, thenable, then) {\n asap(function (promise) {\n var sealed = false;\n var error = tryThen(then, thenable, function (value) {\n if (sealed) {\n return;\n }\n sealed = true;\n if (thenable !== value) {\n resolve(promise, value);\n } else {\n fulfill(promise, value);\n }\n }, function (reason) {\n if (sealed) {\n return;\n }\n sealed = true;\n\n reject(promise, reason);\n }, 'Settle: ' + (promise._label || ' unknown promise'));\n\n if (!sealed && error) {\n sealed = true;\n reject(promise, error);\n }\n }, promise);\n}\n\nfunction handleOwnThenable(promise, thenable) {\n if (thenable._state === FULFILLED) {\n fulfill(promise, thenable._result);\n } else if (thenable._state === REJECTED) {\n reject(promise, thenable._result);\n } else {\n subscribe(thenable, undefined, function (value) {\n return resolve(promise, value);\n }, function (reason) {\n return reject(promise, reason);\n });\n }\n}\n\nfunction handleMaybeThenable(promise, maybeThenable, then) {\n if (maybeThenable.constructor === promise.constructor && then === originalThen && maybeThenable.constructor.resolve === originalResolve) {\n handleOwnThenable(promise, maybeThenable);\n } else {\n if (then === TRY_CATCH_ERROR) {\n reject(promise, TRY_CATCH_ERROR.error);\n TRY_CATCH_ERROR.error = null;\n } else if (then === undefined) {\n fulfill(promise, maybeThenable);\n } else if (isFunction(then)) {\n handleForeignThenable(promise, maybeThenable, then);\n } else {\n fulfill(promise, maybeThenable);\n }\n }\n}\n\nfunction resolve(promise, value) {\n if (promise === value) {\n reject(promise, selfFulfillment());\n } else if (objectOrFunction(value)) {\n handleMaybeThenable(promise, value, getThen(value));\n } else {\n 
fulfill(promise, value);\n }\n}\n\nfunction publishRejection(promise) {\n if (promise._onerror) {\n promise._onerror(promise._result);\n }\n\n publish(promise);\n}\n\nfunction fulfill(promise, value) {\n if (promise._state !== PENDING) {\n return;\n }\n\n promise._result = value;\n promise._state = FULFILLED;\n\n if (promise._subscribers.length !== 0) {\n asap(publish, promise);\n }\n}\n\nfunction reject(promise, reason) {\n if (promise._state !== PENDING) {\n return;\n }\n promise._state = REJECTED;\n promise._result = reason;\n\n asap(publishRejection, promise);\n}\n\nfunction subscribe(parent, child, onFulfillment, onRejection) {\n var _subscribers = parent._subscribers;\n var length = _subscribers.length;\n\n\n parent._onerror = null;\n\n _subscribers[length] = child;\n _subscribers[length + FULFILLED] = onFulfillment;\n _subscribers[length + REJECTED] = onRejection;\n\n if (length === 0 && parent._state) {\n asap(publish, parent);\n }\n}\n\nfunction publish(promise) {\n var subscribers = promise._subscribers;\n var settled = promise._state;\n\n if (subscribers.length === 0) {\n return;\n }\n\n var child = void 0,\n callback = void 0,\n detail = promise._result;\n\n for (var i = 0; i < subscribers.length; i += 3) {\n child = subscribers[i];\n callback = subscribers[i + settled];\n\n if (child) {\n invokeCallback(settled, child, callback, detail);\n } else {\n callback(detail);\n }\n }\n\n promise._subscribers.length = 0;\n}\n\nfunction tryCatch(callback, detail) {\n try {\n return callback(detail);\n } catch (e) {\n TRY_CATCH_ERROR.error = e;\n return TRY_CATCH_ERROR;\n }\n}\n\nfunction invokeCallback(settled, promise, callback, detail) {\n var hasCallback = isFunction(callback),\n value = void 0,\n error = void 0,\n succeeded = void 0,\n failed = void 0;\n\n if (hasCallback) {\n value = tryCatch(callback, detail);\n\n if (value === TRY_CATCH_ERROR) {\n failed = true;\n error = value.error;\n value.error = null;\n } else {\n succeeded = true;\n }\n\n if (promise === value) {\n reject(promise, cannotReturnOwn());\n return;\n }\n } else {\n value = detail;\n succeeded = true;\n }\n\n if (promise._state !== PENDING) {\n // noop\n } else if (hasCallback && succeeded) {\n resolve(promise, value);\n } else if (failed) {\n reject(promise, error);\n } else if (settled === FULFILLED) {\n fulfill(promise, value);\n } else if (settled === REJECTED) {\n reject(promise, value);\n }\n}\n\nfunction initializePromise(promise, resolver) {\n try {\n resolver(function resolvePromise(value) {\n resolve(promise, value);\n }, function rejectPromise(reason) {\n reject(promise, reason);\n });\n } catch (e) {\n reject(promise, e);\n }\n}\n\nvar id = 0;\nfunction nextId() {\n return id++;\n}\n\nfunction makePromise(promise) {\n promise[PROMISE_ID] = id++;\n promise._state = undefined;\n promise._result = undefined;\n promise._subscribers = [];\n}\n\nexport { nextId, makePromise, getThen, noop, resolve, reject, fulfill, subscribe, publish, publishRejection, initializePromise, invokeCallback, FULFILLED, REJECTED, PENDING, handleMaybeThenable };","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isArray, isMaybeThenable } from './utils';\nimport { noop, reject, fulfill, subscribe, FULFILLED, REJECTED, PENDING, getThen, handleMaybeThenable } from './-internal';\n\nimport then from './then';\nimport Promise from './promise';\nimport originalResolve from './promise/resolve';\nimport originalThen from 
'./then';\nimport { makePromise, PROMISE_ID } from './-internal';\n\nfunction validationError() {\n return new Error('Array Methods must be provided an Array');\n};\n\nvar Enumerator = function () {\n function Enumerator(Constructor, input) {\n this._instanceConstructor = Constructor;\n this.promise = new Constructor(noop);\n\n if (!this.promise[PROMISE_ID]) {\n makePromise(this.promise);\n }\n\n if (isArray(input)) {\n this.length = input.length;\n this._remaining = input.length;\n\n this._result = new Array(this.length);\n\n if (this.length === 0) {\n fulfill(this.promise, this._result);\n } else {\n this.length = this.length || 0;\n this._enumerate(input);\n if (this._remaining === 0) {\n fulfill(this.promise, this._result);\n }\n }\n } else {\n reject(this.promise, validationError());\n }\n }\n\n Enumerator.prototype._enumerate = function _enumerate(input) {\n for (var i = 0; this._state === PENDING && i < input.length; i++) {\n this._eachEntry(input[i], i);\n }\n };\n\n Enumerator.prototype._eachEntry = function _eachEntry(entry, i) {\n var c = this._instanceConstructor;\n var resolve = c.resolve;\n\n\n if (resolve === originalResolve) {\n var _then = getThen(entry);\n\n if (_then === originalThen && entry._state !== PENDING) {\n this._settledAt(entry._state, i, entry._result);\n } else if (typeof _then !== 'function') {\n this._remaining--;\n this._result[i] = entry;\n } else if (c === Promise) {\n var promise = new c(noop);\n handleMaybeThenable(promise, entry, _then);\n this._willSettleAt(promise, i);\n } else {\n this._willSettleAt(new c(function (resolve) {\n return resolve(entry);\n }), i);\n }\n } else {\n this._willSettleAt(resolve(entry), i);\n }\n };\n\n Enumerator.prototype._settledAt = function _settledAt(state, i, value) {\n var promise = this.promise;\n\n\n if (promise._state === PENDING) {\n this._remaining--;\n\n if (state === REJECTED) {\n reject(promise, value);\n } else {\n this._result[i] = value;\n }\n }\n\n if (this._remaining === 0) {\n fulfill(promise, this._result);\n }\n };\n\n Enumerator.prototype._willSettleAt = function _willSettleAt(promise, i) {\n var enumerator = this;\n\n subscribe(promise, undefined, function (value) {\n return enumerator._settledAt(FULFILLED, i, value);\n }, function (reason) {\n return enumerator._settledAt(REJECTED, i, reason);\n });\n };\n\n return Enumerator;\n}();\n\nexport default Enumerator;\n;","import Enumerator from '../enumerator';\n\n/**\n `Promise.all` accepts an array of promises, and returns a new promise which\n is fulfilled with an array of fulfillment values for the passed promises, or\n rejected with the reason of the first passed promise to be rejected. It casts all\n elements of the passed iterable to promises as it runs this algorithm.\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = resolve(2);\n let promise3 = resolve(3);\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // The array here would be [ 1, 2, 3 ];\n });\n ```\n\n If any of the `promises` given to `all` are rejected, the first promise\n that is rejected will be given as an argument to the returned promises's\n rejection handler. 
For example:\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = reject(new Error(\"2\"));\n let promise3 = reject(new Error(\"3\"));\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // Code here never runs because there are rejected promises!\n }, function(error) {\n // error.message === \"2\"\n });\n ```\n\n @method all\n @static\n @param {Array} entries array of promises\n @param {String} label optional string for labeling the promise.\n Useful for tooling.\n @return {Promise} promise that is fulfilled when all `promises` have been\n fulfilled, or rejected if any of them become rejected.\n @static\n*/\nexport default function all(entries) {\n return new Enumerator(this, entries).promise;\n}","import { isArray } from \"../utils\";\n\n/**\n `Promise.race` returns a new promise which is settled in the same way as the\n first passed promise to settle.\n\n Example:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 2');\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // result === 'promise 2' because it was resolved before promise1\n // was resolved.\n });\n ```\n\n `Promise.race` is deterministic in that only the state of the first\n settled promise matters. For example, even if other promises given to the\n `promises` array argument are resolved, but the first settled promise has\n become rejected before the other promises became fulfilled, the returned\n promise will become rejected:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n reject(new Error('promise 2'));\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // Code here never runs\n }, function(reason){\n // reason.message === 'promise 2' because promise 2 became rejected before\n // promise 1 became fulfilled\n });\n ```\n\n An example real-world use case is implementing timeouts:\n\n ```javascript\n Promise.race([ajax('foo.json'), timeout(5000)])\n ```\n\n @method race\n @static\n @param {Array} promises array of promises to observe\n Useful for tooling.\n @return {Promise} a promise which settles in the same way as the first passed\n promise to settle.\n*/\nexport default function race(entries) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (!isArray(entries)) {\n return new Constructor(function (_, reject) {\n return reject(new TypeError('You must pass an array to race.'));\n });\n } else {\n return new Constructor(function (resolve, reject) {\n var length = entries.length;\n for (var i = 0; i < length; i++) {\n Constructor.resolve(entries[i]).then(resolve, reject);\n }\n });\n }\n}","import { noop, reject as _reject } from '../-internal';\n\n/**\n `Promise.reject` returns a promise rejected with the passed `reason`.\n It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n reject(new Error('WHOOPS'));\n });\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n 
```javascript\n let promise = Promise.reject(new Error('WHOOPS'));\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n @method reject\n @static\n @param {Any} reason value that the returned promise will be rejected with.\n Useful for tooling.\n @return {Promise} a promise rejected with the given `reason`.\n*/\nexport default function reject(reason) {\n /*jshint validthis:true */\n var Constructor = this;\n var promise = new Constructor(noop);\n _reject(promise, reason);\n return promise;\n}","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isFunction } from './utils';\nimport { noop, nextId, PROMISE_ID, initializePromise } from './-internal';\nimport { asap, setAsap, setScheduler } from './asap';\n\nimport all from './promise/all';\nimport race from './promise/race';\nimport Resolve from './promise/resolve';\nimport Reject from './promise/reject';\nimport then from './then';\n\nfunction needsResolver() {\n throw new TypeError('You must pass a resolver function as the first argument to the promise constructor');\n}\n\nfunction needsNew() {\n throw new TypeError(\"Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.\");\n}\n\n/**\n Promise objects represent the eventual result of an asynchronous operation. The\n primary way of interacting with a promise is through its `then` method, which\n registers callbacks to receive either a promise's eventual value or the reason\n why the promise cannot be fulfilled.\n\n Terminology\n -----------\n\n - `promise` is an object or function with a `then` method whose behavior conforms to this specification.\n - `thenable` is an object or function that defines a `then` method.\n - `value` is any legal JavaScript value (including undefined, a thenable, or a promise).\n - `exception` is a value that is thrown using the throw statement.\n - `reason` is a value that indicates why a promise was rejected.\n - `settled` the final resting state of a promise, fulfilled or rejected.\n\n A promise can be in one of three states: pending, fulfilled, or rejected.\n\n Promises that are fulfilled have a fulfillment value and are in the fulfilled\n state. Promises that are rejected have a rejection reason and are in the\n rejected state. A fulfillment value is never a thenable.\n\n Promises can also be said to *resolve* a value. If this value is also a\n promise, then the original promise's settled state will match the value's\n settled state. 
So a promise that *resolves* a promise that rejects will\n itself reject, and a promise that *resolves* a promise that fulfills will\n itself fulfill.\n\n\n Basic Usage:\n ------------\n\n ```js\n let promise = new Promise(function(resolve, reject) {\n // on success\n resolve(value);\n\n // on failure\n reject(reason);\n });\n\n promise.then(function(value) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Advanced Usage:\n ---------------\n\n Promises shine when abstracting away asynchronous interactions such as\n `XMLHttpRequest`s.\n\n ```js\n function getJSON(url) {\n return new Promise(function(resolve, reject){\n let xhr = new XMLHttpRequest();\n\n xhr.open('GET', url);\n xhr.onreadystatechange = handler;\n xhr.responseType = 'json';\n xhr.setRequestHeader('Accept', 'application/json');\n xhr.send();\n\n function handler() {\n if (this.readyState === this.DONE) {\n if (this.status === 200) {\n resolve(this.response);\n } else {\n reject(new Error('getJSON: `' + url + '` failed with status: [' + this.status + ']'));\n }\n }\n };\n });\n }\n\n getJSON('/posts.json').then(function(json) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Unlike callbacks, promises are great composable primitives.\n\n ```js\n Promise.all([\n getJSON('/posts'),\n getJSON('/comments')\n ]).then(function(values){\n values[0] // => postsJSON\n values[1] // => commentsJSON\n\n return values;\n });\n ```\n\n @class Promise\n @param {Function} resolver\n Useful for tooling.\n @constructor\n*/\n\nvar Promise = function () {\n function Promise(resolver) {\n this[PROMISE_ID] = nextId();\n this._result = this._state = undefined;\n this._subscribers = [];\n\n if (noop !== resolver) {\n typeof resolver !== 'function' && needsResolver();\n this instanceof Promise ? initializePromise(this, resolver) : needsNew();\n }\n }\n\n /**\n The primary way of interacting with a promise is through its `then` method,\n which registers callbacks to receive either a promise's eventual value or the\n reason why the promise cannot be fulfilled.\n ```js\n findUser().then(function(user){\n // user is available\n }, function(reason){\n // user is unavailable, and you are given the reason why\n });\n ```\n Chaining\n --------\n The return value of `then` is itself a promise. 
This second, 'downstream'\n promise is resolved with the return value of the first promise's fulfillment\n or rejection handler, or rejected if the handler throws an exception.\n ```js\n findUser().then(function (user) {\n return user.name;\n }, function (reason) {\n return 'default name';\n }).then(function (userName) {\n // If `findUser` fulfilled, `userName` will be the user's name, otherwise it\n // will be `'default name'`\n });\n findUser().then(function (user) {\n throw new Error('Found user, but still unhappy');\n }, function (reason) {\n throw new Error('`findUser` rejected and we're unhappy');\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // if `findUser` fulfilled, `reason` will be 'Found user, but still unhappy'.\n // If `findUser` rejected, `reason` will be '`findUser` rejected and we're unhappy'.\n });\n ```\n If the downstream promise does not specify a rejection handler, rejection reasons will be propagated further downstream.\n ```js\n findUser().then(function (user) {\n throw new PedagogicalException('Upstream error');\n }).then(function (value) {\n // never reached\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // The `PedgagocialException` is propagated all the way down to here\n });\n ```\n Assimilation\n ------------\n Sometimes the value you want to propagate to a downstream promise can only be\n retrieved asynchronously. This can be achieved by returning a promise in the\n fulfillment or rejection handler. The downstream promise will then be pending\n until the returned promise is settled. This is called *assimilation*.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // The user's comments are now available\n });\n ```\n If the assimliated promise rejects, then the downstream promise will also reject.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // If `findCommentsByAuthor` fulfills, we'll have the value here\n }, function (reason) {\n // If `findCommentsByAuthor` rejects, we'll have the reason here\n });\n ```\n Simple Example\n --------------\n Synchronous Example\n ```javascript\n let result;\n try {\n result = findResult();\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n findResult(function(result, err){\n if (err) {\n // failure\n } else {\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findResult().then(function(result){\n // success\n }, function(reason){\n // failure\n });\n ```\n Advanced Example\n --------------\n Synchronous Example\n ```javascript\n let author, books;\n try {\n author = findAuthor();\n books = findBooksByAuthor(author);\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n function foundBooks(books) {\n }\n function failure(reason) {\n }\n findAuthor(function(author, err){\n if (err) {\n failure(err);\n // failure\n } else {\n try {\n findBoooksByAuthor(author, function(books, err) {\n if (err) {\n failure(err);\n } else {\n try {\n foundBooks(books);\n } catch(reason) {\n failure(reason);\n }\n }\n });\n } catch(error) {\n failure(err);\n }\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findAuthor().\n then(findBooksByAuthor).\n then(function(books){\n // found books\n }).catch(function(reason){\n // something went wrong\n });\n ```\n @method then\n @param {Function} onFulfilled\n @param {Function} onRejected\n Useful for tooling.\n @return 
{Promise}\n */\n\n /**\n `catch` is simply sugar for `then(undefined, onRejection)` which makes it the same\n as the catch block of a try/catch statement.\n ```js\n function findAuthor(){\n throw new Error('couldn't find that author');\n }\n // synchronous\n try {\n findAuthor();\n } catch(reason) {\n // something went wrong\n }\n // async with promises\n findAuthor().catch(function(reason){\n // something went wrong\n });\n ```\n @method catch\n @param {Function} onRejection\n Useful for tooling.\n @return {Promise}\n */\n\n\n Promise.prototype.catch = function _catch(onRejection) {\n return this.then(null, onRejection);\n };\n\n /**\n `finally` will be invoked regardless of the promise's fate just as native\n try/catch/finally behaves\n \n Synchronous example:\n \n ```js\n findAuthor() {\n if (Math.random() > 0.5) {\n throw new Error();\n }\n return new Author();\n }\n \n try {\n return findAuthor(); // succeed or fail\n } catch(error) {\n return findOtherAuther();\n } finally {\n // always runs\n // doesn't affect the return value\n }\n ```\n \n Asynchronous example:\n \n ```js\n findAuthor().catch(function(reason){\n return findOtherAuther();\n }).finally(function(){\n // author was either found, or not\n });\n ```\n \n @method finally\n @param {Function} callback\n @return {Promise}\n */\n\n\n Promise.prototype.finally = function _finally(callback) {\n var promise = this;\n var constructor = promise.constructor;\n\n return promise.then(function (value) {\n return constructor.resolve(callback()).then(function () {\n return value;\n });\n }, function (reason) {\n return constructor.resolve(callback()).then(function () {\n throw reason;\n });\n });\n };\n\n return Promise;\n}();\n\nPromise.prototype.then = then;\nexport default Promise;\nPromise.all = all;\nPromise.race = race;\nPromise.resolve = Resolve;\nPromise.reject = Reject;\nPromise._setScheduler = setScheduler;\nPromise._setAsap = setAsap;\nPromise._asap = asap;","/*global self*/\nimport Promise from './promise';\n\nexport default function polyfill() {\n var local = void 0;\n\n if (typeof global !== 'undefined') {\n local = global;\n } else if (typeof self !== 'undefined') {\n local = self;\n } else {\n try {\n local = Function('return this')();\n } catch (e) {\n throw new Error('polyfill failed because global object is unavailable in this environment');\n }\n }\n\n var P = local.Promise;\n\n if (P) {\n var promiseToString = null;\n try {\n promiseToString = Object.prototype.toString.call(P.resolve());\n } catch (e) {\n // silently ignored\n }\n\n if (promiseToString === '[object Promise]' && !P.cast) {\n return;\n }\n }\n\n local.Promise = Promise;\n}","import Promise from './es6-promise/promise';\nimport polyfill from './es6-promise/polyfill';\n\n// Strange compat..\nPromise.polyfill = polyfill;\nPromise.Promise = Promise;\nexport default Promise;","import Promise from './es6-promise';\nPromise.polyfill();\nexport default 
Promise;"],"names":["resolve","_resolve","then","originalThen","originalResolve","Promise","reject","_reject","Resolve","Reject"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACNO,SAAS,gBAAgB,CAAC,CAAC,EAAE;EAClC,IAAI,IAAI,GAAG,OAAO,CAAC,CAAC;EACpB,OAAO,CAAC,KAAK,IAAI,KAAK,IAAI,KAAK,QAAQ,IAAI,IAAI,KAAK,UAAU,CAAC,CAAC;CACjE;;AAED,AAAO,SAAS,UAAU,CAAC,CAAC,EAAE;EAC5B,OAAO,OAAO,CAAC,KAAK,UAAU,CAAC;CAChC;;AAED,AAEC;;AAED,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC;AACtB,IAAI,KAAK,CAAC,OAAO,EAAE;EACjB,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC;CAC1B,MAAM;EACL,QAAQ,GAAG,UAAU,CAAC,EAAE;IACtB,OAAO,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,gBAAgB,CAAC;GAC/D,CAAC;CACH;;AAED,AAAO,IAAI,OAAO,GAAG,QAAQ;;ACtB7B,IAAI,GAAG,GAAG,CAAC,CAAC;AACZ,IAAI,SAAS,GAAG,KAAK,CAAC,CAAC;AACvB,IAAI,iBAAiB,GAAG,KAAK,CAAC,CAAC;;AAE/B,AAAO,IAAI,IAAI,GAAG,SAAS,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;EAC7C,KAAK,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC;EACtB,KAAK,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;EACrB,GAAG,IAAI,CAAC,CAAC;EACT,IAAI,GAAG,KAAK,CAAC,EAAE;;;;IAIb,IAAI,iBAAiB,EAAE;MACrB,iBAAiB,CAAC,KAAK,CAAC,CAAC;KAC1B,MAAM;MACL,aAAa,EAAE,CAAC;KACjB;GACF;CACF,CAAC;;AAEF,AAAO,SAAS,YAAY,CAAC,UAAU,EAAE;EACvC,iBAAiB,GAAG,UAAU,CAAC;CAChC;;AAED,AAAO,SAAS,OAAO,CAAC,MAAM,EAAE;EAC9B,IAAI,GAAG,MAAM,CAAC;CACf;;AAED,IAAI,aAAa,GAAG,OAAO,MAAM,KAAK,WAAW,GAAG,MAAM,GAAG,SAAS,CAAC;AACvE,IAAI,aAAa,GAAG,aAAa,IAAI,EAAE,CAAC;AACxC,IAAI,uBAAuB,GAAG,aAAa,CAAC,gBAAgB,IAAI,aAAa,CAAC,sBAAsB,CAAC;AACrG,IAAI,MAAM,GAAG,OAAO,IAAI,KAAK,WAAW,IAAI,OAAO,OAAO,KAAK,WAAW,IAAI,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,kBAAkB,CAAC;;;AAG/H,IAAI,QAAQ,GAAG,OAAO,iBAAiB,KAAK,WAAW,IAAI,OAAO,aAAa,KAAK,WAAW,IAAI,OAAO,cAAc,KAAK,WAAW,CAAC;;;AAGzI,SAAS,WAAW,GAAG;;;EAGrB,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;GAChC,CAAC;CACH;;;AAGD,SAAS,aAAa,GAAG;EACvB,IAAI,OAAO,SAAS,KAAK,WAAW,EAAE;IACpC,OAAO,YAAY;MACjB,SAAS,CAAC,KAAK,CAAC,CAAC;KAClB,CAAC;GACH;;EAED,OAAO,aAAa,EAAE,CAAC;CACxB;;AAED,SAAS,mBAAmB,GAAG;EAC7B,IAAI,UAAU,GAAG,CAAC,CAAC;EACnB,IAAI,QAAQ,GAAG,IAAI,uBAAuB,CAAC,KAAK,CAAC,CAAC;EAClD,IAAI,IAAI,GAAG,QAAQ,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC;EACvC,QAAQ,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;;EAEhD,OAAO,YAAY;IACjB,IAAI,CAAC,IAAI,GAAG,UAAU,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC;GAC3C,CAAC;CACH;;;AAGD,SAAS,iBAAiB,GAAG;EAC3B,IAAI,OAAO,GAAG,IAAI,cAAc,EAAE,CAAC;EACnC,OAAO,CAAC,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC;EAChC,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;GACrC,CAAC;CACH;;AAED,SAAS,aAAa,GAAG;;;EAGvB,IAAI,gBAAgB,GAAG,UAAU,CAAC;EAClC,OAAO,YAAY;IACjB,OAAO,gBAAgB,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;GACnC,CAAC;CACH;;AAED,IAAI,KAAK,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC;AAC5B,SAAS,KAAK,GAAG;EACf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE;IAC/B,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,GAAG,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;;IAEvB,QAAQ,CAAC,GAAG,CAAC,CAAC;;IAEd,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC;IACrB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;GAC1B;;EAED,GAAG,GAAG,CAAC,CAAC;CACT;;AAED,SAAS,YAAY,GAAG;EACtB,IAAI;IACF,IAAI,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;IACvD,SAAS,GAAG,KAAK,CAAC,SAAS,IAAI,KAAK,CAAC,YAAY,CAAC;IAClD,OAAO,aAAa,EAAE,CAAC;GACxB,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,aAAa,EAAE,CAAC;GACxB;CACF;;AAED,IAAI,aAAa,GAAG,KAAK,CAAC,CAAC;;AAE3B,IAAI,MAAM,EAAE;EACV,aAAa,GAAG,WAAW,EAAE,CAAC;CAC/B,MAAM,IAAI,uBAAuB,EAAE;EAClC,aAAa,GAAG,mBAAmB,EAAE,CAAC;CACvC,MAAM,IAAI,QAAQ,EAAE;EACnB,aAAa,GAAG,iBAAiB,EAAE,CAAC;CACrC,MAAM,IAAI,aAAa,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;EACvE,aAAa,GAAG,YAAY,EAAE,CAAC;CAChC,MAAM;EACL,aA
Aa,GAAG,aAAa,EAAE,CAAC;;;CACjC,DCtHc,SAAS,IAAI,CAAC,aAAa,EAAE,WAAW,EAAE;EACvD,IAAI,MAAM,GAAG,IAAI,CAAC;;EAElB,IAAI,KAAK,GAAG,IAAI,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;;EAEvC,IAAI,KAAK,CAAC,UAAU,CAAC,KAAK,SAAS,EAAE;IACnC,WAAW,CAAC,KAAK,CAAC,CAAC;GACpB;;EAED,IAAI,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;;;EAG3B,IAAI,MAAM,EAAE;IACV,IAAI,QAAQ,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IACrC,IAAI,CAAC,YAAY;MACf,OAAO,cAAc,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;KAChE,CAAC,CAAC;GACJ,MAAM;IACL,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,CAAC,CAAC;GACtD;;EAED,OAAO,KAAK,CAAC;;;CACd,DCxBD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+BA,AAAe,SAASA,SAAO,CAAC,MAAM,EAAE;;EAEtC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,CAAC,WAAW,KAAK,WAAW,EAAE;IAC9E,OAAO,MAAM,CAAC;GACf;;EAED,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,OAAQ,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EAC1B,OAAO,OAAO,CAAC;;;CAChB,DCrCM,IAAI,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;;AAEhE,SAAS,IAAI,GAAG,EAAE;;AAElB,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC;AACrB,IAAI,SAAS,GAAG,CAAC,CAAC;AAClB,IAAI,QAAQ,GAAG,CAAC,CAAC;;AAEjB,IAAI,eAAe,GAAG,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC;;AAEtC,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,0CAA0C,CAAC,CAAC;CAClE;;AAED,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,sDAAsD,CAAC,CAAC;CAC9E;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE;EACxB,IAAI;IACF,OAAO,OAAO,CAAC,IAAI,CAAC;GACrB,CAAC,OAAO,KAAK,EAAE;IACd,eAAe,CAAC,KAAK,GAAG,KAAK,CAAC;IAC9B,OAAO,eAAe,CAAC;GACxB;CACF;;AAED,SAAS,OAAO,CAACC,OAAI,EAAE,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,EAAE;EAClE,IAAI;IACFA,OAAI,CAAC,IAAI,CAAC,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;GACxD,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,CAAC;GACV;CACF;;AAED,SAAS,qBAAqB,CAAC,OAAO,EAAE,QAAQ,EAAEA,OAAI,EAAE;EACtD,IAAI,CAAC,UAAU,OAAO,EAAE;IACtB,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,IAAI,KAAK,GAAG,OAAO,CAACA,OAAI,EAAE,QAAQ,EAAE,UAAU,KAAK,EAAE;MACnD,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;MACd,IAAI,QAAQ,KAAK,KAAK,EAAE;QACtB,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB,MAAM;QACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB;KACF,EAAE,UAAU,MAAM,EAAE;MACnB,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;;MAEd,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,EAAE,UAAU,IAAI,OAAO,CAAC,MAAM,IAAI,kBAAkB,CAAC,CAAC,CAAC;;IAExD,IAAI,CAAC,MAAM,IAAI,KAAK,EAAE;MACpB,MAAM,GAAG,IAAI,CAAC;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACxB;GACF,EAAE,OAAO,CAAC,CAAC;CACb;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI,QAAQ,CAAC,MAAM,KAAK,SAAS,EAAE;IACjC,OAAO,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACpC,MAAM,IAAI,QAAQ,CAAC,MAAM,KAAK,QAAQ,EAAE;IACvC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACnC,MAAM;IACL,SAAS,CAAC,QAAQ,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC9C,OAAO,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KAChC,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KAChC,CAAC,CAAC;GACJ;CACF;;AAED,SAAS,mBAAmB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,EAAE;EACzD,IAAI,aAAa,CAAC,WAAW,KAAK,OAAO,CAAC,WAAW,IAAIA,OAAI,KAAKC,IAAY,IAAI,aAAa,CAAC,WAAW,CAAC,OAAO,KAAKC,SAAe,EAAE;IACvI,iBAAiB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;GAC3C,MAAM;IACL,IAAIF,OAAI,KAAK,eAAe,EAAE;MAC5B,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,KAAK,CAAC,CAAC;MACvC,eAAe,CAAC,KAAK,GAAG,IAAI,CAAC;KAC9B,MAAM,IAAIA,OAAI,KAAK,SAAS,EAAE;MAC7B,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC,MAAM,IAAI,UAAU,CAACA,OAAI,CAAC,EAAE;MAC3B,qBAAqB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,CAAC,CAAC;KACrD,MAAM;MACL,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC;GACF;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,KAAK,KAAK,EAAE;IACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;GACpC,MAAM,IAAI,gBAAgB,CAA
C,KAAK,CAAC,EAAE;IAClC,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAE,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC;GACrD,MAAM;IACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB;CACF;;AAED,SAAS,gBAAgB,CAAC,OAAO,EAAE;EACjC,IAAI,OAAO,CAAC,QAAQ,EAAE;IACpB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;GACnC;;EAED,OAAO,CAAC,OAAO,CAAC,CAAC;CAClB;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;;EAED,OAAO,CAAC,OAAO,GAAG,KAAK,CAAC;EACxB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;;EAE3B,IAAI,OAAO,CAAC,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;IACrC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;EACD,OAAO,CAAC,MAAM,GAAG,QAAQ,CAAC;EAC1B,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC;;EAEzB,IAAI,CAAC,gBAAgB,EAAE,OAAO,CAAC,CAAC;CACjC;;AAED,SAAS,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,EAAE;EAC5D,IAAI,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;EACvC,IAAI,MAAM,GAAG,YAAY,CAAC,MAAM,CAAC;;;EAGjC,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC;;EAEvB,YAAY,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC;EAC7B,YAAY,CAAC,MAAM,GAAG,SAAS,CAAC,GAAG,aAAa,CAAC;EACjD,YAAY,CAAC,MAAM,GAAG,QAAQ,CAAC,GAAG,WAAW,CAAC;;EAE9C,IAAI,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,MAAM,EAAE;IACjC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;GACvB;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE;EACxB,IAAI,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;EACvC,IAAI,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC;;EAE7B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;IAC5B,OAAO;GACR;;EAED,IAAI,KAAK,GAAG,KAAK,CAAC;MACd,QAAQ,GAAG,KAAK,CAAC;MACjB,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;;EAE7B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,IAAI,CAAC,EAAE;IAC9C,KAAK,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;IACvB,QAAQ,GAAG,WAAW,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC;;IAEpC,IAAI,KAAK,EAAE;MACT,cAAc,CAAC,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;KAClD,MAAM;MACL,QAAQ,CAAC,MAAM,CAAC,CAAC;KAClB;GACF;;EAED,OAAO,CAAC,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC;CACjC;;AAED,SAAS,QAAQ,CAAC,QAAQ,EAAE,MAAM,EAAE;EAClC,IAAI;IACF,OAAO,QAAQ,CAAC,MAAM,CAAC,CAAC;GACzB,CAAC,OAAO,CAAC,EAAE;IACV,eAAe,CAAC,KAAK,GAAG,CAAC,CAAC;IAC1B,OAAO,eAAe,CAAC;GACxB;CACF;;AAED,SAAS,cAAc,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE;EAC1D,IAAI,WAAW,GAAG,UAAU,CAAC,QAAQ,CAAC;MAClC,KAAK,GAAG,KAAK,CAAC;MACd,KAAK,GAAG,KAAK,CAAC;MACd,SAAS,GAAG,KAAK,CAAC;MAClB,MAAM,GAAG,KAAK,CAAC,CAAC;;EAEpB,IAAI,WAAW,EAAE;IACf,KAAK,GAAG,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;;IAEnC,IAAI,KAAK,KAAK,eAAe,EAAE;MAC7B,MAAM,GAAG,IAAI,CAAC;MACd,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC;MACpB,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC;KACpB,MAAM;MACL,SAAS,GAAG,IAAI,CAAC;KAClB;;IAED,IAAI,OAAO,KAAK,KAAK,EAAE;MACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;MACnC,OAAO;KACR;GACF,MAAM;IACL,KAAK,GAAG,MAAM,CAAC;IACf,SAAS,GAAG,IAAI,CAAC;GAClB;;EAED,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;;GAE/B,MAAM,IAAI,WAAW,IAAI,SAAS,EAAE;IACnC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,MAAM,EAAE;IACjB,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB,MAAM,IAAI,OAAO,KAAK,SAAS,EAAE;IAChC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,OAAO,KAAK,QAAQ,EAAE;IAC/B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI;IACF,QAAQ,CAAC,SAAS,cAAc,CAAC,KAAK,EAAE;MACtC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACzB,EAAE,SAAS,aAAa,CAAC,MAAM,EAAE;MAChC,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,CAAC,CAAC;GACJ,CAAC,OAAO,CAAC,EAAE;IACV,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;GACpB;CACF;;AAED,IAAI,EAAE,GAAG,CAAC,CAAC;AACX,SAAS,MAAM,GAAG;EAChB,OAAO,EAAE,EAAE,CAAC;CACb;;AAED,SAAS,WAAW,CAAC,OAAO,EAAE;EAC5B,OAAO,CAAC,UAAU,CAAC,GAAG,EAAE,EAAE,CAAC;EAC3B,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;EAC3B,OAAO,CAAC,OAAO,GAAG,SAAS,CAAC;EAC5B,OAAO,CAAC,YAAY,GA
AG,EAAE,CAAC;CAC3B;;ACrPD,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;CAC7D,AAAC;;AAEF,IAAI,UAAU,GAAG,YAAY;EAC3B,SAAS,UAAU,CAAC,WAAW,EAAE,KAAK,EAAE;IACtC,IAAI,CAAC,oBAAoB,GAAG,WAAW,CAAC;IACxC,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;;IAErC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;MAC7B,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;KAC3B;;IAED,IAAI,OAAO,CAAC,KAAK,CAAC,EAAE;MAClB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC;MAC3B,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,MAAM,CAAC;;MAE/B,IAAI,CAAC,OAAO,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;;MAEtC,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACrB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;OACrC,MAAM;QACL,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;QACvB,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;UACzB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;SACrC;OACF;KACF,MAAM;MACL,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;KACzC;GACF;;EAED,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE;IAC3D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,KAAK,OAAO,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;MAChE,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC9B;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE;IAC9D,IAAI,CAAC,GAAG,IAAI,CAAC,oBAAoB,CAAC;IAClC,IAAIF,UAAO,GAAG,CAAC,CAAC,OAAO,CAAC;;;IAGxB,IAAIA,UAAO,KAAKI,SAAe,EAAE;MAC/B,IAAI,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC;;MAE3B,IAAI,KAAK,KAAKD,IAAY,IAAI,KAAK,CAAC,MAAM,KAAK,OAAO,EAAE;QACtD,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;OACjD,MAAM,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;QACtC,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB,MAAM,IAAI,CAAC,KAAKE,SAAO,EAAE;QACxB,IAAI,OAAO,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC;QAC1B,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAC3C,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;OAChC,MAAM;QACL,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,UAAUL,UAAO,EAAE;UAC1C,OAAOA,UAAO,CAAC,KAAK,CAAC,CAAC;SACvB,CAAC,EAAE,CAAC,CAAC,CAAC;OACR;KACF,MAAM;MACL,IAAI,CAAC,aAAa,CAACA,UAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC;KACvC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE,KAAK,EAAE;IACrE,IAAI,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;;;IAG3B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;MAC9B,IAAI,CAAC,UAAU,EAAE,CAAC;;MAElB,IAAI,KAAK,KAAK,QAAQ,EAAE;QACtB,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACxB,MAAM;QACL,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB;KACF;;IAED,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;MACzB,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;KAChC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,aAAa,GAAG,SAAS,aAAa,CAAC,OAAO,EAAE,CAAC,EAAE;IACtE,IAAI,UAAU,GAAG,IAAI,CAAC;;IAEtB,SAAS,CAAC,OAAO,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC7C,OAAO,UAAU,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC;KACnD,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,UAAU,CAAC,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;KACnD,CAAC,CAAC;GACJ,CAAC;;EAEF,OAAO,UAAU,CAAC;CACnB,EAAE;;ACzGH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+CA,AAAe,SAAS,GAAG,CAAC,OAAO,EAAE;EACnC,OAAO,IAAI,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC,OAAO,CAAC;;;CAC9C,DCjDD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiEA,AAAe,SAAS,IAAI,CAAC,OAAO,EAAE;;EAEpC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;IACrB,OAAO,IAAI,WAAW,CAAC,UAAU,CAAC,EAAE,MAAM,EAAE;MAC1C,OAAO,MAAM,CAAC,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAC,CAAC;KACjE,CAAC,CAAC;GACJ,MAAM;IACL,OAAO,IAAI,WAAW,CAAC,UAAU,OAAO,EAAE,MAAM,EAAE;MAChD,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;MAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,
GAAG,MAAM,EAAE,CAAC,EAAE,EAAE;QAC/B,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;OACvD;KACF,CAAC,CAAC;GACJ;;;CACF,DCjFD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCA,AAAe,SAASM,QAAM,CAAC,MAAM,EAAE;;EAErC,IAAI,WAAW,GAAG,IAAI,CAAC;EACvB,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,MAAO,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EACzB,OAAO,OAAO,CAAC;;;CAChB,DC9BD,SAAS,aAAa,GAAG;EACvB,MAAM,IAAI,SAAS,CAAC,oFAAoF,CAAC,CAAC;CAC3G;;AAED,SAAS,QAAQ,GAAG;EAClB,MAAM,IAAI,SAAS,CAAC,uHAAuH,CAAC,CAAC;CAC9I;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0GD,IAAIF,SAAO,GAAG,YAAY;EACxB,SAAS,OAAO,CAAC,QAAQ,EAAE;IACzB,IAAI,CAAC,UAAU,CAAC,GAAG,MAAM,EAAE,CAAC;IAC5B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;IACvC,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;;IAEvB,IAAI,IAAI,KAAK,QAAQ,EAAE;MACrB,OAAO,QAAQ,KAAK,UAAU,IAAI,aAAa,EAAE,CAAC;MAClD,IAAI,YAAY,OAAO,GAAG,iBAAiB,CAAC,IAAI,EAAE,QAAQ,CAAC,GAAG,QAAQ,EAAE,CAAC;KAC1E;GACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA4LD,OAAO,CAAC,SAAS,CAAC,KAAK,GAAG,SAAS,MAAM,CAAC,WAAW,EAAE;IACrD,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;GACrC,CAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA0CF,OAAO,CAAC,SAAS,CAAC,OAAO,GAAG,SAAS,QAAQ,CAAC,QAAQ,EAAE;IACtD,IAAI,OAAO,GAAG,IAAI,CAAC;IACnB,IAAI,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;;IAEtC,OAAO,OAAO,CAAC,IAAI,CAAC,UAAU,KAAK,EAAE;MACnC,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;QACtD,OAAO,KAAK,CAAC;OACd,CAAC,CAAC;KACJ,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;QACtD,MAAM,MAAM,CAAC;OACd,CAAC,CAAC;KACJ,CAAC,CAAC;GACJ,CAAC;;EAEF,OAAO,OAAO,CAAC;CAChB,EAAE,CAAC;;AAEJA,SAAO,CAAC,SAAS,CAAC,IAAI,GAAG,IAAI,CAAC;AAC9B,AACAA,SAAO,CAAC,GAAG,GAAG,GAAG,CAAC;AAClBA,SAAO,CAAC,IAAI,GAAG,IAAI,CAAC;AACpBA,SAAO,CAAC,OAAO,GAAGG,SAAO,CAAC;AAC1BH,SAAO,CAAC,MAAM,GAAGI,QAAM,CAAC;AACxBJ,SAAO,CAAC,aAAa,GAAG,YAAY,CAAC;AACrCA,SAAO,CAAC,QAAQ,GAAG,OAAO,CAAC;AAC3BA,SAAO,CAAC,KAAK,GAAG,IAAI;;ACxYpB;AACA,AAEe,SAAS,QAAQ,GAAG;EACjC,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;;EAEnB,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;IACjC,KAAK,GAAG,MAAM,CAAC;GAChB,MAAM,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;IACtC,KAAK,GAAG,IAAI,CAAC;GACd,MAAM;IACL,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC;KACnC,CAAC,OAAO,CAAC,EAAE;MACV,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;KAC7F;GACF;;EAED,IAAI,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC;;EAEtB,IAAI,CAAC,EAAE;IACL,IAAI,eAAe,GAAG,IAAI,CAAC;IAC3B,IAAI;MACF,eAAe,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KAC/D,CAAC,OAAO,CAAC,EAAE;;KAEX;;IAED,IAAI,eAAe,KAAK,kBAAkB,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE;MACrD,OAAO;KACR;GACF;;EAED,KAAK,CAAC,OAAO,GAAGA,SAAO,CAAC;;;CACzB,DC/BD;AACAA,SAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC;AAC5BA,SAAO,CAAC,OAAO,GAAGA,SAAO,CAAC;;ACJ1BA,SAAO,CAAC,QAAQ,EAAE,CAAC;;;;;;;;","file":"es6-promise.auto.js"} \ No newline at end of file +{"version":3,"sources":["config/versionTemplate.txt","lib/es6-promise/utils.js","lib/es6-promise/asap.js","lib/es6-promise/then.js","lib/es6-promise/promise/resolve.js","lib/es6-promise/-internal.js","lib/es6-promise/enumerator.js","lib/es6-promise/promise/all.js","lib/es6-promise/promise/race.js","lib/es6-promise/promise/reject.js","lib/es6-promise/promise.js","lib/es6-promise/polyfill.js","lib/es6-promise.js","lib/es6-promise.auto.js"],"sourcesContent":["/*!\n * @overview es6-promise - a tiny implementation of Promises/A+.\n * @copyright Copyright 
(c) 2014 Yehuda Katz, Tom Dale, Stefan Penner and contributors (Conversion to ES6 API by Jake Archibald)\n * @license Licensed under MIT license\n * See https://raw.githubusercontent.com/stefanpenner/es6-promise/master/LICENSE\n * @version v4.2.8+1e68dce6\n */\n","export function objectOrFunction(x) {\n var type = typeof x;\n return x !== null && (type === 'object' || type === 'function');\n}\n\nexport function isFunction(x) {\n return typeof x === 'function';\n}\n\nexport function isMaybeThenable(x) {\n return x !== null && typeof x === 'object';\n}\n\nvar _isArray = void 0;\nif (Array.isArray) {\n _isArray = Array.isArray;\n} else {\n _isArray = function (x) {\n return Object.prototype.toString.call(x) === '[object Array]';\n };\n}\n\nexport var isArray = _isArray;","var len = 0;\nvar vertxNext = void 0;\nvar customSchedulerFn = void 0;\n\nexport var asap = function asap(callback, arg) {\n queue[len] = callback;\n queue[len + 1] = arg;\n len += 2;\n if (len === 2) {\n // If len is 2, that means that we need to schedule an async flush.\n // If additional callbacks are queued before the queue is flushed, they\n // will be processed by this flush that we are scheduling.\n if (customSchedulerFn) {\n customSchedulerFn(flush);\n } else {\n scheduleFlush();\n }\n }\n};\n\nexport function setScheduler(scheduleFn) {\n customSchedulerFn = scheduleFn;\n}\n\nexport function setAsap(asapFn) {\n asap = asapFn;\n}\n\nvar browserWindow = typeof window !== 'undefined' ? window : undefined;\nvar browserGlobal = browserWindow || {};\nvar BrowserMutationObserver = browserGlobal.MutationObserver || browserGlobal.WebKitMutationObserver;\nvar isNode = typeof self === 'undefined' && typeof process !== 'undefined' && {}.toString.call(process) === '[object process]';\n\n// test for web worker but not in IE10\nvar isWorker = typeof Uint8ClampedArray !== 'undefined' && typeof importScripts !== 'undefined' && typeof MessageChannel !== 'undefined';\n\n// node\nfunction useNextTick() {\n // node version 0.10.x displays a deprecation warning when nextTick is used recursively\n // see https://github.com/cujojs/when/issues/410 for details\n return function () {\n return process.nextTick(flush);\n };\n}\n\n// vertx\nfunction useVertxTimer() {\n if (typeof vertxNext !== 'undefined') {\n return function () {\n vertxNext(flush);\n };\n }\n\n return useSetTimeout();\n}\n\nfunction useMutationObserver() {\n var iterations = 0;\n var observer = new BrowserMutationObserver(flush);\n var node = document.createTextNode('');\n observer.observe(node, { characterData: true });\n\n return function () {\n node.data = iterations = ++iterations % 2;\n };\n}\n\n// web worker\nfunction useMessageChannel() {\n var channel = new MessageChannel();\n channel.port1.onmessage = flush;\n return function () {\n return channel.port2.postMessage(0);\n };\n}\n\nfunction useSetTimeout() {\n // Store setTimeout reference so es6-promise will be unaffected by\n // other code modifying setTimeout (like sinon.useFakeTimers())\n var globalSetTimeout = setTimeout;\n return function () {\n return globalSetTimeout(flush, 1);\n };\n}\n\nvar queue = new Array(1000);\nfunction flush() {\n for (var i = 0; i < len; i += 2) {\n var callback = queue[i];\n var arg = queue[i + 1];\n\n callback(arg);\n\n queue[i] = undefined;\n queue[i + 1] = undefined;\n }\n\n len = 0;\n}\n\nfunction attemptVertx() {\n try {\n var vertx = Function('return this')().require('vertx');\n vertxNext = vertx.runOnLoop || vertx.runOnContext;\n return useVertxTimer();\n } catch (e) {\n return 
useSetTimeout();\n }\n}\n\nvar scheduleFlush = void 0;\n// Decide what async method to use to triggering processing of queued callbacks:\nif (isNode) {\n scheduleFlush = useNextTick();\n} else if (BrowserMutationObserver) {\n scheduleFlush = useMutationObserver();\n} else if (isWorker) {\n scheduleFlush = useMessageChannel();\n} else if (browserWindow === undefined && typeof require === 'function') {\n scheduleFlush = attemptVertx();\n} else {\n scheduleFlush = useSetTimeout();\n}","import { invokeCallback, subscribe, FULFILLED, REJECTED, noop, makePromise, PROMISE_ID } from './-internal';\n\nimport { asap } from './asap';\n\nexport default function then(onFulfillment, onRejection) {\n var parent = this;\n\n var child = new this.constructor(noop);\n\n if (child[PROMISE_ID] === undefined) {\n makePromise(child);\n }\n\n var _state = parent._state;\n\n\n if (_state) {\n var callback = arguments[_state - 1];\n asap(function () {\n return invokeCallback(_state, child, callback, parent._result);\n });\n } else {\n subscribe(parent, child, onFulfillment, onRejection);\n }\n\n return child;\n}","import { noop, resolve as _resolve } from '../-internal';\n\n/**\n `Promise.resolve` returns a promise that will become resolved with the\n passed `value`. It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n resolve(1);\n });\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n ```javascript\n let promise = Promise.resolve(1);\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n @method resolve\n @static\n @param {Any} value value that the returned promise will be resolved with\n Useful for tooling.\n @return {Promise} a promise that will become fulfilled with the given\n `value`\n*/\nexport default function resolve(object) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (object && typeof object === 'object' && object.constructor === Constructor) {\n return object;\n }\n\n var promise = new Constructor(noop);\n _resolve(promise, object);\n return promise;\n}","import { objectOrFunction, isFunction } from './utils';\n\nimport { asap } from './asap';\n\nimport originalThen from './then';\nimport originalResolve from './promise/resolve';\n\nexport var PROMISE_ID = Math.random().toString(36).substring(2);\n\nfunction noop() {}\n\nvar PENDING = void 0;\nvar FULFILLED = 1;\nvar REJECTED = 2;\n\nfunction selfFulfillment() {\n return new TypeError(\"You cannot resolve a promise with itself\");\n}\n\nfunction cannotReturnOwn() {\n return new TypeError('A promises callback cannot return that same promise.');\n}\n\nfunction tryThen(then, value, fulfillmentHandler, rejectionHandler) {\n try {\n then.call(value, fulfillmentHandler, rejectionHandler);\n } catch (e) {\n return e;\n }\n}\n\nfunction handleForeignThenable(promise, thenable, then) {\n asap(function (promise) {\n var sealed = false;\n var error = tryThen(then, thenable, function (value) {\n if (sealed) {\n return;\n }\n sealed = true;\n if (thenable !== value) {\n resolve(promise, value);\n } else {\n fulfill(promise, value);\n }\n }, function (reason) {\n if (sealed) {\n return;\n }\n sealed = true;\n\n reject(promise, reason);\n }, 'Settle: ' + (promise._label || ' unknown promise'));\n\n if (!sealed && error) {\n sealed = true;\n reject(promise, error);\n }\n }, promise);\n}\n\nfunction handleOwnThenable(promise, thenable) {\n if (thenable._state === FULFILLED) {\n 
fulfill(promise, thenable._result);\n } else if (thenable._state === REJECTED) {\n reject(promise, thenable._result);\n } else {\n subscribe(thenable, undefined, function (value) {\n return resolve(promise, value);\n }, function (reason) {\n return reject(promise, reason);\n });\n }\n}\n\nfunction handleMaybeThenable(promise, maybeThenable, then) {\n if (maybeThenable.constructor === promise.constructor && then === originalThen && maybeThenable.constructor.resolve === originalResolve) {\n handleOwnThenable(promise, maybeThenable);\n } else {\n if (then === undefined) {\n fulfill(promise, maybeThenable);\n } else if (isFunction(then)) {\n handleForeignThenable(promise, maybeThenable, then);\n } else {\n fulfill(promise, maybeThenable);\n }\n }\n}\n\nfunction resolve(promise, value) {\n if (promise === value) {\n reject(promise, selfFulfillment());\n } else if (objectOrFunction(value)) {\n var then = void 0;\n try {\n then = value.then;\n } catch (error) {\n reject(promise, error);\n return;\n }\n handleMaybeThenable(promise, value, then);\n } else {\n fulfill(promise, value);\n }\n}\n\nfunction publishRejection(promise) {\n if (promise._onerror) {\n promise._onerror(promise._result);\n }\n\n publish(promise);\n}\n\nfunction fulfill(promise, value) {\n if (promise._state !== PENDING) {\n return;\n }\n\n promise._result = value;\n promise._state = FULFILLED;\n\n if (promise._subscribers.length !== 0) {\n asap(publish, promise);\n }\n}\n\nfunction reject(promise, reason) {\n if (promise._state !== PENDING) {\n return;\n }\n promise._state = REJECTED;\n promise._result = reason;\n\n asap(publishRejection, promise);\n}\n\nfunction subscribe(parent, child, onFulfillment, onRejection) {\n var _subscribers = parent._subscribers;\n var length = _subscribers.length;\n\n\n parent._onerror = null;\n\n _subscribers[length] = child;\n _subscribers[length + FULFILLED] = onFulfillment;\n _subscribers[length + REJECTED] = onRejection;\n\n if (length === 0 && parent._state) {\n asap(publish, parent);\n }\n}\n\nfunction publish(promise) {\n var subscribers = promise._subscribers;\n var settled = promise._state;\n\n if (subscribers.length === 0) {\n return;\n }\n\n var child = void 0,\n callback = void 0,\n detail = promise._result;\n\n for (var i = 0; i < subscribers.length; i += 3) {\n child = subscribers[i];\n callback = subscribers[i + settled];\n\n if (child) {\n invokeCallback(settled, child, callback, detail);\n } else {\n callback(detail);\n }\n }\n\n promise._subscribers.length = 0;\n}\n\nfunction invokeCallback(settled, promise, callback, detail) {\n var hasCallback = isFunction(callback),\n value = void 0,\n error = void 0,\n succeeded = true;\n\n if (hasCallback) {\n try {\n value = callback(detail);\n } catch (e) {\n succeeded = false;\n error = e;\n }\n\n if (promise === value) {\n reject(promise, cannotReturnOwn());\n return;\n }\n } else {\n value = detail;\n }\n\n if (promise._state !== PENDING) {\n // noop\n } else if (hasCallback && succeeded) {\n resolve(promise, value);\n } else if (succeeded === false) {\n reject(promise, error);\n } else if (settled === FULFILLED) {\n fulfill(promise, value);\n } else if (settled === REJECTED) {\n reject(promise, value);\n }\n}\n\nfunction initializePromise(promise, resolver) {\n try {\n resolver(function resolvePromise(value) {\n resolve(promise, value);\n }, function rejectPromise(reason) {\n reject(promise, reason);\n });\n } catch (e) {\n reject(promise, e);\n }\n}\n\nvar id = 0;\nfunction nextId() {\n return id++;\n}\n\nfunction makePromise(promise) 
{\n promise[PROMISE_ID] = id++;\n promise._state = undefined;\n promise._result = undefined;\n promise._subscribers = [];\n}\n\nexport { nextId, makePromise, noop, resolve, reject, fulfill, subscribe, publish, publishRejection, initializePromise, invokeCallback, FULFILLED, REJECTED, PENDING, handleMaybeThenable };","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isArray, isMaybeThenable } from './utils';\nimport { noop, reject, fulfill, subscribe, FULFILLED, REJECTED, PENDING, handleMaybeThenable } from './-internal';\n\nimport then from './then';\nimport Promise from './promise';\nimport originalResolve from './promise/resolve';\nimport originalThen from './then';\nimport { makePromise, PROMISE_ID } from './-internal';\n\nfunction validationError() {\n return new Error('Array Methods must be provided an Array');\n};\n\nvar Enumerator = function () {\n function Enumerator(Constructor, input) {\n this._instanceConstructor = Constructor;\n this.promise = new Constructor(noop);\n\n if (!this.promise[PROMISE_ID]) {\n makePromise(this.promise);\n }\n\n if (isArray(input)) {\n this.length = input.length;\n this._remaining = input.length;\n\n this._result = new Array(this.length);\n\n if (this.length === 0) {\n fulfill(this.promise, this._result);\n } else {\n this.length = this.length || 0;\n this._enumerate(input);\n if (this._remaining === 0) {\n fulfill(this.promise, this._result);\n }\n }\n } else {\n reject(this.promise, validationError());\n }\n }\n\n Enumerator.prototype._enumerate = function _enumerate(input) {\n for (var i = 0; this._state === PENDING && i < input.length; i++) {\n this._eachEntry(input[i], i);\n }\n };\n\n Enumerator.prototype._eachEntry = function _eachEntry(entry, i) {\n var c = this._instanceConstructor;\n var resolve = c.resolve;\n\n\n if (resolve === originalResolve) {\n var _then = void 0;\n var error = void 0;\n var didError = false;\n try {\n _then = entry.then;\n } catch (e) {\n didError = true;\n error = e;\n }\n\n if (_then === originalThen && entry._state !== PENDING) {\n this._settledAt(entry._state, i, entry._result);\n } else if (typeof _then !== 'function') {\n this._remaining--;\n this._result[i] = entry;\n } else if (c === Promise) {\n var promise = new c(noop);\n if (didError) {\n reject(promise, error);\n } else {\n handleMaybeThenable(promise, entry, _then);\n }\n this._willSettleAt(promise, i);\n } else {\n this._willSettleAt(new c(function (resolve) {\n return resolve(entry);\n }), i);\n }\n } else {\n this._willSettleAt(resolve(entry), i);\n }\n };\n\n Enumerator.prototype._settledAt = function _settledAt(state, i, value) {\n var promise = this.promise;\n\n\n if (promise._state === PENDING) {\n this._remaining--;\n\n if (state === REJECTED) {\n reject(promise, value);\n } else {\n this._result[i] = value;\n }\n }\n\n if (this._remaining === 0) {\n fulfill(promise, this._result);\n }\n };\n\n Enumerator.prototype._willSettleAt = function _willSettleAt(promise, i) {\n var enumerator = this;\n\n subscribe(promise, undefined, function (value) {\n return enumerator._settledAt(FULFILLED, i, value);\n }, function (reason) {\n return enumerator._settledAt(REJECTED, i, reason);\n });\n };\n\n return Enumerator;\n}();\n\nexport default Enumerator;\n;","import Enumerator from '../enumerator';\n\n/**\n `Promise.all` accepts an array of promises, and returns a new promise which\n is fulfilled with an array of fulfillment values for the 
passed promises, or\n rejected with the reason of the first passed promise to be rejected. It casts all\n elements of the passed iterable to promises as it runs this algorithm.\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = resolve(2);\n let promise3 = resolve(3);\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // The array here would be [ 1, 2, 3 ];\n });\n ```\n\n If any of the `promises` given to `all` are rejected, the first promise\n that is rejected will be given as an argument to the returned promises's\n rejection handler. For example:\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = reject(new Error(\"2\"));\n let promise3 = reject(new Error(\"3\"));\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // Code here never runs because there are rejected promises!\n }, function(error) {\n // error.message === \"2\"\n });\n ```\n\n @method all\n @static\n @param {Array} entries array of promises\n @param {String} label optional string for labeling the promise.\n Useful for tooling.\n @return {Promise} promise that is fulfilled when all `promises` have been\n fulfilled, or rejected if any of them become rejected.\n @static\n*/\nexport default function all(entries) {\n return new Enumerator(this, entries).promise;\n}","import { isArray } from \"../utils\";\n\n/**\n `Promise.race` returns a new promise which is settled in the same way as the\n first passed promise to settle.\n\n Example:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 2');\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // result === 'promise 2' because it was resolved before promise1\n // was resolved.\n });\n ```\n\n `Promise.race` is deterministic in that only the state of the first\n settled promise matters. 
For example, even if other promises given to the\n `promises` array argument are resolved, but the first settled promise has\n become rejected before the other promises became fulfilled, the returned\n promise will become rejected:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n reject(new Error('promise 2'));\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // Code here never runs\n }, function(reason){\n // reason.message === 'promise 2' because promise 2 became rejected before\n // promise 1 became fulfilled\n });\n ```\n\n An example real-world use case is implementing timeouts:\n\n ```javascript\n Promise.race([ajax('foo.json'), timeout(5000)])\n ```\n\n @method race\n @static\n @param {Array} promises array of promises to observe\n Useful for tooling.\n @return {Promise} a promise which settles in the same way as the first passed\n promise to settle.\n*/\nexport default function race(entries) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (!isArray(entries)) {\n return new Constructor(function (_, reject) {\n return reject(new TypeError('You must pass an array to race.'));\n });\n } else {\n return new Constructor(function (resolve, reject) {\n var length = entries.length;\n for (var i = 0; i < length; i++) {\n Constructor.resolve(entries[i]).then(resolve, reject);\n }\n });\n }\n}","import { noop, reject as _reject } from '../-internal';\n\n/**\n `Promise.reject` returns a promise rejected with the passed `reason`.\n It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n reject(new Error('WHOOPS'));\n });\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n ```javascript\n let promise = Promise.reject(new Error('WHOOPS'));\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n @method reject\n @static\n @param {Any} reason value that the returned promise will be rejected with.\n Useful for tooling.\n @return {Promise} a promise rejected with the given `reason`.\n*/\nexport default function reject(reason) {\n /*jshint validthis:true */\n var Constructor = this;\n var promise = new Constructor(noop);\n _reject(promise, reason);\n return promise;\n}","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isFunction } from './utils';\nimport { noop, nextId, PROMISE_ID, initializePromise } from './-internal';\nimport { asap, setAsap, setScheduler } from './asap';\n\nimport all from './promise/all';\nimport race from './promise/race';\nimport Resolve from './promise/resolve';\nimport Reject from './promise/reject';\nimport then from './then';\n\nfunction needsResolver() {\n throw new TypeError('You must pass a resolver function as the first argument to the promise constructor');\n}\n\nfunction needsNew() {\n throw new TypeError(\"Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.\");\n}\n\n/**\n Promise objects represent the eventual 
result of an asynchronous operation. The\n primary way of interacting with a promise is through its `then` method, which\n registers callbacks to receive either a promise's eventual value or the reason\n why the promise cannot be fulfilled.\n\n Terminology\n -----------\n\n - `promise` is an object or function with a `then` method whose behavior conforms to this specification.\n - `thenable` is an object or function that defines a `then` method.\n - `value` is any legal JavaScript value (including undefined, a thenable, or a promise).\n - `exception` is a value that is thrown using the throw statement.\n - `reason` is a value that indicates why a promise was rejected.\n - `settled` the final resting state of a promise, fulfilled or rejected.\n\n A promise can be in one of three states: pending, fulfilled, or rejected.\n\n Promises that are fulfilled have a fulfillment value and are in the fulfilled\n state. Promises that are rejected have a rejection reason and are in the\n rejected state. A fulfillment value is never a thenable.\n\n Promises can also be said to *resolve* a value. If this value is also a\n promise, then the original promise's settled state will match the value's\n settled state. So a promise that *resolves* a promise that rejects will\n itself reject, and a promise that *resolves* a promise that fulfills will\n itself fulfill.\n\n\n Basic Usage:\n ------------\n\n ```js\n let promise = new Promise(function(resolve, reject) {\n // on success\n resolve(value);\n\n // on failure\n reject(reason);\n });\n\n promise.then(function(value) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Advanced Usage:\n ---------------\n\n Promises shine when abstracting away asynchronous interactions such as\n `XMLHttpRequest`s.\n\n ```js\n function getJSON(url) {\n return new Promise(function(resolve, reject){\n let xhr = new XMLHttpRequest();\n\n xhr.open('GET', url);\n xhr.onreadystatechange = handler;\n xhr.responseType = 'json';\n xhr.setRequestHeader('Accept', 'application/json');\n xhr.send();\n\n function handler() {\n if (this.readyState === this.DONE) {\n if (this.status === 200) {\n resolve(this.response);\n } else {\n reject(new Error('getJSON: `' + url + '` failed with status: [' + this.status + ']'));\n }\n }\n };\n });\n }\n\n getJSON('/posts.json').then(function(json) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Unlike callbacks, promises are great composable primitives.\n\n ```js\n Promise.all([\n getJSON('/posts'),\n getJSON('/comments')\n ]).then(function(values){\n values[0] // => postsJSON\n values[1] // => commentsJSON\n\n return values;\n });\n ```\n\n @class Promise\n @param {Function} resolver\n Useful for tooling.\n @constructor\n*/\n\nvar Promise = function () {\n function Promise(resolver) {\n this[PROMISE_ID] = nextId();\n this._result = this._state = undefined;\n this._subscribers = [];\n\n if (noop !== resolver) {\n typeof resolver !== 'function' && needsResolver();\n this instanceof Promise ? initializePromise(this, resolver) : needsNew();\n }\n }\n\n /**\n The primary way of interacting with a promise is through its `then` method,\n which registers callbacks to receive either a promise's eventual value or the\n reason why the promise cannot be fulfilled.\n ```js\n findUser().then(function(user){\n // user is available\n }, function(reason){\n // user is unavailable, and you are given the reason why\n });\n ```\n Chaining\n --------\n The return value of `then` is itself a promise. 
This second, 'downstream'\n promise is resolved with the return value of the first promise's fulfillment\n or rejection handler, or rejected if the handler throws an exception.\n ```js\n findUser().then(function (user) {\n return user.name;\n }, function (reason) {\n return 'default name';\n }).then(function (userName) {\n // If `findUser` fulfilled, `userName` will be the user's name, otherwise it\n // will be `'default name'`\n });\n findUser().then(function (user) {\n throw new Error('Found user, but still unhappy');\n }, function (reason) {\n throw new Error('`findUser` rejected and we're unhappy');\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // if `findUser` fulfilled, `reason` will be 'Found user, but still unhappy'.\n // If `findUser` rejected, `reason` will be '`findUser` rejected and we're unhappy'.\n });\n ```\n If the downstream promise does not specify a rejection handler, rejection reasons will be propagated further downstream.\n ```js\n findUser().then(function (user) {\n throw new PedagogicalException('Upstream error');\n }).then(function (value) {\n // never reached\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // The `PedgagocialException` is propagated all the way down to here\n });\n ```\n Assimilation\n ------------\n Sometimes the value you want to propagate to a downstream promise can only be\n retrieved asynchronously. This can be achieved by returning a promise in the\n fulfillment or rejection handler. The downstream promise will then be pending\n until the returned promise is settled. This is called *assimilation*.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // The user's comments are now available\n });\n ```\n If the assimliated promise rejects, then the downstream promise will also reject.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // If `findCommentsByAuthor` fulfills, we'll have the value here\n }, function (reason) {\n // If `findCommentsByAuthor` rejects, we'll have the reason here\n });\n ```\n Simple Example\n --------------\n Synchronous Example\n ```javascript\n let result;\n try {\n result = findResult();\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n findResult(function(result, err){\n if (err) {\n // failure\n } else {\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findResult().then(function(result){\n // success\n }, function(reason){\n // failure\n });\n ```\n Advanced Example\n --------------\n Synchronous Example\n ```javascript\n let author, books;\n try {\n author = findAuthor();\n books = findBooksByAuthor(author);\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n function foundBooks(books) {\n }\n function failure(reason) {\n }\n findAuthor(function(author, err){\n if (err) {\n failure(err);\n // failure\n } else {\n try {\n findBoooksByAuthor(author, function(books, err) {\n if (err) {\n failure(err);\n } else {\n try {\n foundBooks(books);\n } catch(reason) {\n failure(reason);\n }\n }\n });\n } catch(error) {\n failure(err);\n }\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findAuthor().\n then(findBooksByAuthor).\n then(function(books){\n // found books\n }).catch(function(reason){\n // something went wrong\n });\n ```\n @method then\n @param {Function} onFulfilled\n @param {Function} onRejected\n Useful for tooling.\n @return 
{Promise}\n */\n\n /**\n `catch` is simply sugar for `then(undefined, onRejection)` which makes it the same\n as the catch block of a try/catch statement.\n ```js\n function findAuthor(){\n throw new Error('couldn't find that author');\n }\n // synchronous\n try {\n findAuthor();\n } catch(reason) {\n // something went wrong\n }\n // async with promises\n findAuthor().catch(function(reason){\n // something went wrong\n });\n ```\n @method catch\n @param {Function} onRejection\n Useful for tooling.\n @return {Promise}\n */\n\n\n Promise.prototype.catch = function _catch(onRejection) {\n return this.then(null, onRejection);\n };\n\n /**\n `finally` will be invoked regardless of the promise's fate just as native\n try/catch/finally behaves\n \n Synchronous example:\n \n ```js\n findAuthor() {\n if (Math.random() > 0.5) {\n throw new Error();\n }\n return new Author();\n }\n \n try {\n return findAuthor(); // succeed or fail\n } catch(error) {\n return findOtherAuther();\n } finally {\n // always runs\n // doesn't affect the return value\n }\n ```\n \n Asynchronous example:\n \n ```js\n findAuthor().catch(function(reason){\n return findOtherAuther();\n }).finally(function(){\n // author was either found, or not\n });\n ```\n \n @method finally\n @param {Function} callback\n @return {Promise}\n */\n\n\n Promise.prototype.finally = function _finally(callback) {\n var promise = this;\n var constructor = promise.constructor;\n\n if (isFunction(callback)) {\n return promise.then(function (value) {\n return constructor.resolve(callback()).then(function () {\n return value;\n });\n }, function (reason) {\n return constructor.resolve(callback()).then(function () {\n throw reason;\n });\n });\n }\n\n return promise.then(callback, callback);\n };\n\n return Promise;\n}();\n\nPromise.prototype.then = then;\nexport default Promise;\nPromise.all = all;\nPromise.race = race;\nPromise.resolve = Resolve;\nPromise.reject = Reject;\nPromise._setScheduler = setScheduler;\nPromise._setAsap = setAsap;\nPromise._asap = asap;","/*global self*/\nimport Promise from './promise';\n\nexport default function polyfill() {\n var local = void 0;\n\n if (typeof global !== 'undefined') {\n local = global;\n } else if (typeof self !== 'undefined') {\n local = self;\n } else {\n try {\n local = Function('return this')();\n } catch (e) {\n throw new Error('polyfill failed because global object is unavailable in this environment');\n }\n }\n\n var P = local.Promise;\n\n if (P) {\n var promiseToString = null;\n try {\n promiseToString = Object.prototype.toString.call(P.resolve());\n } catch (e) {\n // silently ignored\n }\n\n if (promiseToString === '[object Promise]' && !P.cast) {\n return;\n }\n }\n\n local.Promise = Promise;\n}","import Promise from './es6-promise/promise';\nimport polyfill from './es6-promise/polyfill';\n\n// Strange compat..\nPromise.polyfill = polyfill;\nPromise.Promise = Promise;\nexport default Promise;","import Promise from './es6-promise';\nPromise.polyfill();\nexport default 
Promise;"],"names":["resolve","_resolve","then","originalThen","originalResolve","Promise","reject","_reject","Resolve","Reject"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACNO,SAAS,gBAAgB,CAAC,CAAC,EAAE;EAClC,IAAI,IAAI,GAAG,OAAO,CAAC,CAAC;EACpB,OAAO,CAAC,KAAK,IAAI,KAAK,IAAI,KAAK,QAAQ,IAAI,IAAI,KAAK,UAAU,CAAC,CAAC;CACjE;;AAED,AAAO,SAAS,UAAU,CAAC,CAAC,EAAE;EAC5B,OAAO,OAAO,CAAC,KAAK,UAAU,CAAC;CAChC;;AAED,AAEC;;AAED,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC;AACtB,IAAI,KAAK,CAAC,OAAO,EAAE;EACjB,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC;CAC1B,MAAM;EACL,QAAQ,GAAG,UAAU,CAAC,EAAE;IACtB,OAAO,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,gBAAgB,CAAC;GAC/D,CAAC;CACH;;AAED,AAAO,IAAI,OAAO,GAAG,QAAQ;;ACtB7B,IAAI,GAAG,GAAG,CAAC,CAAC;AACZ,IAAI,SAAS,GAAG,KAAK,CAAC,CAAC;AACvB,IAAI,iBAAiB,GAAG,KAAK,CAAC,CAAC;;AAE/B,AAAO,IAAI,IAAI,GAAG,SAAS,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;EAC7C,KAAK,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC;EACtB,KAAK,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;EACrB,GAAG,IAAI,CAAC,CAAC;EACT,IAAI,GAAG,KAAK,CAAC,EAAE;;;;IAIb,IAAI,iBAAiB,EAAE;MACrB,iBAAiB,CAAC,KAAK,CAAC,CAAC;KAC1B,MAAM;MACL,aAAa,EAAE,CAAC;KACjB;GACF;CACF,CAAC;;AAEF,AAAO,SAAS,YAAY,CAAC,UAAU,EAAE;EACvC,iBAAiB,GAAG,UAAU,CAAC;CAChC;;AAED,AAAO,SAAS,OAAO,CAAC,MAAM,EAAE;EAC9B,IAAI,GAAG,MAAM,CAAC;CACf;;AAED,IAAI,aAAa,GAAG,OAAO,MAAM,KAAK,WAAW,GAAG,MAAM,GAAG,SAAS,CAAC;AACvE,IAAI,aAAa,GAAG,aAAa,IAAI,EAAE,CAAC;AACxC,IAAI,uBAAuB,GAAG,aAAa,CAAC,gBAAgB,IAAI,aAAa,CAAC,sBAAsB,CAAC;AACrG,IAAI,MAAM,GAAG,OAAO,IAAI,KAAK,WAAW,IAAI,OAAO,OAAO,KAAK,WAAW,IAAI,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,kBAAkB,CAAC;;;AAG/H,IAAI,QAAQ,GAAG,OAAO,iBAAiB,KAAK,WAAW,IAAI,OAAO,aAAa,KAAK,WAAW,IAAI,OAAO,cAAc,KAAK,WAAW,CAAC;;;AAGzI,SAAS,WAAW,GAAG;;;EAGrB,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;GAChC,CAAC;CACH;;;AAGD,SAAS,aAAa,GAAG;EACvB,IAAI,OAAO,SAAS,KAAK,WAAW,EAAE;IACpC,OAAO,YAAY;MACjB,SAAS,CAAC,KAAK,CAAC,CAAC;KAClB,CAAC;GACH;;EAED,OAAO,aAAa,EAAE,CAAC;CACxB;;AAED,SAAS,mBAAmB,GAAG;EAC7B,IAAI,UAAU,GAAG,CAAC,CAAC;EACnB,IAAI,QAAQ,GAAG,IAAI,uBAAuB,CAAC,KAAK,CAAC,CAAC;EAClD,IAAI,IAAI,GAAG,QAAQ,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC;EACvC,QAAQ,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;;EAEhD,OAAO,YAAY;IACjB,IAAI,CAAC,IAAI,GAAG,UAAU,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC;GAC3C,CAAC;CACH;;;AAGD,SAAS,iBAAiB,GAAG;EAC3B,IAAI,OAAO,GAAG,IAAI,cAAc,EAAE,CAAC;EACnC,OAAO,CAAC,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC;EAChC,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;GACrC,CAAC;CACH;;AAED,SAAS,aAAa,GAAG;;;EAGvB,IAAI,gBAAgB,GAAG,UAAU,CAAC;EAClC,OAAO,YAAY;IACjB,OAAO,gBAAgB,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;GACnC,CAAC;CACH;;AAED,IAAI,KAAK,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC;AAC5B,SAAS,KAAK,GAAG;EACf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE;IAC/B,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,GAAG,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;;IAEvB,QAAQ,CAAC,GAAG,CAAC,CAAC;;IAEd,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC;IACrB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;GAC1B;;EAED,GAAG,GAAG,CAAC,CAAC;CACT;;AAED,SAAS,YAAY,GAAG;EACtB,IAAI;IACF,IAAI,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;IACvD,SAAS,GAAG,KAAK,CAAC,SAAS,IAAI,KAAK,CAAC,YAAY,CAAC;IAClD,OAAO,aAAa,EAAE,CAAC;GACxB,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,aAAa,EAAE,CAAC;GACxB;CACF;;AAED,IAAI,aAAa,GAAG,KAAK,CAAC,CAAC;;AAE3B,IAAI,MAAM,EAAE;EACV,aAAa,GAAG,WAAW,EAAE,CAAC;CAC/B,MAAM,IAAI,uBAAuB,EAAE;EAClC,aAAa,GAAG,mBAAmB,EAAE,CAAC;CACvC,MAAM,IAAI,QAAQ,EAAE;EACnB,aAAa,GAAG,iBAAiB,EAAE,CAAC;CACrC,MAAM,IAAI,aAAa,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;EACvE,aAAa,GAAG,YAAY,EAAE,CAAC;CAChC,MAAM;EACL,aA
Aa,GAAG,aAAa,EAAE,CAAC;;;CACjC,DCtHc,SAAS,IAAI,CAAC,aAAa,EAAE,WAAW,EAAE;EACvD,IAAI,MAAM,GAAG,IAAI,CAAC;;EAElB,IAAI,KAAK,GAAG,IAAI,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;;EAEvC,IAAI,KAAK,CAAC,UAAU,CAAC,KAAK,SAAS,EAAE;IACnC,WAAW,CAAC,KAAK,CAAC,CAAC;GACpB;;EAED,IAAI,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;;;EAG3B,IAAI,MAAM,EAAE;IACV,IAAI,QAAQ,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IACrC,IAAI,CAAC,YAAY;MACf,OAAO,cAAc,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;KAChE,CAAC,CAAC;GACJ,MAAM;IACL,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,CAAC,CAAC;GACtD;;EAED,OAAO,KAAK,CAAC;;;CACd,DCxBD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+BA,AAAe,SAASA,SAAO,CAAC,MAAM,EAAE;;EAEtC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,CAAC,WAAW,KAAK,WAAW,EAAE;IAC9E,OAAO,MAAM,CAAC;GACf;;EAED,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,OAAQ,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EAC1B,OAAO,OAAO,CAAC;;;CAChB,DCrCM,IAAI,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;;AAEhE,SAAS,IAAI,GAAG,EAAE;;AAElB,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC;AACrB,IAAI,SAAS,GAAG,CAAC,CAAC;AAClB,IAAI,QAAQ,GAAG,CAAC,CAAC;;AAEjB,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,0CAA0C,CAAC,CAAC;CAClE;;AAED,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,sDAAsD,CAAC,CAAC;CAC9E;;AAED,SAAS,OAAO,CAACC,OAAI,EAAE,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,EAAE;EAClE,IAAI;IACFA,OAAI,CAAC,IAAI,CAAC,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;GACxD,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,CAAC;GACV;CACF;;AAED,SAAS,qBAAqB,CAAC,OAAO,EAAE,QAAQ,EAAEA,OAAI,EAAE;EACtD,IAAI,CAAC,UAAU,OAAO,EAAE;IACtB,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,IAAI,KAAK,GAAG,OAAO,CAACA,OAAI,EAAE,QAAQ,EAAE,UAAU,KAAK,EAAE;MACnD,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;MACd,IAAI,QAAQ,KAAK,KAAK,EAAE;QACtB,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB,MAAM;QACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB;KACF,EAAE,UAAU,MAAM,EAAE;MACnB,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;;MAEd,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,EAAE,UAAU,IAAI,OAAO,CAAC,MAAM,IAAI,kBAAkB,CAAC,CAAC,CAAC;;IAExD,IAAI,CAAC,MAAM,IAAI,KAAK,EAAE;MACpB,MAAM,GAAG,IAAI,CAAC;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACxB;GACF,EAAE,OAAO,CAAC,CAAC;CACb;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI,QAAQ,CAAC,MAAM,KAAK,SAAS,EAAE;IACjC,OAAO,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACpC,MAAM,IAAI,QAAQ,CAAC,MAAM,KAAK,QAAQ,EAAE;IACvC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACnC,MAAM;IACL,SAAS,CAAC,QAAQ,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC9C,OAAO,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KAChC,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KAChC,CAAC,CAAC;GACJ;CACF;;AAED,SAAS,mBAAmB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,EAAE;EACzD,IAAI,aAAa,CAAC,WAAW,KAAK,OAAO,CAAC,WAAW,IAAIA,OAAI,KAAKC,IAAY,IAAI,aAAa,CAAC,WAAW,CAAC,OAAO,KAAKC,SAAe,EAAE;IACvI,iBAAiB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;GAC3C,MAAM;IACL,IAAIF,OAAI,KAAK,SAAS,EAAE;MACtB,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC,MAAM,IAAI,UAAU,CAACA,OAAI,CAAC,EAAE;MAC3B,qBAAqB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,CAAC,CAAC;KACrD,MAAM;MACL,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC;GACF;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,KAAK,KAAK,EAAE;IACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;GACpC,MAAM,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;IAClC,IAAIA,OAAI,GAAG,KAAK,CAAC,CAAC;IAClB,IAAI;MACFA,OAAI,GAAG,KAAK,CAAC,IAAI,CAAC;KACnB,CAAC,OAAO,KAAK,EAAE;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;MACvB,OAAO;KACR;IACD,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAEA,OAAI,CAAC,CAAC;GAC3C,MAAM;IACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB;CACF;;AAED,SAAS,gBAAgB,CAAC,OAAO,EAAE;EACjC,IAAI
,OAAO,CAAC,QAAQ,EAAE;IACpB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;GACnC;;EAED,OAAO,CAAC,OAAO,CAAC,CAAC;CAClB;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;;EAED,OAAO,CAAC,OAAO,GAAG,KAAK,CAAC;EACxB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;;EAE3B,IAAI,OAAO,CAAC,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;IACrC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;EACD,OAAO,CAAC,MAAM,GAAG,QAAQ,CAAC;EAC1B,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC;;EAEzB,IAAI,CAAC,gBAAgB,EAAE,OAAO,CAAC,CAAC;CACjC;;AAED,SAAS,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,EAAE;EAC5D,IAAI,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;EACvC,IAAI,MAAM,GAAG,YAAY,CAAC,MAAM,CAAC;;;EAGjC,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC;;EAEvB,YAAY,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC;EAC7B,YAAY,CAAC,MAAM,GAAG,SAAS,CAAC,GAAG,aAAa,CAAC;EACjD,YAAY,CAAC,MAAM,GAAG,QAAQ,CAAC,GAAG,WAAW,CAAC;;EAE9C,IAAI,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,MAAM,EAAE;IACjC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;GACvB;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE;EACxB,IAAI,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;EACvC,IAAI,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC;;EAE7B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;IAC5B,OAAO;GACR;;EAED,IAAI,KAAK,GAAG,KAAK,CAAC;MACd,QAAQ,GAAG,KAAK,CAAC;MACjB,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;;EAE7B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,IAAI,CAAC,EAAE;IAC9C,KAAK,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;IACvB,QAAQ,GAAG,WAAW,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC;;IAEpC,IAAI,KAAK,EAAE;MACT,cAAc,CAAC,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;KAClD,MAAM;MACL,QAAQ,CAAC,MAAM,CAAC,CAAC;KAClB;GACF;;EAED,OAAO,CAAC,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC;CACjC;;AAED,SAAS,cAAc,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE;EAC1D,IAAI,WAAW,GAAG,UAAU,CAAC,QAAQ,CAAC;MAClC,KAAK,GAAG,KAAK,CAAC;MACd,KAAK,GAAG,KAAK,CAAC;MACd,SAAS,GAAG,IAAI,CAAC;;EAErB,IAAI,WAAW,EAAE;IACf,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC;KAC1B,CAAC,OAAO,CAAC,EAAE;MACV,SAAS,GAAG,KAAK,CAAC;MAClB,KAAK,GAAG,CAAC,CAAC;KACX;;IAED,IAAI,OAAO,KAAK,KAAK,EAAE;MACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;MACnC,OAAO;KACR;GACF,MAAM;IACL,KAAK,GAAG,MAAM,CAAC;GAChB;;EAED,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;;GAE/B,MAAM,IAAI,WAAW,IAAI,SAAS,EAAE;IACnC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,SAAS,KAAK,KAAK,EAAE;IAC9B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB,MAAM,IAAI,OAAO,KAAK,SAAS,EAAE;IAChC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,OAAO,KAAK,QAAQ,EAAE;IAC/B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI;IACF,QAAQ,CAAC,SAAS,cAAc,CAAC,KAAK,EAAE;MACtC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACzB,EAAE,SAAS,aAAa,CAAC,MAAM,EAAE;MAChC,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,CAAC,CAAC;GACJ,CAAC,OAAO,CAAC,EAAE;IACV,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;GACpB;CACF;;AAED,IAAI,EAAE,GAAG,CAAC,CAAC;AACX,SAAS,MAAM,GAAG;EAChB,OAAO,EAAE,EAAE,CAAC;CACb;;AAED,SAAS,WAAW,CAAC,OAAO,EAAE;EAC5B,OAAO,CAAC,UAAU,CAAC,GAAG,EAAE,EAAE,CAAC;EAC3B,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;EAC3B,OAAO,CAAC,OAAO,GAAG,SAAS,CAAC;EAC5B,OAAO,CAAC,YAAY,GAAG,EAAE,CAAC;CAC3B;;AChOD,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;CAC7D,AAAC;;AAEF,IAAI,UAAU,GAAG,YAAY;EAC3B,SAAS,UAAU,CAAC,WAAW,EAAE,KAAK,EAAE;IACtC,IAAI,CAAC,oBAAoB,GAAG,WAAW,CAAC;IACxC,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;;IAErC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;MAC7B,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;KAC3B;;IAED,IAAI,OAAO,CAAC,KAAK,CAAC,EAAE;MAClB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC;MAC3B,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,MAAM,CAAC;;MAE/B,IAAI,CAAC,OAAO,
GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;;MAEtC,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACrB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;OACrC,MAAM;QACL,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;QACvB,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;UACzB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;SACrC;OACF;KACF,MAAM;MACL,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;KACzC;GACF;;EAED,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE;IAC3D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,KAAK,OAAO,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;MAChE,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC9B;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE;IAC9D,IAAI,CAAC,GAAG,IAAI,CAAC,oBAAoB,CAAC;IAClC,IAAIF,UAAO,GAAG,CAAC,CAAC,OAAO,CAAC;;;IAGxB,IAAIA,UAAO,KAAKI,SAAe,EAAE;MAC/B,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;MACnB,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;MACnB,IAAI,QAAQ,GAAG,KAAK,CAAC;MACrB,IAAI;QACF,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC;OACpB,CAAC,OAAO,CAAC,EAAE;QACV,QAAQ,GAAG,IAAI,CAAC;QAChB,KAAK,GAAG,CAAC,CAAC;OACX;;MAED,IAAI,KAAK,KAAKD,IAAY,IAAI,KAAK,CAAC,MAAM,KAAK,OAAO,EAAE;QACtD,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;OACjD,MAAM,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;QACtC,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB,MAAM,IAAI,CAAC,KAAKE,SAAO,EAAE;QACxB,IAAI,OAAO,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC;QAC1B,IAAI,QAAQ,EAAE;UACZ,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;SACxB,MAAM;UACL,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;SAC5C;QACD,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;OAChC,MAAM;QACL,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,UAAUL,UAAO,EAAE;UAC1C,OAAOA,UAAO,CAAC,KAAK,CAAC,CAAC;SACvB,CAAC,EAAE,CAAC,CAAC,CAAC;OACR;KACF,MAAM;MACL,IAAI,CAAC,aAAa,CAACA,UAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC;KACvC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE,KAAK,EAAE;IACrE,IAAI,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;;;IAG3B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;MAC9B,IAAI,CAAC,UAAU,EAAE,CAAC;;MAElB,IAAI,KAAK,KAAK,QAAQ,EAAE;QACtB,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACxB,MAAM;QACL,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB;KACF;;IAED,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;MACzB,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;KAChC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,aAAa,GAAG,SAAS,aAAa,CAAC,OAAO,EAAE,CAAC,EAAE;IACtE,IAAI,UAAU,GAAG,IAAI,CAAC;;IAEtB,SAAS,CAAC,OAAO,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC7C,OAAO,UAAU,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC;KACnD,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,UAAU,CAAC,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;KACnD,CAAC,CAAC;GACJ,CAAC;;EAEF,OAAO,UAAU,CAAC;CACnB,EAAE;;ACrHH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+CA,AAAe,SAAS,GAAG,CAAC,OAAO,EAAE;EACnC,OAAO,IAAI,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC,OAAO,CAAC;;;CAC9C,DCjDD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiEA,AAAe,SAAS,IAAI,CAAC,OAAO,EAAE;;EAEpC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;IACrB,OAAO,IAAI,WAAW,CAAC,UAAU,CAAC,EAAE,MAAM,EAAE;MAC1C,OAAO,MAAM,CAAC,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAC,CAAC;KACjE,CAAC,CAAC;GACJ,MAAM;IACL,OAAO,IAAI,WAAW,CAAC,UAAU,OAAO,EAAE,MAAM,EAAE;MAChD,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;MAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE;QAC/B,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;OACvD;KACF,CAAC,CAAC;GACJ;;;CACF,DCjFD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCA,AAAe,SAASM,QAAM,CAAC,MAAM,EAAE;;EAErC,IAAI,WAA
W,GAAG,IAAI,CAAC;EACvB,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,MAAO,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EACzB,OAAO,OAAO,CAAC;;;CAChB,DC9BD,SAAS,aAAa,GAAG;EACvB,MAAM,IAAI,SAAS,CAAC,oFAAoF,CAAC,CAAC;CAC3G;;AAED,SAAS,QAAQ,GAAG;EAClB,MAAM,IAAI,SAAS,CAAC,uHAAuH,CAAC,CAAC;CAC9I;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0GD,IAAIF,SAAO,GAAG,YAAY;EACxB,SAAS,OAAO,CAAC,QAAQ,EAAE;IACzB,IAAI,CAAC,UAAU,CAAC,GAAG,MAAM,EAAE,CAAC;IAC5B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;IACvC,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;;IAEvB,IAAI,IAAI,KAAK,QAAQ,EAAE;MACrB,OAAO,QAAQ,KAAK,UAAU,IAAI,aAAa,EAAE,CAAC;MAClD,IAAI,YAAY,OAAO,GAAG,iBAAiB,CAAC,IAAI,EAAE,QAAQ,CAAC,GAAG,QAAQ,EAAE,CAAC;KAC1E;GACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA4LD,OAAO,CAAC,SAAS,CAAC,KAAK,GAAG,SAAS,MAAM,CAAC,WAAW,EAAE;IACrD,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;GACrC,CAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA0CF,OAAO,CAAC,SAAS,CAAC,OAAO,GAAG,SAAS,QAAQ,CAAC,QAAQ,EAAE;IACtD,IAAI,OAAO,GAAG,IAAI,CAAC;IACnB,IAAI,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;;IAEtC,IAAI,UAAU,CAAC,QAAQ,CAAC,EAAE;MACxB,OAAO,OAAO,CAAC,IAAI,CAAC,UAAU,KAAK,EAAE;QACnC,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;UACtD,OAAO,KAAK,CAAC;SACd,CAAC,CAAC;OACJ,EAAE,UAAU,MAAM,EAAE;QACnB,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;UACtD,MAAM,MAAM,CAAC;SACd,CAAC,CAAC;OACJ,CAAC,CAAC;KACJ;;IAED,OAAO,OAAO,CAAC,IAAI,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;GACzC,CAAC;;EAEF,OAAO,OAAO,CAAC;CAChB,EAAE,CAAC;;AAEJA,SAAO,CAAC,SAAS,CAAC,IAAI,GAAG,IAAI,CAAC;AAC9B,AACAA,SAAO,CAAC,GAAG,GAAG,GAAG,CAAC;AAClBA,SAAO,CAAC,IAAI,GAAG,IAAI,CAAC;AACpBA,SAAO,CAAC,OAAO,GAAGG,SAAO,CAAC;AAC1BH,SAAO,CAAC,MAAM,GAAGI,QAAM,CAAC;AACxBJ,SAAO,CAAC,aAAa,GAAG,YAAY,CAAC;AACrCA,SAAO,CAAC,QAAQ,GAAG,OAAO,CAAC;AAC3BA,SAAO,CAAC,KAAK,GAAG,IAAI;;AC5YpB;AACA,AAEe,SAAS,QAAQ,GAAG;EACjC,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;;EAEnB,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;IACjC,KAAK,GAAG,MAAM,CAAC;GAChB,MAAM,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;IACtC,KAAK,GAAG,IAAI,CAAC;GACd,MAAM;IACL,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC;KACnC,CAAC,OAAO,CAAC,EAAE;MACV,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;KAC7F;GACF;;EAED,IAAI,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC;;EAEtB,IAAI,CAAC,EAAE;IACL,IAAI,eAAe,GAAG,IAAI,CAAC;IAC3B,IAAI;MACF,eAAe,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KAC/D,CAAC,OAAO,CAAC,EAAE;;KAEX;;IAED,IAAI,eAAe,KAAK,kBAAkB,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE;MACrD,OAAO;KACR;GACF;;EAED,KAAK,CAAC,OAAO,GAAGA,SAAO,CAAC;;;CACzB,DC/BD;AACAA,SAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC;AAC5BA,SAAO,CAAC,OAAO,GAAGA,SAAO,CAAC;;ACJ1BA,SAAO,CAAC,QAAQ,EAAE,CAAC;;;;;;;;","file":"es6-promise.auto.js"} \ No newline at end of file diff --git a/node_modules/es6-promise/dist/es6-promise.auto.min.js b/node_modules/es6-promise/dist/es6-promise.auto.min.js index fdf8bff2676cc..5a44a3b086e84 100644 --- a/node_modules/es6-promise/dist/es6-promise.auto.min.js +++ b/node_modules/es6-promise/dist/es6-promise.auto.min.js @@ -1 +1 @@ -!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):t.ES6Promise=e()}(this,function(){"use strict";function t(t){var e=typeof t;return null!==t&&("object"===e||"function"===e)}function e(t){return"function"==typeof t}function n(t){B=t}function r(t){G=t}function o(){return function(){return process.nextTick(a)}}function 
i(){return"undefined"!=typeof z?function(){z(a)}:c()}function s(){var t=0,e=new J(a),n=document.createTextNode("");return e.observe(n,{characterData:!0}),function(){n.data=t=++t%2}}function u(){var t=new MessageChannel;return t.port1.onmessage=a,function(){return t.port2.postMessage(0)}}function c(){var t=setTimeout;return function(){return t(a,1)}}function a(){for(var t=0;t postsJSON\n values[1] // => commentsJSON\n\n return values;\n });\n ```\n\n @class Promise\n @param {Function} resolver\n Useful for tooling.\n @constructor\n*/\n\nvar Promise = function () {\n function Promise(resolver) {\n this[PROMISE_ID] = nextId();\n this._result = this._state = undefined;\n this._subscribers = [];\n\n if (noop !== resolver) {\n typeof resolver !== 'function' && needsResolver();\n this instanceof Promise ? initializePromise(this, resolver) : needsNew();\n }\n }\n\n /**\n The primary way of interacting with a promise is through its `then` method,\n which registers callbacks to receive either a promise's eventual value or the\n reason why the promise cannot be fulfilled.\n ```js\n findUser().then(function(user){\n // user is available\n }, function(reason){\n // user is unavailable, and you are given the reason why\n });\n ```\n Chaining\n --------\n The return value of `then` is itself a promise. This second, 'downstream'\n promise is resolved with the return value of the first promise's fulfillment\n or rejection handler, or rejected if the handler throws an exception.\n ```js\n findUser().then(function (user) {\n return user.name;\n }, function (reason) {\n return 'default name';\n }).then(function (userName) {\n // If `findUser` fulfilled, `userName` will be the user's name, otherwise it\n // will be `'default name'`\n });\n findUser().then(function (user) {\n throw new Error('Found user, but still unhappy');\n }, function (reason) {\n throw new Error('`findUser` rejected and we're unhappy');\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // if `findUser` fulfilled, `reason` will be 'Found user, but still unhappy'.\n // If `findUser` rejected, `reason` will be '`findUser` rejected and we're unhappy'.\n });\n ```\n If the downstream promise does not specify a rejection handler, rejection reasons will be propagated further downstream.\n ```js\n findUser().then(function (user) {\n throw new PedagogicalException('Upstream error');\n }).then(function (value) {\n // never reached\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // The `PedgagocialException` is propagated all the way down to here\n });\n ```\n Assimilation\n ------------\n Sometimes the value you want to propagate to a downstream promise can only be\n retrieved asynchronously. This can be achieved by returning a promise in the\n fulfillment or rejection handler. The downstream promise will then be pending\n until the returned promise is settled. 
This is called *assimilation*.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // The user's comments are now available\n });\n ```\n If the assimliated promise rejects, then the downstream promise will also reject.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // If `findCommentsByAuthor` fulfills, we'll have the value here\n }, function (reason) {\n // If `findCommentsByAuthor` rejects, we'll have the reason here\n });\n ```\n Simple Example\n --------------\n Synchronous Example\n ```javascript\n let result;\n try {\n result = findResult();\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n findResult(function(result, err){\n if (err) {\n // failure\n } else {\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findResult().then(function(result){\n // success\n }, function(reason){\n // failure\n });\n ```\n Advanced Example\n --------------\n Synchronous Example\n ```javascript\n let author, books;\n try {\n author = findAuthor();\n books = findBooksByAuthor(author);\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n function foundBooks(books) {\n }\n function failure(reason) {\n }\n findAuthor(function(author, err){\n if (err) {\n failure(err);\n // failure\n } else {\n try {\n findBoooksByAuthor(author, function(books, err) {\n if (err) {\n failure(err);\n } else {\n try {\n foundBooks(books);\n } catch(reason) {\n failure(reason);\n }\n }\n });\n } catch(error) {\n failure(err);\n }\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findAuthor().\n then(findBooksByAuthor).\n then(function(books){\n // found books\n }).catch(function(reason){\n // something went wrong\n });\n ```\n @method then\n @param {Function} onFulfilled\n @param {Function} onRejected\n Useful for tooling.\n @return {Promise}\n */\n\n /**\n `catch` is simply sugar for `then(undefined, onRejection)` which makes it the same\n as the catch block of a try/catch statement.\n ```js\n function findAuthor(){\n throw new Error('couldn't find that author');\n }\n // synchronous\n try {\n findAuthor();\n } catch(reason) {\n // something went wrong\n }\n // async with promises\n findAuthor().catch(function(reason){\n // something went wrong\n });\n ```\n @method catch\n @param {Function} onRejection\n Useful for tooling.\n @return {Promise}\n */\n\n\n Promise.prototype.catch = function _catch(onRejection) {\n return this.then(null, onRejection);\n };\n\n /**\n `finally` will be invoked regardless of the promise's fate just as native\n try/catch/finally behaves\n \n Synchronous example:\n \n ```js\n findAuthor() {\n if (Math.random() > 0.5) {\n throw new Error();\n }\n return new Author();\n }\n \n try {\n return findAuthor(); // succeed or fail\n } catch(error) {\n return findOtherAuther();\n } finally {\n // always runs\n // doesn't affect the return value\n }\n ```\n \n Asynchronous example:\n \n ```js\n findAuthor().catch(function(reason){\n return findOtherAuther();\n }).finally(function(){\n // author was either found, or not\n });\n ```\n \n @method finally\n @param {Function} callback\n @return {Promise}\n */\n\n\n Promise.prototype.finally = function _finally(callback) {\n var promise = this;\n var constructor = promise.constructor;\n\n return promise.then(function (value) {\n return constructor.resolve(callback()).then(function () {\n return value;\n });\n }, function (reason) {\n return 
constructor.resolve(callback()).then(function () {\n throw reason;\n });\n });\n };\n\n return Promise;\n}();\n\nPromise.prototype.then = then;\nexport default Promise;\nPromise.all = all;\nPromise.race = race;\nPromise.resolve = Resolve;\nPromise.reject = Reject;\nPromise._setScheduler = setScheduler;\nPromise._setAsap = setAsap;\nPromise._asap = asap;","/*global self*/\nimport Promise from './promise';\n\nexport default function polyfill() {\n var local = void 0;\n\n if (typeof global !== 'undefined') {\n local = global;\n } else if (typeof self !== 'undefined') {\n local = self;\n } else {\n try {\n local = Function('return this')();\n } catch (e) {\n throw new Error('polyfill failed because global object is unavailable in this environment');\n }\n }\n\n var P = local.Promise;\n\n if (P) {\n var promiseToString = null;\n try {\n promiseToString = Object.prototype.toString.call(P.resolve());\n } catch (e) {\n // silently ignored\n }\n\n if (promiseToString === '[object Promise]' && !P.cast) {\n return;\n }\n }\n\n local.Promise = Promise;\n}","import Promise from './es6-promise/promise';\nimport polyfill from './es6-promise/polyfill';\n\n// Strange compat..\nPromise.polyfill = polyfill;\nPromise.Promise = Promise;\nexport default Promise;","import Promise from './es6-promise';\nPromise.polyfill();\nexport default Promise;"],"names":["resolve","_resolve","then","originalThen","originalResolve","Promise","reject","_reject","Resolve","Reject"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACNO,SAAS,gBAAgB,CAAC,CAAC,EAAE;EAClC,IAAI,IAAI,GAAG,OAAO,CAAC,CAAC;EACpB,OAAO,CAAC,KAAK,IAAI,KAAK,IAAI,KAAK,QAAQ,IAAI,IAAI,KAAK,UAAU,CAAC,CAAC;CACjE;;AAED,AAAO,SAAS,UAAU,CAAC,CAAC,EAAE;EAC5B,OAAO,OAAO,CAAC,KAAK,UAAU,CAAC;CAChC;;AAED,AAEC;;AAED,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC;AACtB,IAAI,KAAK,CAAC,OAAO,EAAE;EACjB,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC;CAC1B,MAAM;EACL,QAAQ,GAAG,UAAU,CAAC,EAAE;IACtB,OAAO,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,gBAAgB,CAAC;GAC/D,CAAC;CACH;;AAED,AAAO,IAAI,OAAO,GAAG,QAAQ;;ACtB7B,IAAI,GAAG,GAAG,CAAC,CAAC;AACZ,IAAI,SAAS,GAAG,KAAK,CAAC,CAAC;AACvB,IAAI,iBAAiB,GAAG,KAAK,CAAC,CAAC;;AAE/B,AAAO,IAAI,IAAI,GAAG,SAAS,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;EAC7C,KAAK,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC;EACtB,KAAK,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;EACrB,GAAG,IAAI,CAAC,CAAC;EACT,IAAI,GAAG,KAAK,CAAC,EAAE;;;;IAIb,IAAI,iBAAiB,EAAE;MACrB,iBAAiB,CAAC,KAAK,CAAC,CAAC;KAC1B,MAAM;MACL,aAAa,EAAE,CAAC;KACjB;GACF;CACF,CAAC;;AAEF,AAAO,SAAS,YAAY,CAAC,UAAU,EAAE;EACvC,iBAAiB,GAAG,UAAU,CAAC;CAChC;;AAED,AAAO,SAAS,OAAO,CAAC,MAAM,EAAE;EAC9B,IAAI,GAAG,MAAM,CAAC;CACf;;AAED,IAAI,aAAa,GAAG,OAAO,MAAM,KAAK,WAAW,GAAG,MAAM,GAAG,SAAS,CAAC;AACvE,IAAI,aAAa,GAAG,aAAa,IAAI,EAAE,CAAC;AACxC,IAAI,uBAAuB,GAAG,aAAa,CAAC,gBAAgB,IAAI,aAAa,CAAC,sBAAsB,CAAC;AACrG,IAAI,MAAM,GAAG,OAAO,IAAI,KAAK,WAAW,IAAI,OAAO,OAAO,KAAK,WAAW,IAAI,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,kBAAkB,CAAC;;;AAG/H,IAAI,QAAQ,GAAG,OAAO,iBAAiB,KAAK,WAAW,IAAI,OAAO,aAAa,KAAK,WAAW,IAAI,OAAO,cAAc,KAAK,WAAW,CAAC;;;AAGzI,SAAS,WAAW,GAAG;;;EAGrB,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;GAChC,CAAC;CACH;;;AAGD,SAAS,aAAa,GAAG;EACvB,IAAI,OAAO,SAAS,KAAK,WAAW,EAAE;IACpC,OAAO,YAAY;MACjB,SAAS,CAAC,KAAK,CAAC,CAAC;KAClB,CAAC;GACH;;EAED,OAAO,aAAa,EAAE,CAAC;CACxB;;AAED,SAAS,mBAAmB,GAAG;EAC7B,IAAI,UAAU,GAAG,CAAC,CAAC;EACnB,IAAI,QAAQ,GAAG,IAAI,uBAAuB,CAAC,KAAK,CAAC,CAAC;EAClD,IAAI,IAAI,GAAG,QAAQ,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC;EACvC,QAAQ,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;;EAEhD,OAAO,YAAY;IACjB,IAAI,CAAC,IAAI,GAAG,UAAU,GAAG,EAAE,UAAU,GAAG,CAA
C,CAAC;GAC3C,CAAC;CACH;;;AAGD,SAAS,iBAAiB,GAAG;EAC3B,IAAI,OAAO,GAAG,IAAI,cAAc,EAAE,CAAC;EACnC,OAAO,CAAC,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC;EAChC,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;GACrC,CAAC;CACH;;AAED,SAAS,aAAa,GAAG;;;EAGvB,IAAI,gBAAgB,GAAG,UAAU,CAAC;EAClC,OAAO,YAAY;IACjB,OAAO,gBAAgB,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;GACnC,CAAC;CACH;;AAED,IAAI,KAAK,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC;AAC5B,SAAS,KAAK,GAAG;EACf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE;IAC/B,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,GAAG,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;;IAEvB,QAAQ,CAAC,GAAG,CAAC,CAAC;;IAEd,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC;IACrB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;GAC1B;;EAED,GAAG,GAAG,CAAC,CAAC;CACT;;AAED,SAAS,YAAY,GAAG;EACtB,IAAI;IACF,IAAI,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;IACvD,SAAS,GAAG,KAAK,CAAC,SAAS,IAAI,KAAK,CAAC,YAAY,CAAC;IAClD,OAAO,aAAa,EAAE,CAAC;GACxB,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,aAAa,EAAE,CAAC;GACxB;CACF;;AAED,IAAI,aAAa,GAAG,KAAK,CAAC,CAAC;;AAE3B,IAAI,MAAM,EAAE;EACV,aAAa,GAAG,WAAW,EAAE,CAAC;CAC/B,MAAM,IAAI,uBAAuB,EAAE;EAClC,aAAa,GAAG,mBAAmB,EAAE,CAAC;CACvC,MAAM,IAAI,QAAQ,EAAE;EACnB,aAAa,GAAG,iBAAiB,EAAE,CAAC;CACrC,MAAM,IAAI,aAAa,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;EACvE,aAAa,GAAG,YAAY,EAAE,CAAC;CAChC,MAAM;EACL,aAAa,GAAG,aAAa,EAAE,CAAC;;;CACjC,DCtHc,SAAS,IAAI,CAAC,aAAa,EAAE,WAAW,EAAE;EACvD,IAAI,MAAM,GAAG,IAAI,CAAC;;EAElB,IAAI,KAAK,GAAG,IAAI,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;;EAEvC,IAAI,KAAK,CAAC,UAAU,CAAC,KAAK,SAAS,EAAE;IACnC,WAAW,CAAC,KAAK,CAAC,CAAC;GACpB;;EAED,IAAI,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;;;EAG3B,IAAI,MAAM,EAAE;IACV,IAAI,QAAQ,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IACrC,IAAI,CAAC,YAAY;MACf,OAAO,cAAc,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;KAChE,CAAC,CAAC;GACJ,MAAM;IACL,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,CAAC,CAAC;GACtD;;EAED,OAAO,KAAK,CAAC;;;CACd,DCxBD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+BA,AAAe,SAASA,SAAO,CAAC,MAAM,EAAE;;EAEtC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,CAAC,WAAW,KAAK,WAAW,EAAE;IAC9E,OAAO,MAAM,CAAC;GACf;;EAED,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,OAAQ,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EAC1B,OAAO,OAAO,CAAC;;;CAChB,DCrCM,IAAI,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;;AAEhE,SAAS,IAAI,GAAG,EAAE;;AAElB,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC;AACrB,IAAI,SAAS,GAAG,CAAC,CAAC;AAClB,IAAI,QAAQ,GAAG,CAAC,CAAC;;AAEjB,IAAI,eAAe,GAAG,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC;;AAEtC,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,0CAA0C,CAAC,CAAC;CAClE;;AAED,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,sDAAsD,CAAC,CAAC;CAC9E;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE;EACxB,IAAI;IACF,OAAO,OAAO,CAAC,IAAI,CAAC;GACrB,CAAC,OAAO,KAAK,EAAE;IACd,eAAe,CAAC,KAAK,GAAG,KAAK,CAAC;IAC9B,OAAO,eAAe,CAAC;GACxB;CACF;;AAED,SAAS,OAAO,CAACC,OAAI,EAAE,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,EAAE;EAClE,IAAI;IACFA,OAAI,CAAC,IAAI,CAAC,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;GACxD,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,CAAC;GACV;CACF;;AAED,SAAS,qBAAqB,CAAC,OAAO,EAAE,QAAQ,EAAEA,OAAI,EAAE;EACtD,IAAI,CAAC,UAAU,OAAO,EAAE;IACtB,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,IAAI,KAAK,GAAG,OAAO,CAACA,OAAI,EAAE,QAAQ,EAAE,UAAU,KAAK,EAAE;MACnD,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;MACd,IAAI,QAAQ,KAAK,KAAK,EAAE;QACtB,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB,MAAM;QACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB;KACF,EAAE,UAAU,MAAM,EAAE;MACnB,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;;MAEd,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,EAAE,UAAU,IAAI,OAAO,C
AAC,MAAM,IAAI,kBAAkB,CAAC,CAAC,CAAC;;IAExD,IAAI,CAAC,MAAM,IAAI,KAAK,EAAE;MACpB,MAAM,GAAG,IAAI,CAAC;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACxB;GACF,EAAE,OAAO,CAAC,CAAC;CACb;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI,QAAQ,CAAC,MAAM,KAAK,SAAS,EAAE;IACjC,OAAO,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACpC,MAAM,IAAI,QAAQ,CAAC,MAAM,KAAK,QAAQ,EAAE;IACvC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACnC,MAAM;IACL,SAAS,CAAC,QAAQ,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC9C,OAAO,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KAChC,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KAChC,CAAC,CAAC;GACJ;CACF;;AAED,SAAS,mBAAmB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,EAAE;EACzD,IAAI,aAAa,CAAC,WAAW,KAAK,OAAO,CAAC,WAAW,IAAIA,OAAI,KAAKC,IAAY,IAAI,aAAa,CAAC,WAAW,CAAC,OAAO,KAAKC,SAAe,EAAE;IACvI,iBAAiB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;GAC3C,MAAM;IACL,IAAIF,OAAI,KAAK,eAAe,EAAE;MAC5B,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,KAAK,CAAC,CAAC;MACvC,eAAe,CAAC,KAAK,GAAG,IAAI,CAAC;KAC9B,MAAM,IAAIA,OAAI,KAAK,SAAS,EAAE;MAC7B,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC,MAAM,IAAI,UAAU,CAACA,OAAI,CAAC,EAAE;MAC3B,qBAAqB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,CAAC,CAAC;KACrD,MAAM;MACL,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC;GACF;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,KAAK,KAAK,EAAE;IACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;GACpC,MAAM,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;IAClC,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAE,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC;GACrD,MAAM;IACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB;CACF;;AAED,SAAS,gBAAgB,CAAC,OAAO,EAAE;EACjC,IAAI,OAAO,CAAC,QAAQ,EAAE;IACpB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;GACnC;;EAED,OAAO,CAAC,OAAO,CAAC,CAAC;CAClB;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;;EAED,OAAO,CAAC,OAAO,GAAG,KAAK,CAAC;EACxB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;;EAE3B,IAAI,OAAO,CAAC,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;IACrC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;EACD,OAAO,CAAC,MAAM,GAAG,QAAQ,CAAC;EAC1B,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC;;EAEzB,IAAI,CAAC,gBAAgB,EAAE,OAAO,CAAC,CAAC;CACjC;;AAED,SAAS,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,EAAE;EAC5D,IAAI,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;EACvC,IAAI,MAAM,GAAG,YAAY,CAAC,MAAM,CAAC;;;EAGjC,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC;;EAEvB,YAAY,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC;EAC7B,YAAY,CAAC,MAAM,GAAG,SAAS,CAAC,GAAG,aAAa,CAAC;EACjD,YAAY,CAAC,MAAM,GAAG,QAAQ,CAAC,GAAG,WAAW,CAAC;;EAE9C,IAAI,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,MAAM,EAAE;IACjC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;GACvB;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE;EACxB,IAAI,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;EACvC,IAAI,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC;;EAE7B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;IAC5B,OAAO;GACR;;EAED,IAAI,KAAK,GAAG,KAAK,CAAC;MACd,QAAQ,GAAG,KAAK,CAAC;MACjB,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;;EAE7B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,IAAI,CAAC,EAAE;IAC9C,KAAK,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;IACvB,QAAQ,GAAG,WAAW,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC;;IAEpC,IAAI,KAAK,EAAE;MACT,cAAc,CAAC,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;KAClD,MAAM;MACL,QAAQ,CAAC,MAAM,CAAC,CAAC;KAClB;GACF;;EAED,OAAO,CAAC,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC;CACjC;;AAED,SAAS,QAAQ,CAAC,QAAQ,EAAE,MAAM,EAAE;EAClC,IAAI;IACF,OAAO,QAAQ,CAAC,MAAM,CAAC,CAAC;GACzB,CAAC,OAAO,CAAC,EAAE;IACV,eAAe,CAAC,KAAK,GAAG,CAAC,CAAC;IAC1B,OAAO,eAAe,CAAC;GACxB;CACF;;AAED,SAAS,cAAc,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE;EAC1D,IAAI,WAAW,GAAG,UAAU,CAAC,QAAQ,CAAC;MAClC,KAAK,GAAG,KAAK,CAAC;MACd,KAAK,GAAG,KAAK,CAAC;MACd,SAAS,GAAG,KAAK,CAAC;MAClB
,MAAM,GAAG,KAAK,CAAC,CAAC;;EAEpB,IAAI,WAAW,EAAE;IACf,KAAK,GAAG,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;;IAEnC,IAAI,KAAK,KAAK,eAAe,EAAE;MAC7B,MAAM,GAAG,IAAI,CAAC;MACd,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC;MACpB,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC;KACpB,MAAM;MACL,SAAS,GAAG,IAAI,CAAC;KAClB;;IAED,IAAI,OAAO,KAAK,KAAK,EAAE;MACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;MACnC,OAAO;KACR;GACF,MAAM;IACL,KAAK,GAAG,MAAM,CAAC;IACf,SAAS,GAAG,IAAI,CAAC;GAClB;;EAED,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;;GAE/B,MAAM,IAAI,WAAW,IAAI,SAAS,EAAE;IACnC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,MAAM,EAAE;IACjB,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB,MAAM,IAAI,OAAO,KAAK,SAAS,EAAE;IAChC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,OAAO,KAAK,QAAQ,EAAE;IAC/B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI;IACF,QAAQ,CAAC,SAAS,cAAc,CAAC,KAAK,EAAE;MACtC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACzB,EAAE,SAAS,aAAa,CAAC,MAAM,EAAE;MAChC,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,CAAC,CAAC;GACJ,CAAC,OAAO,CAAC,EAAE;IACV,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;GACpB;CACF;;AAED,IAAI,EAAE,GAAG,CAAC,CAAC;AACX,SAAS,MAAM,GAAG;EAChB,OAAO,EAAE,EAAE,CAAC;CACb;;AAED,SAAS,WAAW,CAAC,OAAO,EAAE;EAC5B,OAAO,CAAC,UAAU,CAAC,GAAG,EAAE,EAAE,CAAC;EAC3B,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;EAC3B,OAAO,CAAC,OAAO,GAAG,SAAS,CAAC;EAC5B,OAAO,CAAC,YAAY,GAAG,EAAE,CAAC;CAC3B;;ACrPD,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;CAC7D,AAAC;;AAEF,IAAI,UAAU,GAAG,YAAY;EAC3B,SAAS,UAAU,CAAC,WAAW,EAAE,KAAK,EAAE;IACtC,IAAI,CAAC,oBAAoB,GAAG,WAAW,CAAC;IACxC,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;;IAErC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;MAC7B,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;KAC3B;;IAED,IAAI,OAAO,CAAC,KAAK,CAAC,EAAE;MAClB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC;MAC3B,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,MAAM,CAAC;;MAE/B,IAAI,CAAC,OAAO,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;;MAEtC,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACrB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;OACrC,MAAM;QACL,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;QACvB,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;UACzB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;SACrC;OACF;KACF,MAAM;MACL,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;KACzC;GACF;;EAED,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE;IAC3D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,KAAK,OAAO,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;MAChE,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC9B;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE;IAC9D,IAAI,CAAC,GAAG,IAAI,CAAC,oBAAoB,CAAC;IAClC,IAAIF,UAAO,GAAG,CAAC,CAAC,OAAO,CAAC;;;IAGxB,IAAIA,UAAO,KAAKI,SAAe,EAAE;MAC/B,IAAI,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC;;MAE3B,IAAI,KAAK,KAAKD,IAAY,IAAI,KAAK,CAAC,MAAM,KAAK,OAAO,EAAE;QACtD,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;OACjD,MAAM,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;QACtC,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB,MAAM,IAAI,CAAC,KAAKE,SAAO,EAAE;QACxB,IAAI,OAAO,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC;QAC1B,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAC3C,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;OAChC,MAAM;QACL,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,UAAUL,UAAO,EAAE;UAC1C,OAAOA,UAAO,CAAC,KAAK,CAAC,CAAC;SACvB,CAAC,EAAE,CAAC,CAAC,CAAC;OACR;KACF,MAAM;MACL,IAAI,CAAC,aAAa,CAACA,UAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC;KACvC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE,KAAK,EAAE;IACrE,
IAAI,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;;;IAG3B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;MAC9B,IAAI,CAAC,UAAU,EAAE,CAAC;;MAElB,IAAI,KAAK,KAAK,QAAQ,EAAE;QACtB,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACxB,MAAM;QACL,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB;KACF;;IAED,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;MACzB,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;KAChC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,aAAa,GAAG,SAAS,aAAa,CAAC,OAAO,EAAE,CAAC,EAAE;IACtE,IAAI,UAAU,GAAG,IAAI,CAAC;;IAEtB,SAAS,CAAC,OAAO,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC7C,OAAO,UAAU,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC;KACnD,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,UAAU,CAAC,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;KACnD,CAAC,CAAC;GACJ,CAAC;;EAEF,OAAO,UAAU,CAAC;CACnB,EAAE;;ACzGH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+CA,AAAe,SAAS,GAAG,CAAC,OAAO,EAAE;EACnC,OAAO,IAAI,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC,OAAO,CAAC;;;CAC9C,DCjDD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiEA,AAAe,SAAS,IAAI,CAAC,OAAO,EAAE;;EAEpC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;IACrB,OAAO,IAAI,WAAW,CAAC,UAAU,CAAC,EAAE,MAAM,EAAE;MAC1C,OAAO,MAAM,CAAC,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAC,CAAC;KACjE,CAAC,CAAC;GACJ,MAAM;IACL,OAAO,IAAI,WAAW,CAAC,UAAU,OAAO,EAAE,MAAM,EAAE;MAChD,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;MAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE;QAC/B,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;OACvD;KACF,CAAC,CAAC;GACJ;;;CACF,DCjFD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCA,AAAe,SAASM,QAAM,CAAC,MAAM,EAAE;;EAErC,IAAI,WAAW,GAAG,IAAI,CAAC;EACvB,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,MAAO,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EACzB,OAAO,OAAO,CAAC;;;CAChB,DC9BD,SAAS,aAAa,GAAG;EACvB,MAAM,IAAI,SAAS,CAAC,oFAAoF,CAAC,CAAC;CAC3G;;AAED,SAAS,QAAQ,GAAG;EAClB,MAAM,IAAI,SAAS,CAAC,uHAAuH,CAAC,CAAC;CAC9I;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0GD,IAAIF,SAAO,GAAG,YAAY;EACxB,SAAS,OAAO,CAAC,QAAQ,EAAE;IACzB,IAAI,CAAC,UAAU,CAAC,GAAG,MAAM,EAAE,CAAC;IAC5B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;IACvC,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;;IAEvB,IAAI,IAAI,KAAK,QAAQ,EAAE;MACrB,OAAO,QAAQ,KAAK,UAAU,IAAI,aAAa,EAAE,CAAC;MAClD,IAAI,YAAY,OAAO,GAAG,iBAAiB,CAAC,IAAI,EAAE,QAAQ,CAAC,GAAG,QAAQ,EAAE,CAAC;KAC1E;GACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA4LD,OAAO,CAAC,SAAS,CAAC,KAAK,GAAG,SAAS,MAAM,CAAC,WAAW,EAAE;IACrD,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;GACrC,CAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA0CF,OAAO,CAAC,SAAS,CAAC,OAAO,GAAG,SAAS,QAAQ,CAAC,QAAQ,EAAE;IACtD,IAAI,OAAO,GAAG,IAAI,CAAC;IACnB,IAAI,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;;IAEtC,OAAO,OAAO,CAAC,IAAI,CAAC,UAAU,KAAK,EAAE;MACnC,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;QACtD,OAAO,KAAK,CAAC;OACd,CAAC,CAAC;KACJ,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;QACtD,MAAM,MAAM,CAAC;OACd,CAAC,CAAC;KACJ,CAAC,CAAC;GACJ,CAAC;;EAEF,OAAO,OAAO,CAAC;CAChB,EAAE,CAAC;;AAEJA,SAAO,CAAC,SAAS,CAAC,IAAI,GAAG,IAAI,CAAC;AAC9B,AACAA,SAAO,CAAC,GAAG,GAAG,GAAG,CAAC;AAClBA,SAAO,CAAC,IAAI,GAAG,IAAI,CAAC;AACpBA,SAAO,CAAC,OAAO,GAAGG,SAAO,CAAC;AAC1BH,SAAO,CAAC,MAAM,GAAGI,QAAM,CAAC;AACxBJ,SAAO,CAAC,aAAa,GAAG,YAAY,CAAC;AACrCA,SAAO,CAAC,QAAQ,GAAG,OAAO,CAAC;AAC3BA,SAAO,CAAC,KAAK,GAAG,IAAI;;ACxYpB;AACA,AAEe,SAAS,QAAQ,GAAG;EACjC,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;;EAEnB,IAAI,OAAO,MAAM,
KAAK,WAAW,EAAE;IACjC,KAAK,GAAG,MAAM,CAAC;GAChB,MAAM,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;IACtC,KAAK,GAAG,IAAI,CAAC;GACd,MAAM;IACL,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC;KACnC,CAAC,OAAO,CAAC,EAAE;MACV,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;KAC7F;GACF;;EAED,IAAI,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC;;EAEtB,IAAI,CAAC,EAAE;IACL,IAAI,eAAe,GAAG,IAAI,CAAC;IAC3B,IAAI;MACF,eAAe,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KAC/D,CAAC,OAAO,CAAC,EAAE;;KAEX;;IAED,IAAI,eAAe,KAAK,kBAAkB,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE;MACrD,OAAO;KACR;GACF;;EAED,KAAK,CAAC,OAAO,GAAGA,SAAO,CAAC;;;CACzB,DC/BD;AACAA,SAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC;AAC5BA,SAAO,CAAC,OAAO,GAAGA,SAAO,CAAC;;ACJ1BA,SAAO,CAAC,QAAQ,EAAE,CAAC;;;;;;;;","file":"es6-promise.auto.min.js"} \ No newline at end of file +{"version":3,"sources":["config/versionTemplate.txt","lib/es6-promise/utils.js","lib/es6-promise/asap.js","lib/es6-promise/then.js","lib/es6-promise/promise/resolve.js","lib/es6-promise/-internal.js","lib/es6-promise/enumerator.js","lib/es6-promise/promise/all.js","lib/es6-promise/promise/race.js","lib/es6-promise/promise/reject.js","lib/es6-promise/promise.js","lib/es6-promise/polyfill.js","lib/es6-promise.js","lib/es6-promise.auto.js"],"sourcesContent":["/*!\n * @overview es6-promise - a tiny implementation of Promises/A+.\n * @copyright Copyright (c) 2014 Yehuda Katz, Tom Dale, Stefan Penner and contributors (Conversion to ES6 API by Jake Archibald)\n * @license Licensed under MIT license\n * See https://raw.githubusercontent.com/stefanpenner/es6-promise/master/LICENSE\n * @version v4.2.8+1e68dce6\n */\n","export function objectOrFunction(x) {\n var type = typeof x;\n return x !== null && (type === 'object' || type === 'function');\n}\n\nexport function isFunction(x) {\n return typeof x === 'function';\n}\n\nexport function isMaybeThenable(x) {\n return x !== null && typeof x === 'object';\n}\n\nvar _isArray = void 0;\nif (Array.isArray) {\n _isArray = Array.isArray;\n} else {\n _isArray = function (x) {\n return Object.prototype.toString.call(x) === '[object Array]';\n };\n}\n\nexport var isArray = _isArray;","var len = 0;\nvar vertxNext = void 0;\nvar customSchedulerFn = void 0;\n\nexport var asap = function asap(callback, arg) {\n queue[len] = callback;\n queue[len + 1] = arg;\n len += 2;\n if (len === 2) {\n // If len is 2, that means that we need to schedule an async flush.\n // If additional callbacks are queued before the queue is flushed, they\n // will be processed by this flush that we are scheduling.\n if (customSchedulerFn) {\n customSchedulerFn(flush);\n } else {\n scheduleFlush();\n }\n }\n};\n\nexport function setScheduler(scheduleFn) {\n customSchedulerFn = scheduleFn;\n}\n\nexport function setAsap(asapFn) {\n asap = asapFn;\n}\n\nvar browserWindow = typeof window !== 'undefined' ? 
window : undefined;\nvar browserGlobal = browserWindow || {};\nvar BrowserMutationObserver = browserGlobal.MutationObserver || browserGlobal.WebKitMutationObserver;\nvar isNode = typeof self === 'undefined' && typeof process !== 'undefined' && {}.toString.call(process) === '[object process]';\n\n// test for web worker but not in IE10\nvar isWorker = typeof Uint8ClampedArray !== 'undefined' && typeof importScripts !== 'undefined' && typeof MessageChannel !== 'undefined';\n\n// node\nfunction useNextTick() {\n // node version 0.10.x displays a deprecation warning when nextTick is used recursively\n // see https://github.com/cujojs/when/issues/410 for details\n return function () {\n return process.nextTick(flush);\n };\n}\n\n// vertx\nfunction useVertxTimer() {\n if (typeof vertxNext !== 'undefined') {\n return function () {\n vertxNext(flush);\n };\n }\n\n return useSetTimeout();\n}\n\nfunction useMutationObserver() {\n var iterations = 0;\n var observer = new BrowserMutationObserver(flush);\n var node = document.createTextNode('');\n observer.observe(node, { characterData: true });\n\n return function () {\n node.data = iterations = ++iterations % 2;\n };\n}\n\n// web worker\nfunction useMessageChannel() {\n var channel = new MessageChannel();\n channel.port1.onmessage = flush;\n return function () {\n return channel.port2.postMessage(0);\n };\n}\n\nfunction useSetTimeout() {\n // Store setTimeout reference so es6-promise will be unaffected by\n // other code modifying setTimeout (like sinon.useFakeTimers())\n var globalSetTimeout = setTimeout;\n return function () {\n return globalSetTimeout(flush, 1);\n };\n}\n\nvar queue = new Array(1000);\nfunction flush() {\n for (var i = 0; i < len; i += 2) {\n var callback = queue[i];\n var arg = queue[i + 1];\n\n callback(arg);\n\n queue[i] = undefined;\n queue[i + 1] = undefined;\n }\n\n len = 0;\n}\n\nfunction attemptVertx() {\n try {\n var vertx = Function('return this')().require('vertx');\n vertxNext = vertx.runOnLoop || vertx.runOnContext;\n return useVertxTimer();\n } catch (e) {\n return useSetTimeout();\n }\n}\n\nvar scheduleFlush = void 0;\n// Decide what async method to use to triggering processing of queued callbacks:\nif (isNode) {\n scheduleFlush = useNextTick();\n} else if (BrowserMutationObserver) {\n scheduleFlush = useMutationObserver();\n} else if (isWorker) {\n scheduleFlush = useMessageChannel();\n} else if (browserWindow === undefined && typeof require === 'function') {\n scheduleFlush = attemptVertx();\n} else {\n scheduleFlush = useSetTimeout();\n}","import { invokeCallback, subscribe, FULFILLED, REJECTED, noop, makePromise, PROMISE_ID } from './-internal';\n\nimport { asap } from './asap';\n\nexport default function then(onFulfillment, onRejection) {\n var parent = this;\n\n var child = new this.constructor(noop);\n\n if (child[PROMISE_ID] === undefined) {\n makePromise(child);\n }\n\n var _state = parent._state;\n\n\n if (_state) {\n var callback = arguments[_state - 1];\n asap(function () {\n return invokeCallback(_state, child, callback, parent._result);\n });\n } else {\n subscribe(parent, child, onFulfillment, onRejection);\n }\n\n return child;\n}","import { noop, resolve as _resolve } from '../-internal';\n\n/**\n `Promise.resolve` returns a promise that will become resolved with the\n passed `value`. 
It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n resolve(1);\n });\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n ```javascript\n let promise = Promise.resolve(1);\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n @method resolve\n @static\n @param {Any} value value that the returned promise will be resolved with\n Useful for tooling.\n @return {Promise} a promise that will become fulfilled with the given\n `value`\n*/\nexport default function resolve(object) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (object && typeof object === 'object' && object.constructor === Constructor) {\n return object;\n }\n\n var promise = new Constructor(noop);\n _resolve(promise, object);\n return promise;\n}","import { objectOrFunction, isFunction } from './utils';\n\nimport { asap } from './asap';\n\nimport originalThen from './then';\nimport originalResolve from './promise/resolve';\n\nexport var PROMISE_ID = Math.random().toString(36).substring(2);\n\nfunction noop() {}\n\nvar PENDING = void 0;\nvar FULFILLED = 1;\nvar REJECTED = 2;\n\nfunction selfFulfillment() {\n return new TypeError(\"You cannot resolve a promise with itself\");\n}\n\nfunction cannotReturnOwn() {\n return new TypeError('A promises callback cannot return that same promise.');\n}\n\nfunction tryThen(then, value, fulfillmentHandler, rejectionHandler) {\n try {\n then.call(value, fulfillmentHandler, rejectionHandler);\n } catch (e) {\n return e;\n }\n}\n\nfunction handleForeignThenable(promise, thenable, then) {\n asap(function (promise) {\n var sealed = false;\n var error = tryThen(then, thenable, function (value) {\n if (sealed) {\n return;\n }\n sealed = true;\n if (thenable !== value) {\n resolve(promise, value);\n } else {\n fulfill(promise, value);\n }\n }, function (reason) {\n if (sealed) {\n return;\n }\n sealed = true;\n\n reject(promise, reason);\n }, 'Settle: ' + (promise._label || ' unknown promise'));\n\n if (!sealed && error) {\n sealed = true;\n reject(promise, error);\n }\n }, promise);\n}\n\nfunction handleOwnThenable(promise, thenable) {\n if (thenable._state === FULFILLED) {\n fulfill(promise, thenable._result);\n } else if (thenable._state === REJECTED) {\n reject(promise, thenable._result);\n } else {\n subscribe(thenable, undefined, function (value) {\n return resolve(promise, value);\n }, function (reason) {\n return reject(promise, reason);\n });\n }\n}\n\nfunction handleMaybeThenable(promise, maybeThenable, then) {\n if (maybeThenable.constructor === promise.constructor && then === originalThen && maybeThenable.constructor.resolve === originalResolve) {\n handleOwnThenable(promise, maybeThenable);\n } else {\n if (then === undefined) {\n fulfill(promise, maybeThenable);\n } else if (isFunction(then)) {\n handleForeignThenable(promise, maybeThenable, then);\n } else {\n fulfill(promise, maybeThenable);\n }\n }\n}\n\nfunction resolve(promise, value) {\n if (promise === value) {\n reject(promise, selfFulfillment());\n } else if (objectOrFunction(value)) {\n var then = void 0;\n try {\n then = value.then;\n } catch (error) {\n reject(promise, error);\n return;\n }\n handleMaybeThenable(promise, value, then);\n } else {\n fulfill(promise, value);\n }\n}\n\nfunction publishRejection(promise) {\n if (promise._onerror) {\n promise._onerror(promise._result);\n }\n\n publish(promise);\n}\n\nfunction fulfill(promise, value) {\n if 
(promise._state !== PENDING) {\n return;\n }\n\n promise._result = value;\n promise._state = FULFILLED;\n\n if (promise._subscribers.length !== 0) {\n asap(publish, promise);\n }\n}\n\nfunction reject(promise, reason) {\n if (promise._state !== PENDING) {\n return;\n }\n promise._state = REJECTED;\n promise._result = reason;\n\n asap(publishRejection, promise);\n}\n\nfunction subscribe(parent, child, onFulfillment, onRejection) {\n var _subscribers = parent._subscribers;\n var length = _subscribers.length;\n\n\n parent._onerror = null;\n\n _subscribers[length] = child;\n _subscribers[length + FULFILLED] = onFulfillment;\n _subscribers[length + REJECTED] = onRejection;\n\n if (length === 0 && parent._state) {\n asap(publish, parent);\n }\n}\n\nfunction publish(promise) {\n var subscribers = promise._subscribers;\n var settled = promise._state;\n\n if (subscribers.length === 0) {\n return;\n }\n\n var child = void 0,\n callback = void 0,\n detail = promise._result;\n\n for (var i = 0; i < subscribers.length; i += 3) {\n child = subscribers[i];\n callback = subscribers[i + settled];\n\n if (child) {\n invokeCallback(settled, child, callback, detail);\n } else {\n callback(detail);\n }\n }\n\n promise._subscribers.length = 0;\n}\n\nfunction invokeCallback(settled, promise, callback, detail) {\n var hasCallback = isFunction(callback),\n value = void 0,\n error = void 0,\n succeeded = true;\n\n if (hasCallback) {\n try {\n value = callback(detail);\n } catch (e) {\n succeeded = false;\n error = e;\n }\n\n if (promise === value) {\n reject(promise, cannotReturnOwn());\n return;\n }\n } else {\n value = detail;\n }\n\n if (promise._state !== PENDING) {\n // noop\n } else if (hasCallback && succeeded) {\n resolve(promise, value);\n } else if (succeeded === false) {\n reject(promise, error);\n } else if (settled === FULFILLED) {\n fulfill(promise, value);\n } else if (settled === REJECTED) {\n reject(promise, value);\n }\n}\n\nfunction initializePromise(promise, resolver) {\n try {\n resolver(function resolvePromise(value) {\n resolve(promise, value);\n }, function rejectPromise(reason) {\n reject(promise, reason);\n });\n } catch (e) {\n reject(promise, e);\n }\n}\n\nvar id = 0;\nfunction nextId() {\n return id++;\n}\n\nfunction makePromise(promise) {\n promise[PROMISE_ID] = id++;\n promise._state = undefined;\n promise._result = undefined;\n promise._subscribers = [];\n}\n\nexport { nextId, makePromise, noop, resolve, reject, fulfill, subscribe, publish, publishRejection, initializePromise, invokeCallback, FULFILLED, REJECTED, PENDING, handleMaybeThenable };","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isArray, isMaybeThenable } from './utils';\nimport { noop, reject, fulfill, subscribe, FULFILLED, REJECTED, PENDING, handleMaybeThenable } from './-internal';\n\nimport then from './then';\nimport Promise from './promise';\nimport originalResolve from './promise/resolve';\nimport originalThen from './then';\nimport { makePromise, PROMISE_ID } from './-internal';\n\nfunction validationError() {\n return new Error('Array Methods must be provided an Array');\n};\n\nvar Enumerator = function () {\n function Enumerator(Constructor, input) {\n this._instanceConstructor = Constructor;\n this.promise = new Constructor(noop);\n\n if (!this.promise[PROMISE_ID]) {\n makePromise(this.promise);\n }\n\n if (isArray(input)) {\n this.length = input.length;\n this._remaining = 
input.length;\n\n this._result = new Array(this.length);\n\n if (this.length === 0) {\n fulfill(this.promise, this._result);\n } else {\n this.length = this.length || 0;\n this._enumerate(input);\n if (this._remaining === 0) {\n fulfill(this.promise, this._result);\n }\n }\n } else {\n reject(this.promise, validationError());\n }\n }\n\n Enumerator.prototype._enumerate = function _enumerate(input) {\n for (var i = 0; this._state === PENDING && i < input.length; i++) {\n this._eachEntry(input[i], i);\n }\n };\n\n Enumerator.prototype._eachEntry = function _eachEntry(entry, i) {\n var c = this._instanceConstructor;\n var resolve = c.resolve;\n\n\n if (resolve === originalResolve) {\n var _then = void 0;\n var error = void 0;\n var didError = false;\n try {\n _then = entry.then;\n } catch (e) {\n didError = true;\n error = e;\n }\n\n if (_then === originalThen && entry._state !== PENDING) {\n this._settledAt(entry._state, i, entry._result);\n } else if (typeof _then !== 'function') {\n this._remaining--;\n this._result[i] = entry;\n } else if (c === Promise) {\n var promise = new c(noop);\n if (didError) {\n reject(promise, error);\n } else {\n handleMaybeThenable(promise, entry, _then);\n }\n this._willSettleAt(promise, i);\n } else {\n this._willSettleAt(new c(function (resolve) {\n return resolve(entry);\n }), i);\n }\n } else {\n this._willSettleAt(resolve(entry), i);\n }\n };\n\n Enumerator.prototype._settledAt = function _settledAt(state, i, value) {\n var promise = this.promise;\n\n\n if (promise._state === PENDING) {\n this._remaining--;\n\n if (state === REJECTED) {\n reject(promise, value);\n } else {\n this._result[i] = value;\n }\n }\n\n if (this._remaining === 0) {\n fulfill(promise, this._result);\n }\n };\n\n Enumerator.prototype._willSettleAt = function _willSettleAt(promise, i) {\n var enumerator = this;\n\n subscribe(promise, undefined, function (value) {\n return enumerator._settledAt(FULFILLED, i, value);\n }, function (reason) {\n return enumerator._settledAt(REJECTED, i, reason);\n });\n };\n\n return Enumerator;\n}();\n\nexport default Enumerator;\n;","import Enumerator from '../enumerator';\n\n/**\n `Promise.all` accepts an array of promises, and returns a new promise which\n is fulfilled with an array of fulfillment values for the passed promises, or\n rejected with the reason of the first passed promise to be rejected. It casts all\n elements of the passed iterable to promises as it runs this algorithm.\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = resolve(2);\n let promise3 = resolve(3);\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // The array here would be [ 1, 2, 3 ];\n });\n ```\n\n If any of the `promises` given to `all` are rejected, the first promise\n that is rejected will be given as an argument to the returned promises's\n rejection handler. 
For example:\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = reject(new Error(\"2\"));\n let promise3 = reject(new Error(\"3\"));\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // Code here never runs because there are rejected promises!\n }, function(error) {\n // error.message === \"2\"\n });\n ```\n\n @method all\n @static\n @param {Array} entries array of promises\n @param {String} label optional string for labeling the promise.\n Useful for tooling.\n @return {Promise} promise that is fulfilled when all `promises` have been\n fulfilled, or rejected if any of them become rejected.\n @static\n*/\nexport default function all(entries) {\n return new Enumerator(this, entries).promise;\n}","import { isArray } from \"../utils\";\n\n/**\n `Promise.race` returns a new promise which is settled in the same way as the\n first passed promise to settle.\n\n Example:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 2');\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // result === 'promise 2' because it was resolved before promise1\n // was resolved.\n });\n ```\n\n `Promise.race` is deterministic in that only the state of the first\n settled promise matters. For example, even if other promises given to the\n `promises` array argument are resolved, but the first settled promise has\n become rejected before the other promises became fulfilled, the returned\n promise will become rejected:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n reject(new Error('promise 2'));\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // Code here never runs\n }, function(reason){\n // reason.message === 'promise 2' because promise 2 became rejected before\n // promise 1 became fulfilled\n });\n ```\n\n An example real-world use case is implementing timeouts:\n\n ```javascript\n Promise.race([ajax('foo.json'), timeout(5000)])\n ```\n\n @method race\n @static\n @param {Array} promises array of promises to observe\n Useful for tooling.\n @return {Promise} a promise which settles in the same way as the first passed\n promise to settle.\n*/\nexport default function race(entries) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (!isArray(entries)) {\n return new Constructor(function (_, reject) {\n return reject(new TypeError('You must pass an array to race.'));\n });\n } else {\n return new Constructor(function (resolve, reject) {\n var length = entries.length;\n for (var i = 0; i < length; i++) {\n Constructor.resolve(entries[i]).then(resolve, reject);\n }\n });\n }\n}","import { noop, reject as _reject } from '../-internal';\n\n/**\n `Promise.reject` returns a promise rejected with the passed `reason`.\n It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n reject(new Error('WHOOPS'));\n });\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n 
```javascript\n let promise = Promise.reject(new Error('WHOOPS'));\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n @method reject\n @static\n @param {Any} reason value that the returned promise will be rejected with.\n Useful for tooling.\n @return {Promise} a promise rejected with the given `reason`.\n*/\nexport default function reject(reason) {\n /*jshint validthis:true */\n var Constructor = this;\n var promise = new Constructor(noop);\n _reject(promise, reason);\n return promise;\n}","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isFunction } from './utils';\nimport { noop, nextId, PROMISE_ID, initializePromise } from './-internal';\nimport { asap, setAsap, setScheduler } from './asap';\n\nimport all from './promise/all';\nimport race from './promise/race';\nimport Resolve from './promise/resolve';\nimport Reject from './promise/reject';\nimport then from './then';\n\nfunction needsResolver() {\n throw new TypeError('You must pass a resolver function as the first argument to the promise constructor');\n}\n\nfunction needsNew() {\n throw new TypeError(\"Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.\");\n}\n\n/**\n Promise objects represent the eventual result of an asynchronous operation. The\n primary way of interacting with a promise is through its `then` method, which\n registers callbacks to receive either a promise's eventual value or the reason\n why the promise cannot be fulfilled.\n\n Terminology\n -----------\n\n - `promise` is an object or function with a `then` method whose behavior conforms to this specification.\n - `thenable` is an object or function that defines a `then` method.\n - `value` is any legal JavaScript value (including undefined, a thenable, or a promise).\n - `exception` is a value that is thrown using the throw statement.\n - `reason` is a value that indicates why a promise was rejected.\n - `settled` the final resting state of a promise, fulfilled or rejected.\n\n A promise can be in one of three states: pending, fulfilled, or rejected.\n\n Promises that are fulfilled have a fulfillment value and are in the fulfilled\n state. Promises that are rejected have a rejection reason and are in the\n rejected state. A fulfillment value is never a thenable.\n\n Promises can also be said to *resolve* a value. If this value is also a\n promise, then the original promise's settled state will match the value's\n settled state. 
So a promise that *resolves* a promise that rejects will\n itself reject, and a promise that *resolves* a promise that fulfills will\n itself fulfill.\n\n\n Basic Usage:\n ------------\n\n ```js\n let promise = new Promise(function(resolve, reject) {\n // on success\n resolve(value);\n\n // on failure\n reject(reason);\n });\n\n promise.then(function(value) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Advanced Usage:\n ---------------\n\n Promises shine when abstracting away asynchronous interactions such as\n `XMLHttpRequest`s.\n\n ```js\n function getJSON(url) {\n return new Promise(function(resolve, reject){\n let xhr = new XMLHttpRequest();\n\n xhr.open('GET', url);\n xhr.onreadystatechange = handler;\n xhr.responseType = 'json';\n xhr.setRequestHeader('Accept', 'application/json');\n xhr.send();\n\n function handler() {\n if (this.readyState === this.DONE) {\n if (this.status === 200) {\n resolve(this.response);\n } else {\n reject(new Error('getJSON: `' + url + '` failed with status: [' + this.status + ']'));\n }\n }\n };\n });\n }\n\n getJSON('/posts.json').then(function(json) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Unlike callbacks, promises are great composable primitives.\n\n ```js\n Promise.all([\n getJSON('/posts'),\n getJSON('/comments')\n ]).then(function(values){\n values[0] // => postsJSON\n values[1] // => commentsJSON\n\n return values;\n });\n ```\n\n @class Promise\n @param {Function} resolver\n Useful for tooling.\n @constructor\n*/\n\nvar Promise = function () {\n function Promise(resolver) {\n this[PROMISE_ID] = nextId();\n this._result = this._state = undefined;\n this._subscribers = [];\n\n if (noop !== resolver) {\n typeof resolver !== 'function' && needsResolver();\n this instanceof Promise ? initializePromise(this, resolver) : needsNew();\n }\n }\n\n /**\n The primary way of interacting with a promise is through its `then` method,\n which registers callbacks to receive either a promise's eventual value or the\n reason why the promise cannot be fulfilled.\n ```js\n findUser().then(function(user){\n // user is available\n }, function(reason){\n // user is unavailable, and you are given the reason why\n });\n ```\n Chaining\n --------\n The return value of `then` is itself a promise. 
This second, 'downstream'\n promise is resolved with the return value of the first promise's fulfillment\n or rejection handler, or rejected if the handler throws an exception.\n ```js\n findUser().then(function (user) {\n return user.name;\n }, function (reason) {\n return 'default name';\n }).then(function (userName) {\n // If `findUser` fulfilled, `userName` will be the user's name, otherwise it\n // will be `'default name'`\n });\n findUser().then(function (user) {\n throw new Error('Found user, but still unhappy');\n }, function (reason) {\n throw new Error('`findUser` rejected and we're unhappy');\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // if `findUser` fulfilled, `reason` will be 'Found user, but still unhappy'.\n // If `findUser` rejected, `reason` will be '`findUser` rejected and we're unhappy'.\n });\n ```\n If the downstream promise does not specify a rejection handler, rejection reasons will be propagated further downstream.\n ```js\n findUser().then(function (user) {\n throw new PedagogicalException('Upstream error');\n }).then(function (value) {\n // never reached\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // The `PedgagocialException` is propagated all the way down to here\n });\n ```\n Assimilation\n ------------\n Sometimes the value you want to propagate to a downstream promise can only be\n retrieved asynchronously. This can be achieved by returning a promise in the\n fulfillment or rejection handler. The downstream promise will then be pending\n until the returned promise is settled. This is called *assimilation*.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // The user's comments are now available\n });\n ```\n If the assimliated promise rejects, then the downstream promise will also reject.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // If `findCommentsByAuthor` fulfills, we'll have the value here\n }, function (reason) {\n // If `findCommentsByAuthor` rejects, we'll have the reason here\n });\n ```\n Simple Example\n --------------\n Synchronous Example\n ```javascript\n let result;\n try {\n result = findResult();\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n findResult(function(result, err){\n if (err) {\n // failure\n } else {\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findResult().then(function(result){\n // success\n }, function(reason){\n // failure\n });\n ```\n Advanced Example\n --------------\n Synchronous Example\n ```javascript\n let author, books;\n try {\n author = findAuthor();\n books = findBooksByAuthor(author);\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n function foundBooks(books) {\n }\n function failure(reason) {\n }\n findAuthor(function(author, err){\n if (err) {\n failure(err);\n // failure\n } else {\n try {\n findBoooksByAuthor(author, function(books, err) {\n if (err) {\n failure(err);\n } else {\n try {\n foundBooks(books);\n } catch(reason) {\n failure(reason);\n }\n }\n });\n } catch(error) {\n failure(err);\n }\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findAuthor().\n then(findBooksByAuthor).\n then(function(books){\n // found books\n }).catch(function(reason){\n // something went wrong\n });\n ```\n @method then\n @param {Function} onFulfilled\n @param {Function} onRejected\n Useful for tooling.\n @return 
{Promise}\n */\n\n /**\n `catch` is simply sugar for `then(undefined, onRejection)` which makes it the same\n as the catch block of a try/catch statement.\n ```js\n function findAuthor(){\n throw new Error('couldn't find that author');\n }\n // synchronous\n try {\n findAuthor();\n } catch(reason) {\n // something went wrong\n }\n // async with promises\n findAuthor().catch(function(reason){\n // something went wrong\n });\n ```\n @method catch\n @param {Function} onRejection\n Useful for tooling.\n @return {Promise}\n */\n\n\n Promise.prototype.catch = function _catch(onRejection) {\n return this.then(null, onRejection);\n };\n\n /**\n `finally` will be invoked regardless of the promise's fate just as native\n try/catch/finally behaves\n \n Synchronous example:\n \n ```js\n findAuthor() {\n if (Math.random() > 0.5) {\n throw new Error();\n }\n return new Author();\n }\n \n try {\n return findAuthor(); // succeed or fail\n } catch(error) {\n return findOtherAuther();\n } finally {\n // always runs\n // doesn't affect the return value\n }\n ```\n \n Asynchronous example:\n \n ```js\n findAuthor().catch(function(reason){\n return findOtherAuther();\n }).finally(function(){\n // author was either found, or not\n });\n ```\n \n @method finally\n @param {Function} callback\n @return {Promise}\n */\n\n\n Promise.prototype.finally = function _finally(callback) {\n var promise = this;\n var constructor = promise.constructor;\n\n if (isFunction(callback)) {\n return promise.then(function (value) {\n return constructor.resolve(callback()).then(function () {\n return value;\n });\n }, function (reason) {\n return constructor.resolve(callback()).then(function () {\n throw reason;\n });\n });\n }\n\n return promise.then(callback, callback);\n };\n\n return Promise;\n}();\n\nPromise.prototype.then = then;\nexport default Promise;\nPromise.all = all;\nPromise.race = race;\nPromise.resolve = Resolve;\nPromise.reject = Reject;\nPromise._setScheduler = setScheduler;\nPromise._setAsap = setAsap;\nPromise._asap = asap;","/*global self*/\nimport Promise from './promise';\n\nexport default function polyfill() {\n var local = void 0;\n\n if (typeof global !== 'undefined') {\n local = global;\n } else if (typeof self !== 'undefined') {\n local = self;\n } else {\n try {\n local = Function('return this')();\n } catch (e) {\n throw new Error('polyfill failed because global object is unavailable in this environment');\n }\n }\n\n var P = local.Promise;\n\n if (P) {\n var promiseToString = null;\n try {\n promiseToString = Object.prototype.toString.call(P.resolve());\n } catch (e) {\n // silently ignored\n }\n\n if (promiseToString === '[object Promise]' && !P.cast) {\n return;\n }\n }\n\n local.Promise = Promise;\n}","import Promise from './es6-promise/promise';\nimport polyfill from './es6-promise/polyfill';\n\n// Strange compat..\nPromise.polyfill = polyfill;\nPromise.Promise = Promise;\nexport default Promise;","import Promise from './es6-promise';\nPromise.polyfill();\nexport default 
Promise;"],"names":["resolve","_resolve","then","originalThen","originalResolve","Promise","reject","_reject","Resolve","Reject"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACNO,SAAS,gBAAgB,CAAC,CAAC,EAAE;EAClC,IAAI,IAAI,GAAG,OAAO,CAAC,CAAC;EACpB,OAAO,CAAC,KAAK,IAAI,KAAK,IAAI,KAAK,QAAQ,IAAI,IAAI,KAAK,UAAU,CAAC,CAAC;CACjE;;AAED,AAAO,SAAS,UAAU,CAAC,CAAC,EAAE;EAC5B,OAAO,OAAO,CAAC,KAAK,UAAU,CAAC;CAChC;;AAED,AAEC;;AAED,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC;AACtB,IAAI,KAAK,CAAC,OAAO,EAAE;EACjB,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC;CAC1B,MAAM;EACL,QAAQ,GAAG,UAAU,CAAC,EAAE;IACtB,OAAO,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,gBAAgB,CAAC;GAC/D,CAAC;CACH;;AAED,AAAO,IAAI,OAAO,GAAG,QAAQ;;ACtB7B,IAAI,GAAG,GAAG,CAAC,CAAC;AACZ,IAAI,SAAS,GAAG,KAAK,CAAC,CAAC;AACvB,IAAI,iBAAiB,GAAG,KAAK,CAAC,CAAC;;AAE/B,AAAO,IAAI,IAAI,GAAG,SAAS,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;EAC7C,KAAK,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC;EACtB,KAAK,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;EACrB,GAAG,IAAI,CAAC,CAAC;EACT,IAAI,GAAG,KAAK,CAAC,EAAE;;;;IAIb,IAAI,iBAAiB,EAAE;MACrB,iBAAiB,CAAC,KAAK,CAAC,CAAC;KAC1B,MAAM;MACL,aAAa,EAAE,CAAC;KACjB;GACF;CACF,CAAC;;AAEF,AAAO,SAAS,YAAY,CAAC,UAAU,EAAE;EACvC,iBAAiB,GAAG,UAAU,CAAC;CAChC;;AAED,AAAO,SAAS,OAAO,CAAC,MAAM,EAAE;EAC9B,IAAI,GAAG,MAAM,CAAC;CACf;;AAED,IAAI,aAAa,GAAG,OAAO,MAAM,KAAK,WAAW,GAAG,MAAM,GAAG,SAAS,CAAC;AACvE,IAAI,aAAa,GAAG,aAAa,IAAI,EAAE,CAAC;AACxC,IAAI,uBAAuB,GAAG,aAAa,CAAC,gBAAgB,IAAI,aAAa,CAAC,sBAAsB,CAAC;AACrG,IAAI,MAAM,GAAG,OAAO,IAAI,KAAK,WAAW,IAAI,OAAO,OAAO,KAAK,WAAW,IAAI,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,kBAAkB,CAAC;;;AAG/H,IAAI,QAAQ,GAAG,OAAO,iBAAiB,KAAK,WAAW,IAAI,OAAO,aAAa,KAAK,WAAW,IAAI,OAAO,cAAc,KAAK,WAAW,CAAC;;;AAGzI,SAAS,WAAW,GAAG;;;EAGrB,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;GAChC,CAAC;CACH;;;AAGD,SAAS,aAAa,GAAG;EACvB,IAAI,OAAO,SAAS,KAAK,WAAW,EAAE;IACpC,OAAO,YAAY;MACjB,SAAS,CAAC,KAAK,CAAC,CAAC;KAClB,CAAC;GACH;;EAED,OAAO,aAAa,EAAE,CAAC;CACxB;;AAED,SAAS,mBAAmB,GAAG;EAC7B,IAAI,UAAU,GAAG,CAAC,CAAC;EACnB,IAAI,QAAQ,GAAG,IAAI,uBAAuB,CAAC,KAAK,CAAC,CAAC;EAClD,IAAI,IAAI,GAAG,QAAQ,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC;EACvC,QAAQ,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;;EAEhD,OAAO,YAAY;IACjB,IAAI,CAAC,IAAI,GAAG,UAAU,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC;GAC3C,CAAC;CACH;;;AAGD,SAAS,iBAAiB,GAAG;EAC3B,IAAI,OAAO,GAAG,IAAI,cAAc,EAAE,CAAC;EACnC,OAAO,CAAC,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC;EAChC,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;GACrC,CAAC;CACH;;AAED,SAAS,aAAa,GAAG;;;EAGvB,IAAI,gBAAgB,GAAG,UAAU,CAAC;EAClC,OAAO,YAAY;IACjB,OAAO,gBAAgB,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;GACnC,CAAC;CACH;;AAED,IAAI,KAAK,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC;AAC5B,SAAS,KAAK,GAAG;EACf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE;IAC/B,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,GAAG,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;;IAEvB,QAAQ,CAAC,GAAG,CAAC,CAAC;;IAEd,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC;IACrB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;GAC1B;;EAED,GAAG,GAAG,CAAC,CAAC;CACT;;AAED,SAAS,YAAY,GAAG;EACtB,IAAI;IACF,IAAI,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;IACvD,SAAS,GAAG,KAAK,CAAC,SAAS,IAAI,KAAK,CAAC,YAAY,CAAC;IAClD,OAAO,aAAa,EAAE,CAAC;GACxB,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,aAAa,EAAE,CAAC;GACxB;CACF;;AAED,IAAI,aAAa,GAAG,KAAK,CAAC,CAAC;;AAE3B,IAAI,MAAM,EAAE;EACV,aAAa,GAAG,WAAW,EAAE,CAAC;CAC/B,MAAM,IAAI,uBAAuB,EAAE;EAClC,aAAa,GAAG,mBAAmB,EAAE,CAAC;CACvC,MAAM,IAAI,QAAQ,EAAE;EACnB,aAAa,GAAG,iBAAiB,EAAE,CAAC;CACrC,MAAM,IAAI,aAAa,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;EACvE,aAAa,GAAG,YAAY,EAAE,CAAC;CAChC,MAAM;EACL,aA
Aa,GAAG,aAAa,EAAE,CAAC;;;CACjC,DCtHc,SAAS,IAAI,CAAC,aAAa,EAAE,WAAW,EAAE;EACvD,IAAI,MAAM,GAAG,IAAI,CAAC;;EAElB,IAAI,KAAK,GAAG,IAAI,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;;EAEvC,IAAI,KAAK,CAAC,UAAU,CAAC,KAAK,SAAS,EAAE;IACnC,WAAW,CAAC,KAAK,CAAC,CAAC;GACpB;;EAED,IAAI,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;;;EAG3B,IAAI,MAAM,EAAE;IACV,IAAI,QAAQ,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IACrC,IAAI,CAAC,YAAY;MACf,OAAO,cAAc,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;KAChE,CAAC,CAAC;GACJ,MAAM;IACL,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,CAAC,CAAC;GACtD;;EAED,OAAO,KAAK,CAAC;;;CACd,DCxBD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+BA,AAAe,SAASA,SAAO,CAAC,MAAM,EAAE;;EAEtC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,CAAC,WAAW,KAAK,WAAW,EAAE;IAC9E,OAAO,MAAM,CAAC;GACf;;EAED,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,OAAQ,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EAC1B,OAAO,OAAO,CAAC;;;CAChB,DCrCM,IAAI,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;;AAEhE,SAAS,IAAI,GAAG,EAAE;;AAElB,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC;AACrB,IAAI,SAAS,GAAG,CAAC,CAAC;AAClB,IAAI,QAAQ,GAAG,CAAC,CAAC;;AAEjB,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,0CAA0C,CAAC,CAAC;CAClE;;AAED,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,sDAAsD,CAAC,CAAC;CAC9E;;AAED,SAAS,OAAO,CAACC,OAAI,EAAE,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,EAAE;EAClE,IAAI;IACFA,OAAI,CAAC,IAAI,CAAC,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;GACxD,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,CAAC;GACV;CACF;;AAED,SAAS,qBAAqB,CAAC,OAAO,EAAE,QAAQ,EAAEA,OAAI,EAAE;EACtD,IAAI,CAAC,UAAU,OAAO,EAAE;IACtB,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,IAAI,KAAK,GAAG,OAAO,CAACA,OAAI,EAAE,QAAQ,EAAE,UAAU,KAAK,EAAE;MACnD,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;MACd,IAAI,QAAQ,KAAK,KAAK,EAAE;QACtB,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB,MAAM;QACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB;KACF,EAAE,UAAU,MAAM,EAAE;MACnB,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;;MAEd,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,EAAE,UAAU,IAAI,OAAO,CAAC,MAAM,IAAI,kBAAkB,CAAC,CAAC,CAAC;;IAExD,IAAI,CAAC,MAAM,IAAI,KAAK,EAAE;MACpB,MAAM,GAAG,IAAI,CAAC;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACxB;GACF,EAAE,OAAO,CAAC,CAAC;CACb;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI,QAAQ,CAAC,MAAM,KAAK,SAAS,EAAE;IACjC,OAAO,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACpC,MAAM,IAAI,QAAQ,CAAC,MAAM,KAAK,QAAQ,EAAE;IACvC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACnC,MAAM;IACL,SAAS,CAAC,QAAQ,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC9C,OAAO,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KAChC,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KAChC,CAAC,CAAC;GACJ;CACF;;AAED,SAAS,mBAAmB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,EAAE;EACzD,IAAI,aAAa,CAAC,WAAW,KAAK,OAAO,CAAC,WAAW,IAAIA,OAAI,KAAKC,IAAY,IAAI,aAAa,CAAC,WAAW,CAAC,OAAO,KAAKC,SAAe,EAAE;IACvI,iBAAiB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;GAC3C,MAAM;IACL,IAAIF,OAAI,KAAK,SAAS,EAAE;MACtB,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC,MAAM,IAAI,UAAU,CAACA,OAAI,CAAC,EAAE;MAC3B,qBAAqB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,CAAC,CAAC;KACrD,MAAM;MACL,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC;GACF;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,KAAK,KAAK,EAAE;IACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;GACpC,MAAM,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;IAClC,IAAIA,OAAI,GAAG,KAAK,CAAC,CAAC;IAClB,IAAI;MACFA,OAAI,GAAG,KAAK,CAAC,IAAI,CAAC;KACnB,CAAC,OAAO,KAAK,EAAE;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;MACvB,OAAO;KACR;IACD,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAEA,OAAI,CAAC,CAAC;GAC3C,MAAM;IACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB;CACF;;AAED,SAAS,gBAAgB,CAAC,OAAO,EAAE;EACjC,IAAI
,OAAO,CAAC,QAAQ,EAAE;IACpB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;GACnC;;EAED,OAAO,CAAC,OAAO,CAAC,CAAC;CAClB;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;;EAED,OAAO,CAAC,OAAO,GAAG,KAAK,CAAC;EACxB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;;EAE3B,IAAI,OAAO,CAAC,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;IACrC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;EACD,OAAO,CAAC,MAAM,GAAG,QAAQ,CAAC;EAC1B,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC;;EAEzB,IAAI,CAAC,gBAAgB,EAAE,OAAO,CAAC,CAAC;CACjC;;AAED,SAAS,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,EAAE;EAC5D,IAAI,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;EACvC,IAAI,MAAM,GAAG,YAAY,CAAC,MAAM,CAAC;;;EAGjC,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC;;EAEvB,YAAY,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC;EAC7B,YAAY,CAAC,MAAM,GAAG,SAAS,CAAC,GAAG,aAAa,CAAC;EACjD,YAAY,CAAC,MAAM,GAAG,QAAQ,CAAC,GAAG,WAAW,CAAC;;EAE9C,IAAI,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,MAAM,EAAE;IACjC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;GACvB;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE;EACxB,IAAI,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;EACvC,IAAI,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC;;EAE7B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;IAC5B,OAAO;GACR;;EAED,IAAI,KAAK,GAAG,KAAK,CAAC;MACd,QAAQ,GAAG,KAAK,CAAC;MACjB,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;;EAE7B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,IAAI,CAAC,EAAE;IAC9C,KAAK,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;IACvB,QAAQ,GAAG,WAAW,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC;;IAEpC,IAAI,KAAK,EAAE;MACT,cAAc,CAAC,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;KAClD,MAAM;MACL,QAAQ,CAAC,MAAM,CAAC,CAAC;KAClB;GACF;;EAED,OAAO,CAAC,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC;CACjC;;AAED,SAAS,cAAc,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE;EAC1D,IAAI,WAAW,GAAG,UAAU,CAAC,QAAQ,CAAC;MAClC,KAAK,GAAG,KAAK,CAAC;MACd,KAAK,GAAG,KAAK,CAAC;MACd,SAAS,GAAG,IAAI,CAAC;;EAErB,IAAI,WAAW,EAAE;IACf,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC;KAC1B,CAAC,OAAO,CAAC,EAAE;MACV,SAAS,GAAG,KAAK,CAAC;MAClB,KAAK,GAAG,CAAC,CAAC;KACX;;IAED,IAAI,OAAO,KAAK,KAAK,EAAE;MACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;MACnC,OAAO;KACR;GACF,MAAM;IACL,KAAK,GAAG,MAAM,CAAC;GAChB;;EAED,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;;GAE/B,MAAM,IAAI,WAAW,IAAI,SAAS,EAAE;IACnC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,SAAS,KAAK,KAAK,EAAE;IAC9B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB,MAAM,IAAI,OAAO,KAAK,SAAS,EAAE;IAChC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,OAAO,KAAK,QAAQ,EAAE;IAC/B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI;IACF,QAAQ,CAAC,SAAS,cAAc,CAAC,KAAK,EAAE;MACtC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACzB,EAAE,SAAS,aAAa,CAAC,MAAM,EAAE;MAChC,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,CAAC,CAAC;GACJ,CAAC,OAAO,CAAC,EAAE;IACV,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;GACpB;CACF;;AAED,IAAI,EAAE,GAAG,CAAC,CAAC;AACX,SAAS,MAAM,GAAG;EAChB,OAAO,EAAE,EAAE,CAAC;CACb;;AAED,SAAS,WAAW,CAAC,OAAO,EAAE;EAC5B,OAAO,CAAC,UAAU,CAAC,GAAG,EAAE,EAAE,CAAC;EAC3B,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;EAC3B,OAAO,CAAC,OAAO,GAAG,SAAS,CAAC;EAC5B,OAAO,CAAC,YAAY,GAAG,EAAE,CAAC;CAC3B;;AChOD,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;CAC7D,AAAC;;AAEF,IAAI,UAAU,GAAG,YAAY;EAC3B,SAAS,UAAU,CAAC,WAAW,EAAE,KAAK,EAAE;IACtC,IAAI,CAAC,oBAAoB,GAAG,WAAW,CAAC;IACxC,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;;IAErC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;MAC7B,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;KAC3B;;IAED,IAAI,OAAO,CAAC,KAAK,CAAC,EAAE;MAClB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC;MAC3B,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,MAAM,CAAC;;MAE/B,IAAI,CAAC,OAAO,
GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;;MAEtC,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACrB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;OACrC,MAAM;QACL,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;QACvB,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;UACzB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;SACrC;OACF;KACF,MAAM;MACL,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;KACzC;GACF;;EAED,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE;IAC3D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,KAAK,OAAO,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;MAChE,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC9B;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE;IAC9D,IAAI,CAAC,GAAG,IAAI,CAAC,oBAAoB,CAAC;IAClC,IAAIF,UAAO,GAAG,CAAC,CAAC,OAAO,CAAC;;;IAGxB,IAAIA,UAAO,KAAKI,SAAe,EAAE;MAC/B,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;MACnB,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;MACnB,IAAI,QAAQ,GAAG,KAAK,CAAC;MACrB,IAAI;QACF,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC;OACpB,CAAC,OAAO,CAAC,EAAE;QACV,QAAQ,GAAG,IAAI,CAAC;QAChB,KAAK,GAAG,CAAC,CAAC;OACX;;MAED,IAAI,KAAK,KAAKD,IAAY,IAAI,KAAK,CAAC,MAAM,KAAK,OAAO,EAAE;QACtD,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;OACjD,MAAM,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;QACtC,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB,MAAM,IAAI,CAAC,KAAKE,SAAO,EAAE;QACxB,IAAI,OAAO,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC;QAC1B,IAAI,QAAQ,EAAE;UACZ,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;SACxB,MAAM;UACL,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;SAC5C;QACD,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;OAChC,MAAM;QACL,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,UAAUL,UAAO,EAAE;UAC1C,OAAOA,UAAO,CAAC,KAAK,CAAC,CAAC;SACvB,CAAC,EAAE,CAAC,CAAC,CAAC;OACR;KACF,MAAM;MACL,IAAI,CAAC,aAAa,CAACA,UAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC;KACvC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE,KAAK,EAAE;IACrE,IAAI,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;;;IAG3B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;MAC9B,IAAI,CAAC,UAAU,EAAE,CAAC;;MAElB,IAAI,KAAK,KAAK,QAAQ,EAAE;QACtB,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACxB,MAAM;QACL,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB;KACF;;IAED,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;MACzB,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;KAChC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,aAAa,GAAG,SAAS,aAAa,CAAC,OAAO,EAAE,CAAC,EAAE;IACtE,IAAI,UAAU,GAAG,IAAI,CAAC;;IAEtB,SAAS,CAAC,OAAO,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC7C,OAAO,UAAU,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC;KACnD,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,UAAU,CAAC,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;KACnD,CAAC,CAAC;GACJ,CAAC;;EAEF,OAAO,UAAU,CAAC;CACnB,EAAE;;ACrHH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+CA,AAAe,SAAS,GAAG,CAAC,OAAO,EAAE;EACnC,OAAO,IAAI,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC,OAAO,CAAC;;;CAC9C,DCjDD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiEA,AAAe,SAAS,IAAI,CAAC,OAAO,EAAE;;EAEpC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;IACrB,OAAO,IAAI,WAAW,CAAC,UAAU,CAAC,EAAE,MAAM,EAAE;MAC1C,OAAO,MAAM,CAAC,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAC,CAAC;KACjE,CAAC,CAAC;GACJ,MAAM;IACL,OAAO,IAAI,WAAW,CAAC,UAAU,OAAO,EAAE,MAAM,EAAE;MAChD,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;MAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE;QAC/B,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;OACvD;KACF,CAAC,CAAC;GACJ;;;CACF,DCjFD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCA,AAAe,SAASM,QAAM,CAAC,MAAM,EAAE;;EAErC,IAAI,WAA
W,GAAG,IAAI,CAAC;EACvB,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,MAAO,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EACzB,OAAO,OAAO,CAAC;;;CAChB,DC9BD,SAAS,aAAa,GAAG;EACvB,MAAM,IAAI,SAAS,CAAC,oFAAoF,CAAC,CAAC;CAC3G;;AAED,SAAS,QAAQ,GAAG;EAClB,MAAM,IAAI,SAAS,CAAC,uHAAuH,CAAC,CAAC;CAC9I;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0GD,IAAIF,SAAO,GAAG,YAAY;EACxB,SAAS,OAAO,CAAC,QAAQ,EAAE;IACzB,IAAI,CAAC,UAAU,CAAC,GAAG,MAAM,EAAE,CAAC;IAC5B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;IACvC,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;;IAEvB,IAAI,IAAI,KAAK,QAAQ,EAAE;MACrB,OAAO,QAAQ,KAAK,UAAU,IAAI,aAAa,EAAE,CAAC;MAClD,IAAI,YAAY,OAAO,GAAG,iBAAiB,CAAC,IAAI,EAAE,QAAQ,CAAC,GAAG,QAAQ,EAAE,CAAC;KAC1E;GACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA4LD,OAAO,CAAC,SAAS,CAAC,KAAK,GAAG,SAAS,MAAM,CAAC,WAAW,EAAE;IACrD,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;GACrC,CAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA0CF,OAAO,CAAC,SAAS,CAAC,OAAO,GAAG,SAAS,QAAQ,CAAC,QAAQ,EAAE;IACtD,IAAI,OAAO,GAAG,IAAI,CAAC;IACnB,IAAI,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;;IAEtC,IAAI,UAAU,CAAC,QAAQ,CAAC,EAAE;MACxB,OAAO,OAAO,CAAC,IAAI,CAAC,UAAU,KAAK,EAAE;QACnC,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;UACtD,OAAO,KAAK,CAAC;SACd,CAAC,CAAC;OACJ,EAAE,UAAU,MAAM,EAAE;QACnB,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;UACtD,MAAM,MAAM,CAAC;SACd,CAAC,CAAC;OACJ,CAAC,CAAC;KACJ;;IAED,OAAO,OAAO,CAAC,IAAI,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;GACzC,CAAC;;EAEF,OAAO,OAAO,CAAC;CAChB,EAAE,CAAC;;AAEJA,SAAO,CAAC,SAAS,CAAC,IAAI,GAAG,IAAI,CAAC;AAC9B,AACAA,SAAO,CAAC,GAAG,GAAG,GAAG,CAAC;AAClBA,SAAO,CAAC,IAAI,GAAG,IAAI,CAAC;AACpBA,SAAO,CAAC,OAAO,GAAGG,SAAO,CAAC;AAC1BH,SAAO,CAAC,MAAM,GAAGI,QAAM,CAAC;AACxBJ,SAAO,CAAC,aAAa,GAAG,YAAY,CAAC;AACrCA,SAAO,CAAC,QAAQ,GAAG,OAAO,CAAC;AAC3BA,SAAO,CAAC,KAAK,GAAG,IAAI;;AC5YpB;AACA,AAEe,SAAS,QAAQ,GAAG;EACjC,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;;EAEnB,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;IACjC,KAAK,GAAG,MAAM,CAAC;GAChB,MAAM,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;IACtC,KAAK,GAAG,IAAI,CAAC;GACd,MAAM;IACL,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC;KACnC,CAAC,OAAO,CAAC,EAAE;MACV,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;KAC7F;GACF;;EAED,IAAI,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC;;EAEtB,IAAI,CAAC,EAAE;IACL,IAAI,eAAe,GAAG,IAAI,CAAC;IAC3B,IAAI;MACF,eAAe,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KAC/D,CAAC,OAAO,CAAC,EAAE;;KAEX;;IAED,IAAI,eAAe,KAAK,kBAAkB,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE;MACrD,OAAO;KACR;GACF;;EAED,KAAK,CAAC,OAAO,GAAGA,SAAO,CAAC;;;CACzB,DC/BD;AACAA,SAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC;AAC5BA,SAAO,CAAC,OAAO,GAAGA,SAAO,CAAC;;ACJ1BA,SAAO,CAAC,QAAQ,EAAE,CAAC;;;;;;;;","file":"es6-promise.auto.min.js"} \ No newline at end of file diff --git a/node_modules/es6-promise/dist/es6-promise.js b/node_modules/es6-promise/dist/es6-promise.js index de8392feff720..72fa0da4d3ed7 100644 --- a/node_modules/es6-promise/dist/es6-promise.js +++ b/node_modules/es6-promise/dist/es6-promise.js @@ -3,7 +3,7 @@ * @copyright Copyright (c) 2014 Yehuda Katz, Tom Dale, Stefan Penner and contributors (Conversion to ES6 API by Jake Archibald) * @license Licensed under MIT license * See https://raw.githubusercontent.com/stefanpenner/es6-promise/master/LICENSE - * @version v4.2.4+314e4831 + * @version v4.2.8+1e68dce6 */ (function (global, factory) { @@ -234,8 +234,6 @@ var PENDING = void 0; var FULFILLED = 1; var REJECTED = 2; -var TRY_CATCH_ERROR 
= { error: null }; - function selfFulfillment() { return new TypeError("You cannot resolve a promise with itself"); } @@ -244,15 +242,6 @@ function cannotReturnOwn() { return new TypeError('A promises callback cannot return that same promise.'); } -function getThen(promise) { - try { - return promise.then; - } catch (error) { - TRY_CATCH_ERROR.error = error; - return TRY_CATCH_ERROR; - } -} - function tryThen(then$$1, value, fulfillmentHandler, rejectionHandler) { try { then$$1.call(value, fulfillmentHandler, rejectionHandler); @@ -308,10 +297,7 @@ function handleMaybeThenable(promise, maybeThenable, then$$1) { if (maybeThenable.constructor === promise.constructor && then$$1 === then && maybeThenable.constructor.resolve === resolve$1) { handleOwnThenable(promise, maybeThenable); } else { - if (then$$1 === TRY_CATCH_ERROR) { - reject(promise, TRY_CATCH_ERROR.error); - TRY_CATCH_ERROR.error = null; - } else if (then$$1 === undefined) { + if (then$$1 === undefined) { fulfill(promise, maybeThenable); } else if (isFunction(then$$1)) { handleForeignThenable(promise, maybeThenable, then$$1); @@ -325,7 +311,14 @@ function resolve(promise, value) { if (promise === value) { reject(promise, selfFulfillment()); } else if (objectOrFunction(value)) { - handleMaybeThenable(promise, value, getThen(value)); + var then$$1 = void 0; + try { + then$$1 = value.then; + } catch (error) { + reject(promise, error); + return; + } + handleMaybeThenable(promise, value, then$$1); } else { fulfill(promise, value); } @@ -404,31 +397,18 @@ function publish(promise) { promise._subscribers.length = 0; } -function tryCatch(callback, detail) { - try { - return callback(detail); - } catch (e) { - TRY_CATCH_ERROR.error = e; - return TRY_CATCH_ERROR; - } -} - function invokeCallback(settled, promise, callback, detail) { var hasCallback = isFunction(callback), value = void 0, error = void 0, - succeeded = void 0, - failed = void 0; + succeeded = true; if (hasCallback) { - value = tryCatch(callback, detail); - - if (value === TRY_CATCH_ERROR) { - failed = true; - error = value.error; - value.error = null; - } else { - succeeded = true; + try { + value = callback(detail); + } catch (e) { + succeeded = false; + error = e; } if (promise === value) { @@ -437,14 +417,13 @@ function invokeCallback(settled, promise, callback, detail) { } } else { value = detail; - succeeded = true; } if (promise._state !== PENDING) { // noop } else if (hasCallback && succeeded) { resolve(promise, value); - } else if (failed) { + } else if (succeeded === false) { reject(promise, error); } else if (settled === FULFILLED) { fulfill(promise, value); @@ -522,7 +501,15 @@ var Enumerator = function () { if (resolve$$1 === resolve$1) { - var _then = getThen(entry); + var _then = void 0; + var error = void 0; + var didError = false; + try { + _then = entry.then; + } catch (e) { + didError = true; + error = e; + } if (_then === then && entry._state !== PENDING) { this._settledAt(entry._state, i, entry._result); @@ -531,7 +518,11 @@ var Enumerator = function () { this._result[i] = entry; } else if (c === Promise$1) { var promise = new c(noop); - handleMaybeThenable(promise, entry, _then); + if (didError) { + reject(promise, error); + } else { + handleMaybeThenable(promise, entry, _then); + } this._willSettleAt(promise, i); } else { this._willSettleAt(new c(function (resolve$$1) { @@ -1109,15 +1100,19 @@ var Promise$1 = function () { var promise = this; var constructor = promise.constructor; - return promise.then(function (value) { - return 
constructor.resolve(callback()).then(function () { - return value; - }); - }, function (reason) { - return constructor.resolve(callback()).then(function () { - throw reason; + if (isFunction(callback)) { + return promise.then(function (value) { + return constructor.resolve(callback()).then(function () { + return value; + }); + }, function (reason) { + return constructor.resolve(callback()).then(function () { + throw reason; + }); }); - }); + } + + return promise.then(callback, callback); }; return Promise; diff --git a/node_modules/es6-promise/dist/es6-promise.map b/node_modules/es6-promise/dist/es6-promise.map index bbe71e43fc0e3..27db4142fdcd7 100644 --- a/node_modules/es6-promise/dist/es6-promise.map +++ b/node_modules/es6-promise/dist/es6-promise.map @@ -1 +1 @@ -{"version":3,"sources":["config/versionTemplate.txt","lib/es6-promise/utils.js","lib/es6-promise/asap.js","lib/es6-promise/then.js","lib/es6-promise/promise/resolve.js","lib/es6-promise/-internal.js","lib/es6-promise/enumerator.js","lib/es6-promise/promise/all.js","lib/es6-promise/promise/race.js","lib/es6-promise/promise/reject.js","lib/es6-promise/promise.js","lib/es6-promise/polyfill.js","lib/es6-promise.js"],"sourcesContent":["/*!\n * @overview es6-promise - a tiny implementation of Promises/A+.\n * @copyright Copyright (c) 2014 Yehuda Katz, Tom Dale, Stefan Penner and contributors (Conversion to ES6 API by Jake Archibald)\n * @license Licensed under MIT license\n * See https://raw.githubusercontent.com/stefanpenner/es6-promise/master/LICENSE\n * @version v4.2.4+314e4831\n */\n","export function objectOrFunction(x) {\n var type = typeof x;\n return x !== null && (type === 'object' || type === 'function');\n}\n\nexport function isFunction(x) {\n return typeof x === 'function';\n}\n\nexport function isMaybeThenable(x) {\n return x !== null && typeof x === 'object';\n}\n\nvar _isArray = void 0;\nif (Array.isArray) {\n _isArray = Array.isArray;\n} else {\n _isArray = function (x) {\n return Object.prototype.toString.call(x) === '[object Array]';\n };\n}\n\nexport var isArray = _isArray;","var len = 0;\nvar vertxNext = void 0;\nvar customSchedulerFn = void 0;\n\nexport var asap = function asap(callback, arg) {\n queue[len] = callback;\n queue[len + 1] = arg;\n len += 2;\n if (len === 2) {\n // If len is 2, that means that we need to schedule an async flush.\n // If additional callbacks are queued before the queue is flushed, they\n // will be processed by this flush that we are scheduling.\n if (customSchedulerFn) {\n customSchedulerFn(flush);\n } else {\n scheduleFlush();\n }\n }\n};\n\nexport function setScheduler(scheduleFn) {\n customSchedulerFn = scheduleFn;\n}\n\nexport function setAsap(asapFn) {\n asap = asapFn;\n}\n\nvar browserWindow = typeof window !== 'undefined' ? 
window : undefined;\nvar browserGlobal = browserWindow || {};\nvar BrowserMutationObserver = browserGlobal.MutationObserver || browserGlobal.WebKitMutationObserver;\nvar isNode = typeof self === 'undefined' && typeof process !== 'undefined' && {}.toString.call(process) === '[object process]';\n\n// test for web worker but not in IE10\nvar isWorker = typeof Uint8ClampedArray !== 'undefined' && typeof importScripts !== 'undefined' && typeof MessageChannel !== 'undefined';\n\n// node\nfunction useNextTick() {\n // node version 0.10.x displays a deprecation warning when nextTick is used recursively\n // see https://github.com/cujojs/when/issues/410 for details\n return function () {\n return process.nextTick(flush);\n };\n}\n\n// vertx\nfunction useVertxTimer() {\n if (typeof vertxNext !== 'undefined') {\n return function () {\n vertxNext(flush);\n };\n }\n\n return useSetTimeout();\n}\n\nfunction useMutationObserver() {\n var iterations = 0;\n var observer = new BrowserMutationObserver(flush);\n var node = document.createTextNode('');\n observer.observe(node, { characterData: true });\n\n return function () {\n node.data = iterations = ++iterations % 2;\n };\n}\n\n// web worker\nfunction useMessageChannel() {\n var channel = new MessageChannel();\n channel.port1.onmessage = flush;\n return function () {\n return channel.port2.postMessage(0);\n };\n}\n\nfunction useSetTimeout() {\n // Store setTimeout reference so es6-promise will be unaffected by\n // other code modifying setTimeout (like sinon.useFakeTimers())\n var globalSetTimeout = setTimeout;\n return function () {\n return globalSetTimeout(flush, 1);\n };\n}\n\nvar queue = new Array(1000);\nfunction flush() {\n for (var i = 0; i < len; i += 2) {\n var callback = queue[i];\n var arg = queue[i + 1];\n\n callback(arg);\n\n queue[i] = undefined;\n queue[i + 1] = undefined;\n }\n\n len = 0;\n}\n\nfunction attemptVertx() {\n try {\n var vertx = Function('return this')().require('vertx');\n vertxNext = vertx.runOnLoop || vertx.runOnContext;\n return useVertxTimer();\n } catch (e) {\n return useSetTimeout();\n }\n}\n\nvar scheduleFlush = void 0;\n// Decide what async method to use to triggering processing of queued callbacks:\nif (isNode) {\n scheduleFlush = useNextTick();\n} else if (BrowserMutationObserver) {\n scheduleFlush = useMutationObserver();\n} else if (isWorker) {\n scheduleFlush = useMessageChannel();\n} else if (browserWindow === undefined && typeof require === 'function') {\n scheduleFlush = attemptVertx();\n} else {\n scheduleFlush = useSetTimeout();\n}","import { invokeCallback, subscribe, FULFILLED, REJECTED, noop, makePromise, PROMISE_ID } from './-internal';\n\nimport { asap } from './asap';\n\nexport default function then(onFulfillment, onRejection) {\n var parent = this;\n\n var child = new this.constructor(noop);\n\n if (child[PROMISE_ID] === undefined) {\n makePromise(child);\n }\n\n var _state = parent._state;\n\n\n if (_state) {\n var callback = arguments[_state - 1];\n asap(function () {\n return invokeCallback(_state, child, callback, parent._result);\n });\n } else {\n subscribe(parent, child, onFulfillment, onRejection);\n }\n\n return child;\n}","import { noop, resolve as _resolve } from '../-internal';\n\n/**\n `Promise.resolve` returns a promise that will become resolved with the\n passed `value`. 
It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n resolve(1);\n });\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n ```javascript\n let promise = Promise.resolve(1);\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n @method resolve\n @static\n @param {Any} value value that the returned promise will be resolved with\n Useful for tooling.\n @return {Promise} a promise that will become fulfilled with the given\n `value`\n*/\nexport default function resolve(object) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (object && typeof object === 'object' && object.constructor === Constructor) {\n return object;\n }\n\n var promise = new Constructor(noop);\n _resolve(promise, object);\n return promise;\n}","import { objectOrFunction, isFunction } from './utils';\n\nimport { asap } from './asap';\n\nimport originalThen from './then';\nimport originalResolve from './promise/resolve';\n\nexport var PROMISE_ID = Math.random().toString(36).substring(2);\n\nfunction noop() {}\n\nvar PENDING = void 0;\nvar FULFILLED = 1;\nvar REJECTED = 2;\n\nvar TRY_CATCH_ERROR = { error: null };\n\nfunction selfFulfillment() {\n return new TypeError(\"You cannot resolve a promise with itself\");\n}\n\nfunction cannotReturnOwn() {\n return new TypeError('A promises callback cannot return that same promise.');\n}\n\nfunction getThen(promise) {\n try {\n return promise.then;\n } catch (error) {\n TRY_CATCH_ERROR.error = error;\n return TRY_CATCH_ERROR;\n }\n}\n\nfunction tryThen(then, value, fulfillmentHandler, rejectionHandler) {\n try {\n then.call(value, fulfillmentHandler, rejectionHandler);\n } catch (e) {\n return e;\n }\n}\n\nfunction handleForeignThenable(promise, thenable, then) {\n asap(function (promise) {\n var sealed = false;\n var error = tryThen(then, thenable, function (value) {\n if (sealed) {\n return;\n }\n sealed = true;\n if (thenable !== value) {\n resolve(promise, value);\n } else {\n fulfill(promise, value);\n }\n }, function (reason) {\n if (sealed) {\n return;\n }\n sealed = true;\n\n reject(promise, reason);\n }, 'Settle: ' + (promise._label || ' unknown promise'));\n\n if (!sealed && error) {\n sealed = true;\n reject(promise, error);\n }\n }, promise);\n}\n\nfunction handleOwnThenable(promise, thenable) {\n if (thenable._state === FULFILLED) {\n fulfill(promise, thenable._result);\n } else if (thenable._state === REJECTED) {\n reject(promise, thenable._result);\n } else {\n subscribe(thenable, undefined, function (value) {\n return resolve(promise, value);\n }, function (reason) {\n return reject(promise, reason);\n });\n }\n}\n\nfunction handleMaybeThenable(promise, maybeThenable, then) {\n if (maybeThenable.constructor === promise.constructor && then === originalThen && maybeThenable.constructor.resolve === originalResolve) {\n handleOwnThenable(promise, maybeThenable);\n } else {\n if (then === TRY_CATCH_ERROR) {\n reject(promise, TRY_CATCH_ERROR.error);\n TRY_CATCH_ERROR.error = null;\n } else if (then === undefined) {\n fulfill(promise, maybeThenable);\n } else if (isFunction(then)) {\n handleForeignThenable(promise, maybeThenable, then);\n } else {\n fulfill(promise, maybeThenable);\n }\n }\n}\n\nfunction resolve(promise, value) {\n if (promise === value) {\n reject(promise, selfFulfillment());\n } else if (objectOrFunction(value)) {\n handleMaybeThenable(promise, value, getThen(value));\n } else {\n 
fulfill(promise, value);\n }\n}\n\nfunction publishRejection(promise) {\n if (promise._onerror) {\n promise._onerror(promise._result);\n }\n\n publish(promise);\n}\n\nfunction fulfill(promise, value) {\n if (promise._state !== PENDING) {\n return;\n }\n\n promise._result = value;\n promise._state = FULFILLED;\n\n if (promise._subscribers.length !== 0) {\n asap(publish, promise);\n }\n}\n\nfunction reject(promise, reason) {\n if (promise._state !== PENDING) {\n return;\n }\n promise._state = REJECTED;\n promise._result = reason;\n\n asap(publishRejection, promise);\n}\n\nfunction subscribe(parent, child, onFulfillment, onRejection) {\n var _subscribers = parent._subscribers;\n var length = _subscribers.length;\n\n\n parent._onerror = null;\n\n _subscribers[length] = child;\n _subscribers[length + FULFILLED] = onFulfillment;\n _subscribers[length + REJECTED] = onRejection;\n\n if (length === 0 && parent._state) {\n asap(publish, parent);\n }\n}\n\nfunction publish(promise) {\n var subscribers = promise._subscribers;\n var settled = promise._state;\n\n if (subscribers.length === 0) {\n return;\n }\n\n var child = void 0,\n callback = void 0,\n detail = promise._result;\n\n for (var i = 0; i < subscribers.length; i += 3) {\n child = subscribers[i];\n callback = subscribers[i + settled];\n\n if (child) {\n invokeCallback(settled, child, callback, detail);\n } else {\n callback(detail);\n }\n }\n\n promise._subscribers.length = 0;\n}\n\nfunction tryCatch(callback, detail) {\n try {\n return callback(detail);\n } catch (e) {\n TRY_CATCH_ERROR.error = e;\n return TRY_CATCH_ERROR;\n }\n}\n\nfunction invokeCallback(settled, promise, callback, detail) {\n var hasCallback = isFunction(callback),\n value = void 0,\n error = void 0,\n succeeded = void 0,\n failed = void 0;\n\n if (hasCallback) {\n value = tryCatch(callback, detail);\n\n if (value === TRY_CATCH_ERROR) {\n failed = true;\n error = value.error;\n value.error = null;\n } else {\n succeeded = true;\n }\n\n if (promise === value) {\n reject(promise, cannotReturnOwn());\n return;\n }\n } else {\n value = detail;\n succeeded = true;\n }\n\n if (promise._state !== PENDING) {\n // noop\n } else if (hasCallback && succeeded) {\n resolve(promise, value);\n } else if (failed) {\n reject(promise, error);\n } else if (settled === FULFILLED) {\n fulfill(promise, value);\n } else if (settled === REJECTED) {\n reject(promise, value);\n }\n}\n\nfunction initializePromise(promise, resolver) {\n try {\n resolver(function resolvePromise(value) {\n resolve(promise, value);\n }, function rejectPromise(reason) {\n reject(promise, reason);\n });\n } catch (e) {\n reject(promise, e);\n }\n}\n\nvar id = 0;\nfunction nextId() {\n return id++;\n}\n\nfunction makePromise(promise) {\n promise[PROMISE_ID] = id++;\n promise._state = undefined;\n promise._result = undefined;\n promise._subscribers = [];\n}\n\nexport { nextId, makePromise, getThen, noop, resolve, reject, fulfill, subscribe, publish, publishRejection, initializePromise, invokeCallback, FULFILLED, REJECTED, PENDING, handleMaybeThenable };","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isArray, isMaybeThenable } from './utils';\nimport { noop, reject, fulfill, subscribe, FULFILLED, REJECTED, PENDING, getThen, handleMaybeThenable } from './-internal';\n\nimport then from './then';\nimport Promise from './promise';\nimport originalResolve from './promise/resolve';\nimport originalThen from 
'./then';\nimport { makePromise, PROMISE_ID } from './-internal';\n\nfunction validationError() {\n return new Error('Array Methods must be provided an Array');\n};\n\nvar Enumerator = function () {\n function Enumerator(Constructor, input) {\n this._instanceConstructor = Constructor;\n this.promise = new Constructor(noop);\n\n if (!this.promise[PROMISE_ID]) {\n makePromise(this.promise);\n }\n\n if (isArray(input)) {\n this.length = input.length;\n this._remaining = input.length;\n\n this._result = new Array(this.length);\n\n if (this.length === 0) {\n fulfill(this.promise, this._result);\n } else {\n this.length = this.length || 0;\n this._enumerate(input);\n if (this._remaining === 0) {\n fulfill(this.promise, this._result);\n }\n }\n } else {\n reject(this.promise, validationError());\n }\n }\n\n Enumerator.prototype._enumerate = function _enumerate(input) {\n for (var i = 0; this._state === PENDING && i < input.length; i++) {\n this._eachEntry(input[i], i);\n }\n };\n\n Enumerator.prototype._eachEntry = function _eachEntry(entry, i) {\n var c = this._instanceConstructor;\n var resolve = c.resolve;\n\n\n if (resolve === originalResolve) {\n var _then = getThen(entry);\n\n if (_then === originalThen && entry._state !== PENDING) {\n this._settledAt(entry._state, i, entry._result);\n } else if (typeof _then !== 'function') {\n this._remaining--;\n this._result[i] = entry;\n } else if (c === Promise) {\n var promise = new c(noop);\n handleMaybeThenable(promise, entry, _then);\n this._willSettleAt(promise, i);\n } else {\n this._willSettleAt(new c(function (resolve) {\n return resolve(entry);\n }), i);\n }\n } else {\n this._willSettleAt(resolve(entry), i);\n }\n };\n\n Enumerator.prototype._settledAt = function _settledAt(state, i, value) {\n var promise = this.promise;\n\n\n if (promise._state === PENDING) {\n this._remaining--;\n\n if (state === REJECTED) {\n reject(promise, value);\n } else {\n this._result[i] = value;\n }\n }\n\n if (this._remaining === 0) {\n fulfill(promise, this._result);\n }\n };\n\n Enumerator.prototype._willSettleAt = function _willSettleAt(promise, i) {\n var enumerator = this;\n\n subscribe(promise, undefined, function (value) {\n return enumerator._settledAt(FULFILLED, i, value);\n }, function (reason) {\n return enumerator._settledAt(REJECTED, i, reason);\n });\n };\n\n return Enumerator;\n}();\n\nexport default Enumerator;\n;","import Enumerator from '../enumerator';\n\n/**\n `Promise.all` accepts an array of promises, and returns a new promise which\n is fulfilled with an array of fulfillment values for the passed promises, or\n rejected with the reason of the first passed promise to be rejected. It casts all\n elements of the passed iterable to promises as it runs this algorithm.\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = resolve(2);\n let promise3 = resolve(3);\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // The array here would be [ 1, 2, 3 ];\n });\n ```\n\n If any of the `promises` given to `all` are rejected, the first promise\n that is rejected will be given as an argument to the returned promises's\n rejection handler. 
For example:\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = reject(new Error(\"2\"));\n let promise3 = reject(new Error(\"3\"));\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // Code here never runs because there are rejected promises!\n }, function(error) {\n // error.message === \"2\"\n });\n ```\n\n @method all\n @static\n @param {Array} entries array of promises\n @param {String} label optional string for labeling the promise.\n Useful for tooling.\n @return {Promise} promise that is fulfilled when all `promises` have been\n fulfilled, or rejected if any of them become rejected.\n @static\n*/\nexport default function all(entries) {\n return new Enumerator(this, entries).promise;\n}","import { isArray } from \"../utils\";\n\n/**\n `Promise.race` returns a new promise which is settled in the same way as the\n first passed promise to settle.\n\n Example:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 2');\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // result === 'promise 2' because it was resolved before promise1\n // was resolved.\n });\n ```\n\n `Promise.race` is deterministic in that only the state of the first\n settled promise matters. For example, even if other promises given to the\n `promises` array argument are resolved, but the first settled promise has\n become rejected before the other promises became fulfilled, the returned\n promise will become rejected:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n reject(new Error('promise 2'));\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // Code here never runs\n }, function(reason){\n // reason.message === 'promise 2' because promise 2 became rejected before\n // promise 1 became fulfilled\n });\n ```\n\n An example real-world use case is implementing timeouts:\n\n ```javascript\n Promise.race([ajax('foo.json'), timeout(5000)])\n ```\n\n @method race\n @static\n @param {Array} promises array of promises to observe\n Useful for tooling.\n @return {Promise} a promise which settles in the same way as the first passed\n promise to settle.\n*/\nexport default function race(entries) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (!isArray(entries)) {\n return new Constructor(function (_, reject) {\n return reject(new TypeError('You must pass an array to race.'));\n });\n } else {\n return new Constructor(function (resolve, reject) {\n var length = entries.length;\n for (var i = 0; i < length; i++) {\n Constructor.resolve(entries[i]).then(resolve, reject);\n }\n });\n }\n}","import { noop, reject as _reject } from '../-internal';\n\n/**\n `Promise.reject` returns a promise rejected with the passed `reason`.\n It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n reject(new Error('WHOOPS'));\n });\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n 
```javascript\n let promise = Promise.reject(new Error('WHOOPS'));\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n @method reject\n @static\n @param {Any} reason value that the returned promise will be rejected with.\n Useful for tooling.\n @return {Promise} a promise rejected with the given `reason`.\n*/\nexport default function reject(reason) {\n /*jshint validthis:true */\n var Constructor = this;\n var promise = new Constructor(noop);\n _reject(promise, reason);\n return promise;\n}","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isFunction } from './utils';\nimport { noop, nextId, PROMISE_ID, initializePromise } from './-internal';\nimport { asap, setAsap, setScheduler } from './asap';\n\nimport all from './promise/all';\nimport race from './promise/race';\nimport Resolve from './promise/resolve';\nimport Reject from './promise/reject';\nimport then from './then';\n\nfunction needsResolver() {\n throw new TypeError('You must pass a resolver function as the first argument to the promise constructor');\n}\n\nfunction needsNew() {\n throw new TypeError(\"Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.\");\n}\n\n/**\n Promise objects represent the eventual result of an asynchronous operation. The\n primary way of interacting with a promise is through its `then` method, which\n registers callbacks to receive either a promise's eventual value or the reason\n why the promise cannot be fulfilled.\n\n Terminology\n -----------\n\n - `promise` is an object or function with a `then` method whose behavior conforms to this specification.\n - `thenable` is an object or function that defines a `then` method.\n - `value` is any legal JavaScript value (including undefined, a thenable, or a promise).\n - `exception` is a value that is thrown using the throw statement.\n - `reason` is a value that indicates why a promise was rejected.\n - `settled` the final resting state of a promise, fulfilled or rejected.\n\n A promise can be in one of three states: pending, fulfilled, or rejected.\n\n Promises that are fulfilled have a fulfillment value and are in the fulfilled\n state. Promises that are rejected have a rejection reason and are in the\n rejected state. A fulfillment value is never a thenable.\n\n Promises can also be said to *resolve* a value. If this value is also a\n promise, then the original promise's settled state will match the value's\n settled state. 
So a promise that *resolves* a promise that rejects will\n itself reject, and a promise that *resolves* a promise that fulfills will\n itself fulfill.\n\n\n Basic Usage:\n ------------\n\n ```js\n let promise = new Promise(function(resolve, reject) {\n // on success\n resolve(value);\n\n // on failure\n reject(reason);\n });\n\n promise.then(function(value) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Advanced Usage:\n ---------------\n\n Promises shine when abstracting away asynchronous interactions such as\n `XMLHttpRequest`s.\n\n ```js\n function getJSON(url) {\n return new Promise(function(resolve, reject){\n let xhr = new XMLHttpRequest();\n\n xhr.open('GET', url);\n xhr.onreadystatechange = handler;\n xhr.responseType = 'json';\n xhr.setRequestHeader('Accept', 'application/json');\n xhr.send();\n\n function handler() {\n if (this.readyState === this.DONE) {\n if (this.status === 200) {\n resolve(this.response);\n } else {\n reject(new Error('getJSON: `' + url + '` failed with status: [' + this.status + ']'));\n }\n }\n };\n });\n }\n\n getJSON('/posts.json').then(function(json) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Unlike callbacks, promises are great composable primitives.\n\n ```js\n Promise.all([\n getJSON('/posts'),\n getJSON('/comments')\n ]).then(function(values){\n values[0] // => postsJSON\n values[1] // => commentsJSON\n\n return values;\n });\n ```\n\n @class Promise\n @param {Function} resolver\n Useful for tooling.\n @constructor\n*/\n\nvar Promise = function () {\n function Promise(resolver) {\n this[PROMISE_ID] = nextId();\n this._result = this._state = undefined;\n this._subscribers = [];\n\n if (noop !== resolver) {\n typeof resolver !== 'function' && needsResolver();\n this instanceof Promise ? initializePromise(this, resolver) : needsNew();\n }\n }\n\n /**\n The primary way of interacting with a promise is through its `then` method,\n which registers callbacks to receive either a promise's eventual value or the\n reason why the promise cannot be fulfilled.\n ```js\n findUser().then(function(user){\n // user is available\n }, function(reason){\n // user is unavailable, and you are given the reason why\n });\n ```\n Chaining\n --------\n The return value of `then` is itself a promise. 
This second, 'downstream'\n promise is resolved with the return value of the first promise's fulfillment\n or rejection handler, or rejected if the handler throws an exception.\n ```js\n findUser().then(function (user) {\n return user.name;\n }, function (reason) {\n return 'default name';\n }).then(function (userName) {\n // If `findUser` fulfilled, `userName` will be the user's name, otherwise it\n // will be `'default name'`\n });\n findUser().then(function (user) {\n throw new Error('Found user, but still unhappy');\n }, function (reason) {\n throw new Error('`findUser` rejected and we're unhappy');\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // if `findUser` fulfilled, `reason` will be 'Found user, but still unhappy'.\n // If `findUser` rejected, `reason` will be '`findUser` rejected and we're unhappy'.\n });\n ```\n If the downstream promise does not specify a rejection handler, rejection reasons will be propagated further downstream.\n ```js\n findUser().then(function (user) {\n throw new PedagogicalException('Upstream error');\n }).then(function (value) {\n // never reached\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // The `PedgagocialException` is propagated all the way down to here\n });\n ```\n Assimilation\n ------------\n Sometimes the value you want to propagate to a downstream promise can only be\n retrieved asynchronously. This can be achieved by returning a promise in the\n fulfillment or rejection handler. The downstream promise will then be pending\n until the returned promise is settled. This is called *assimilation*.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // The user's comments are now available\n });\n ```\n If the assimliated promise rejects, then the downstream promise will also reject.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // If `findCommentsByAuthor` fulfills, we'll have the value here\n }, function (reason) {\n // If `findCommentsByAuthor` rejects, we'll have the reason here\n });\n ```\n Simple Example\n --------------\n Synchronous Example\n ```javascript\n let result;\n try {\n result = findResult();\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n findResult(function(result, err){\n if (err) {\n // failure\n } else {\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findResult().then(function(result){\n // success\n }, function(reason){\n // failure\n });\n ```\n Advanced Example\n --------------\n Synchronous Example\n ```javascript\n let author, books;\n try {\n author = findAuthor();\n books = findBooksByAuthor(author);\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n function foundBooks(books) {\n }\n function failure(reason) {\n }\n findAuthor(function(author, err){\n if (err) {\n failure(err);\n // failure\n } else {\n try {\n findBoooksByAuthor(author, function(books, err) {\n if (err) {\n failure(err);\n } else {\n try {\n foundBooks(books);\n } catch(reason) {\n failure(reason);\n }\n }\n });\n } catch(error) {\n failure(err);\n }\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findAuthor().\n then(findBooksByAuthor).\n then(function(books){\n // found books\n }).catch(function(reason){\n // something went wrong\n });\n ```\n @method then\n @param {Function} onFulfilled\n @param {Function} onRejected\n Useful for tooling.\n @return 
{Promise}\n */\n\n /**\n `catch` is simply sugar for `then(undefined, onRejection)` which makes it the same\n as the catch block of a try/catch statement.\n ```js\n function findAuthor(){\n throw new Error('couldn't find that author');\n }\n // synchronous\n try {\n findAuthor();\n } catch(reason) {\n // something went wrong\n }\n // async with promises\n findAuthor().catch(function(reason){\n // something went wrong\n });\n ```\n @method catch\n @param {Function} onRejection\n Useful for tooling.\n @return {Promise}\n */\n\n\n Promise.prototype.catch = function _catch(onRejection) {\n return this.then(null, onRejection);\n };\n\n /**\n `finally` will be invoked regardless of the promise's fate just as native\n try/catch/finally behaves\n \n Synchronous example:\n \n ```js\n findAuthor() {\n if (Math.random() > 0.5) {\n throw new Error();\n }\n return new Author();\n }\n \n try {\n return findAuthor(); // succeed or fail\n } catch(error) {\n return findOtherAuther();\n } finally {\n // always runs\n // doesn't affect the return value\n }\n ```\n \n Asynchronous example:\n \n ```js\n findAuthor().catch(function(reason){\n return findOtherAuther();\n }).finally(function(){\n // author was either found, or not\n });\n ```\n \n @method finally\n @param {Function} callback\n @return {Promise}\n */\n\n\n Promise.prototype.finally = function _finally(callback) {\n var promise = this;\n var constructor = promise.constructor;\n\n return promise.then(function (value) {\n return constructor.resolve(callback()).then(function () {\n return value;\n });\n }, function (reason) {\n return constructor.resolve(callback()).then(function () {\n throw reason;\n });\n });\n };\n\n return Promise;\n}();\n\nPromise.prototype.then = then;\nexport default Promise;\nPromise.all = all;\nPromise.race = race;\nPromise.resolve = Resolve;\nPromise.reject = Reject;\nPromise._setScheduler = setScheduler;\nPromise._setAsap = setAsap;\nPromise._asap = asap;","/*global self*/\nimport Promise from './promise';\n\nexport default function polyfill() {\n var local = void 0;\n\n if (typeof global !== 'undefined') {\n local = global;\n } else if (typeof self !== 'undefined') {\n local = self;\n } else {\n try {\n local = Function('return this')();\n } catch (e) {\n throw new Error('polyfill failed because global object is unavailable in this environment');\n }\n }\n\n var P = local.Promise;\n\n if (P) {\n var promiseToString = null;\n try {\n promiseToString = Object.prototype.toString.call(P.resolve());\n } catch (e) {\n // silently ignored\n }\n\n if (promiseToString === '[object Promise]' && !P.cast) {\n return;\n }\n }\n\n local.Promise = Promise;\n}","import Promise from './es6-promise/promise';\nimport polyfill from './es6-promise/polyfill';\n\n// Strange compat..\nPromise.polyfill = polyfill;\nPromise.Promise = Promise;\nexport default 
Promise;"],"names":["resolve","_resolve","then","originalThen","originalResolve","Promise","reject","_reject","Resolve","Reject"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACNO,SAAS,gBAAgB,CAAC,CAAC,EAAE;EAClC,IAAI,IAAI,GAAG,OAAO,CAAC,CAAC;EACpB,OAAO,CAAC,KAAK,IAAI,KAAK,IAAI,KAAK,QAAQ,IAAI,IAAI,KAAK,UAAU,CAAC,CAAC;CACjE;;AAED,AAAO,SAAS,UAAU,CAAC,CAAC,EAAE;EAC5B,OAAO,OAAO,CAAC,KAAK,UAAU,CAAC;CAChC;;AAED,AAEC;;AAED,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC;AACtB,IAAI,KAAK,CAAC,OAAO,EAAE;EACjB,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC;CAC1B,MAAM;EACL,QAAQ,GAAG,UAAU,CAAC,EAAE;IACtB,OAAO,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,gBAAgB,CAAC;GAC/D,CAAC;CACH;;AAED,AAAO,IAAI,OAAO,GAAG,QAAQ;;ACtB7B,IAAI,GAAG,GAAG,CAAC,CAAC;AACZ,IAAI,SAAS,GAAG,KAAK,CAAC,CAAC;AACvB,IAAI,iBAAiB,GAAG,KAAK,CAAC,CAAC;;AAE/B,AAAO,IAAI,IAAI,GAAG,SAAS,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;EAC7C,KAAK,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC;EACtB,KAAK,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;EACrB,GAAG,IAAI,CAAC,CAAC;EACT,IAAI,GAAG,KAAK,CAAC,EAAE;;;;IAIb,IAAI,iBAAiB,EAAE;MACrB,iBAAiB,CAAC,KAAK,CAAC,CAAC;KAC1B,MAAM;MACL,aAAa,EAAE,CAAC;KACjB;GACF;CACF,CAAC;;AAEF,AAAO,SAAS,YAAY,CAAC,UAAU,EAAE;EACvC,iBAAiB,GAAG,UAAU,CAAC;CAChC;;AAED,AAAO,SAAS,OAAO,CAAC,MAAM,EAAE;EAC9B,IAAI,GAAG,MAAM,CAAC;CACf;;AAED,IAAI,aAAa,GAAG,OAAO,MAAM,KAAK,WAAW,GAAG,MAAM,GAAG,SAAS,CAAC;AACvE,IAAI,aAAa,GAAG,aAAa,IAAI,EAAE,CAAC;AACxC,IAAI,uBAAuB,GAAG,aAAa,CAAC,gBAAgB,IAAI,aAAa,CAAC,sBAAsB,CAAC;AACrG,IAAI,MAAM,GAAG,OAAO,IAAI,KAAK,WAAW,IAAI,OAAO,OAAO,KAAK,WAAW,IAAI,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,kBAAkB,CAAC;;;AAG/H,IAAI,QAAQ,GAAG,OAAO,iBAAiB,KAAK,WAAW,IAAI,OAAO,aAAa,KAAK,WAAW,IAAI,OAAO,cAAc,KAAK,WAAW,CAAC;;;AAGzI,SAAS,WAAW,GAAG;;;EAGrB,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;GAChC,CAAC;CACH;;;AAGD,SAAS,aAAa,GAAG;EACvB,IAAI,OAAO,SAAS,KAAK,WAAW,EAAE;IACpC,OAAO,YAAY;MACjB,SAAS,CAAC,KAAK,CAAC,CAAC;KAClB,CAAC;GACH;;EAED,OAAO,aAAa,EAAE,CAAC;CACxB;;AAED,SAAS,mBAAmB,GAAG;EAC7B,IAAI,UAAU,GAAG,CAAC,CAAC;EACnB,IAAI,QAAQ,GAAG,IAAI,uBAAuB,CAAC,KAAK,CAAC,CAAC;EAClD,IAAI,IAAI,GAAG,QAAQ,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC;EACvC,QAAQ,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;;EAEhD,OAAO,YAAY;IACjB,IAAI,CAAC,IAAI,GAAG,UAAU,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC;GAC3C,CAAC;CACH;;;AAGD,SAAS,iBAAiB,GAAG;EAC3B,IAAI,OAAO,GAAG,IAAI,cAAc,EAAE,CAAC;EACnC,OAAO,CAAC,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC;EAChC,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;GACrC,CAAC;CACH;;AAED,SAAS,aAAa,GAAG;;;EAGvB,IAAI,gBAAgB,GAAG,UAAU,CAAC;EAClC,OAAO,YAAY;IACjB,OAAO,gBAAgB,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;GACnC,CAAC;CACH;;AAED,IAAI,KAAK,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC;AAC5B,SAAS,KAAK,GAAG;EACf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE;IAC/B,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,GAAG,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;;IAEvB,QAAQ,CAAC,GAAG,CAAC,CAAC;;IAEd,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC;IACrB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;GAC1B;;EAED,GAAG,GAAG,CAAC,CAAC;CACT;;AAED,SAAS,YAAY,GAAG;EACtB,IAAI;IACF,IAAI,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;IACvD,SAAS,GAAG,KAAK,CAAC,SAAS,IAAI,KAAK,CAAC,YAAY,CAAC;IAClD,OAAO,aAAa,EAAE,CAAC;GACxB,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,aAAa,EAAE,CAAC;GACxB;CACF;;AAED,IAAI,aAAa,GAAG,KAAK,CAAC,CAAC;;AAE3B,IAAI,MAAM,EAAE;EACV,aAAa,GAAG,WAAW,EAAE,CAAC;CAC/B,MAAM,IAAI,uBAAuB,EAAE;EAClC,aAAa,GAAG,mBAAmB,EAAE,CAAC;CACvC,MAAM,IAAI,QAAQ,EAAE;EACnB,aAAa,GAAG,iBAAiB,EAAE,CAAC;CACrC,MAAM,IAAI,aAAa,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;EACvE,aAAa,GAAG,YAAY,EAAE,CAAC;CAChC,MAAM;EACL,aA
Aa,GAAG,aAAa,EAAE,CAAC;;;CACjC,DCtHc,SAAS,IAAI,CAAC,aAAa,EAAE,WAAW,EAAE;EACvD,IAAI,MAAM,GAAG,IAAI,CAAC;;EAElB,IAAI,KAAK,GAAG,IAAI,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;;EAEvC,IAAI,KAAK,CAAC,UAAU,CAAC,KAAK,SAAS,EAAE;IACnC,WAAW,CAAC,KAAK,CAAC,CAAC;GACpB;;EAED,IAAI,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;;;EAG3B,IAAI,MAAM,EAAE;IACV,IAAI,QAAQ,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IACrC,IAAI,CAAC,YAAY;MACf,OAAO,cAAc,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;KAChE,CAAC,CAAC;GACJ,MAAM;IACL,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,CAAC,CAAC;GACtD;;EAED,OAAO,KAAK,CAAC;;;CACd,DCxBD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+BA,AAAe,SAASA,SAAO,CAAC,MAAM,EAAE;;EAEtC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,CAAC,WAAW,KAAK,WAAW,EAAE;IAC9E,OAAO,MAAM,CAAC;GACf;;EAED,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,OAAQ,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EAC1B,OAAO,OAAO,CAAC;;;CAChB,DCrCM,IAAI,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;;AAEhE,SAAS,IAAI,GAAG,EAAE;;AAElB,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC;AACrB,IAAI,SAAS,GAAG,CAAC,CAAC;AAClB,IAAI,QAAQ,GAAG,CAAC,CAAC;;AAEjB,IAAI,eAAe,GAAG,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC;;AAEtC,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,0CAA0C,CAAC,CAAC;CAClE;;AAED,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,sDAAsD,CAAC,CAAC;CAC9E;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE;EACxB,IAAI;IACF,OAAO,OAAO,CAAC,IAAI,CAAC;GACrB,CAAC,OAAO,KAAK,EAAE;IACd,eAAe,CAAC,KAAK,GAAG,KAAK,CAAC;IAC9B,OAAO,eAAe,CAAC;GACxB;CACF;;AAED,SAAS,OAAO,CAACC,OAAI,EAAE,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,EAAE;EAClE,IAAI;IACFA,OAAI,CAAC,IAAI,CAAC,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;GACxD,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,CAAC;GACV;CACF;;AAED,SAAS,qBAAqB,CAAC,OAAO,EAAE,QAAQ,EAAEA,OAAI,EAAE;EACtD,IAAI,CAAC,UAAU,OAAO,EAAE;IACtB,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,IAAI,KAAK,GAAG,OAAO,CAACA,OAAI,EAAE,QAAQ,EAAE,UAAU,KAAK,EAAE;MACnD,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;MACd,IAAI,QAAQ,KAAK,KAAK,EAAE;QACtB,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB,MAAM;QACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB;KACF,EAAE,UAAU,MAAM,EAAE;MACnB,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;;MAEd,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,EAAE,UAAU,IAAI,OAAO,CAAC,MAAM,IAAI,kBAAkB,CAAC,CAAC,CAAC;;IAExD,IAAI,CAAC,MAAM,IAAI,KAAK,EAAE;MACpB,MAAM,GAAG,IAAI,CAAC;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACxB;GACF,EAAE,OAAO,CAAC,CAAC;CACb;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI,QAAQ,CAAC,MAAM,KAAK,SAAS,EAAE;IACjC,OAAO,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACpC,MAAM,IAAI,QAAQ,CAAC,MAAM,KAAK,QAAQ,EAAE;IACvC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACnC,MAAM;IACL,SAAS,CAAC,QAAQ,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC9C,OAAO,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KAChC,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KAChC,CAAC,CAAC;GACJ;CACF;;AAED,SAAS,mBAAmB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,EAAE;EACzD,IAAI,aAAa,CAAC,WAAW,KAAK,OAAO,CAAC,WAAW,IAAIA,OAAI,KAAKC,IAAY,IAAI,aAAa,CAAC,WAAW,CAAC,OAAO,KAAKC,SAAe,EAAE;IACvI,iBAAiB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;GAC3C,MAAM;IACL,IAAIF,OAAI,KAAK,eAAe,EAAE;MAC5B,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,KAAK,CAAC,CAAC;MACvC,eAAe,CAAC,KAAK,GAAG,IAAI,CAAC;KAC9B,MAAM,IAAIA,OAAI,KAAK,SAAS,EAAE;MAC7B,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC,MAAM,IAAI,UAAU,CAACA,OAAI,CAAC,EAAE;MAC3B,qBAAqB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,CAAC,CAAC;KACrD,MAAM;MACL,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC;GACF;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,KAAK,KAAK,EAAE;IACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;GACpC,MAAM,IAAI,gBAAgB,CAA
C,KAAK,CAAC,EAAE;IAClC,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAE,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC;GACrD,MAAM;IACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB;CACF;;AAED,SAAS,gBAAgB,CAAC,OAAO,EAAE;EACjC,IAAI,OAAO,CAAC,QAAQ,EAAE;IACpB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;GACnC;;EAED,OAAO,CAAC,OAAO,CAAC,CAAC;CAClB;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;;EAED,OAAO,CAAC,OAAO,GAAG,KAAK,CAAC;EACxB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;;EAE3B,IAAI,OAAO,CAAC,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;IACrC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;EACD,OAAO,CAAC,MAAM,GAAG,QAAQ,CAAC;EAC1B,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC;;EAEzB,IAAI,CAAC,gBAAgB,EAAE,OAAO,CAAC,CAAC;CACjC;;AAED,SAAS,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,EAAE;EAC5D,IAAI,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;EACvC,IAAI,MAAM,GAAG,YAAY,CAAC,MAAM,CAAC;;;EAGjC,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC;;EAEvB,YAAY,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC;EAC7B,YAAY,CAAC,MAAM,GAAG,SAAS,CAAC,GAAG,aAAa,CAAC;EACjD,YAAY,CAAC,MAAM,GAAG,QAAQ,CAAC,GAAG,WAAW,CAAC;;EAE9C,IAAI,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,MAAM,EAAE;IACjC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;GACvB;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE;EACxB,IAAI,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;EACvC,IAAI,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC;;EAE7B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;IAC5B,OAAO;GACR;;EAED,IAAI,KAAK,GAAG,KAAK,CAAC;MACd,QAAQ,GAAG,KAAK,CAAC;MACjB,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;;EAE7B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,IAAI,CAAC,EAAE;IAC9C,KAAK,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;IACvB,QAAQ,GAAG,WAAW,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC;;IAEpC,IAAI,KAAK,EAAE;MACT,cAAc,CAAC,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;KAClD,MAAM;MACL,QAAQ,CAAC,MAAM,CAAC,CAAC;KAClB;GACF;;EAED,OAAO,CAAC,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC;CACjC;;AAED,SAAS,QAAQ,CAAC,QAAQ,EAAE,MAAM,EAAE;EAClC,IAAI;IACF,OAAO,QAAQ,CAAC,MAAM,CAAC,CAAC;GACzB,CAAC,OAAO,CAAC,EAAE;IACV,eAAe,CAAC,KAAK,GAAG,CAAC,CAAC;IAC1B,OAAO,eAAe,CAAC;GACxB;CACF;;AAED,SAAS,cAAc,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE;EAC1D,IAAI,WAAW,GAAG,UAAU,CAAC,QAAQ,CAAC;MAClC,KAAK,GAAG,KAAK,CAAC;MACd,KAAK,GAAG,KAAK,CAAC;MACd,SAAS,GAAG,KAAK,CAAC;MAClB,MAAM,GAAG,KAAK,CAAC,CAAC;;EAEpB,IAAI,WAAW,EAAE;IACf,KAAK,GAAG,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;;IAEnC,IAAI,KAAK,KAAK,eAAe,EAAE;MAC7B,MAAM,GAAG,IAAI,CAAC;MACd,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC;MACpB,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC;KACpB,MAAM;MACL,SAAS,GAAG,IAAI,CAAC;KAClB;;IAED,IAAI,OAAO,KAAK,KAAK,EAAE;MACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;MACnC,OAAO;KACR;GACF,MAAM;IACL,KAAK,GAAG,MAAM,CAAC;IACf,SAAS,GAAG,IAAI,CAAC;GAClB;;EAED,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;;GAE/B,MAAM,IAAI,WAAW,IAAI,SAAS,EAAE;IACnC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,MAAM,EAAE;IACjB,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB,MAAM,IAAI,OAAO,KAAK,SAAS,EAAE;IAChC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,OAAO,KAAK,QAAQ,EAAE;IAC/B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI;IACF,QAAQ,CAAC,SAAS,cAAc,CAAC,KAAK,EAAE;MACtC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACzB,EAAE,SAAS,aAAa,CAAC,MAAM,EAAE;MAChC,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,CAAC,CAAC;GACJ,CAAC,OAAO,CAAC,EAAE;IACV,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;GACpB;CACF;;AAED,IAAI,EAAE,GAAG,CAAC,CAAC;AACX,SAAS,MAAM,GAAG;EAChB,OAAO,EAAE,EAAE,CAAC;CACb;;AAED,SAAS,WAAW,CAAC,OAAO,EAAE;EAC5B,OAAO,CAAC,UAAU,CAAC,GAAG,EAAE,EAAE,CAAC;EAC3B,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;EAC3B,OAAO,CAAC,OAAO,GAAG,SAAS,CAAC;EAC5B,OAAO,CAAC,YAAY,GA
AG,EAAE,CAAC;CAC3B;;ACrPD,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;CAC7D,AAAC;;AAEF,IAAI,UAAU,GAAG,YAAY;EAC3B,SAAS,UAAU,CAAC,WAAW,EAAE,KAAK,EAAE;IACtC,IAAI,CAAC,oBAAoB,GAAG,WAAW,CAAC;IACxC,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;;IAErC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;MAC7B,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;KAC3B;;IAED,IAAI,OAAO,CAAC,KAAK,CAAC,EAAE;MAClB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC;MAC3B,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,MAAM,CAAC;;MAE/B,IAAI,CAAC,OAAO,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;;MAEtC,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACrB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;OACrC,MAAM;QACL,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;QACvB,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;UACzB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;SACrC;OACF;KACF,MAAM;MACL,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;KACzC;GACF;;EAED,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE;IAC3D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,KAAK,OAAO,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;MAChE,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC9B;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE;IAC9D,IAAI,CAAC,GAAG,IAAI,CAAC,oBAAoB,CAAC;IAClC,IAAIF,UAAO,GAAG,CAAC,CAAC,OAAO,CAAC;;;IAGxB,IAAIA,UAAO,KAAKI,SAAe,EAAE;MAC/B,IAAI,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC;;MAE3B,IAAI,KAAK,KAAKD,IAAY,IAAI,KAAK,CAAC,MAAM,KAAK,OAAO,EAAE;QACtD,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;OACjD,MAAM,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;QACtC,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB,MAAM,IAAI,CAAC,KAAKE,SAAO,EAAE;QACxB,IAAI,OAAO,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC;QAC1B,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAC3C,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;OAChC,MAAM;QACL,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,UAAUL,UAAO,EAAE;UAC1C,OAAOA,UAAO,CAAC,KAAK,CAAC,CAAC;SACvB,CAAC,EAAE,CAAC,CAAC,CAAC;OACR;KACF,MAAM;MACL,IAAI,CAAC,aAAa,CAACA,UAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC;KACvC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE,KAAK,EAAE;IACrE,IAAI,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;;;IAG3B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;MAC9B,IAAI,CAAC,UAAU,EAAE,CAAC;;MAElB,IAAI,KAAK,KAAK,QAAQ,EAAE;QACtB,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACxB,MAAM;QACL,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB;KACF;;IAED,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;MACzB,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;KAChC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,aAAa,GAAG,SAAS,aAAa,CAAC,OAAO,EAAE,CAAC,EAAE;IACtE,IAAI,UAAU,GAAG,IAAI,CAAC;;IAEtB,SAAS,CAAC,OAAO,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC7C,OAAO,UAAU,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC;KACnD,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,UAAU,CAAC,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;KACnD,CAAC,CAAC;GACJ,CAAC;;EAEF,OAAO,UAAU,CAAC;CACnB,EAAE;;ACzGH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+CA,AAAe,SAAS,GAAG,CAAC,OAAO,EAAE;EACnC,OAAO,IAAI,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC,OAAO,CAAC;;;CAC9C,DCjDD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiEA,AAAe,SAAS,IAAI,CAAC,OAAO,EAAE;;EAEpC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;IACrB,OAAO,IAAI,WAAW,CAAC,UAAU,CAAC,EAAE,MAAM,EAAE;MAC1C,OAAO,MAAM,CAAC,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAC,CAAC;KACjE,CAAC,CAAC;GACJ,MAAM;IACL,OAAO,IAAI,WAAW,CAAC,UAAU,OAAO,EAAE,MAAM,EAAE;MAChD,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;MAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,
GAAG,MAAM,EAAE,CAAC,EAAE,EAAE;QAC/B,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;OACvD;KACF,CAAC,CAAC;GACJ;;;CACF,DCjFD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCA,AAAe,SAASM,QAAM,CAAC,MAAM,EAAE;;EAErC,IAAI,WAAW,GAAG,IAAI,CAAC;EACvB,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,MAAO,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EACzB,OAAO,OAAO,CAAC;;;CAChB,DC9BD,SAAS,aAAa,GAAG;EACvB,MAAM,IAAI,SAAS,CAAC,oFAAoF,CAAC,CAAC;CAC3G;;AAED,SAAS,QAAQ,GAAG;EAClB,MAAM,IAAI,SAAS,CAAC,uHAAuH,CAAC,CAAC;CAC9I;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0GD,IAAIF,SAAO,GAAG,YAAY;EACxB,SAAS,OAAO,CAAC,QAAQ,EAAE;IACzB,IAAI,CAAC,UAAU,CAAC,GAAG,MAAM,EAAE,CAAC;IAC5B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;IACvC,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;;IAEvB,IAAI,IAAI,KAAK,QAAQ,EAAE;MACrB,OAAO,QAAQ,KAAK,UAAU,IAAI,aAAa,EAAE,CAAC;MAClD,IAAI,YAAY,OAAO,GAAG,iBAAiB,CAAC,IAAI,EAAE,QAAQ,CAAC,GAAG,QAAQ,EAAE,CAAC;KAC1E;GACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA4LD,OAAO,CAAC,SAAS,CAAC,KAAK,GAAG,SAAS,MAAM,CAAC,WAAW,EAAE;IACrD,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;GACrC,CAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA0CF,OAAO,CAAC,SAAS,CAAC,OAAO,GAAG,SAAS,QAAQ,CAAC,QAAQ,EAAE;IACtD,IAAI,OAAO,GAAG,IAAI,CAAC;IACnB,IAAI,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;;IAEtC,OAAO,OAAO,CAAC,IAAI,CAAC,UAAU,KAAK,EAAE;MACnC,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;QACtD,OAAO,KAAK,CAAC;OACd,CAAC,CAAC;KACJ,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;QACtD,MAAM,MAAM,CAAC;OACd,CAAC,CAAC;KACJ,CAAC,CAAC;GACJ,CAAC;;EAEF,OAAO,OAAO,CAAC;CAChB,EAAE,CAAC;;AAEJA,SAAO,CAAC,SAAS,CAAC,IAAI,GAAG,IAAI,CAAC;AAC9B,AACAA,SAAO,CAAC,GAAG,GAAG,GAAG,CAAC;AAClBA,SAAO,CAAC,IAAI,GAAG,IAAI,CAAC;AACpBA,SAAO,CAAC,OAAO,GAAGG,SAAO,CAAC;AAC1BH,SAAO,CAAC,MAAM,GAAGI,QAAM,CAAC;AACxBJ,SAAO,CAAC,aAAa,GAAG,YAAY,CAAC;AACrCA,SAAO,CAAC,QAAQ,GAAG,OAAO,CAAC;AAC3BA,SAAO,CAAC,KAAK,GAAG,IAAI;;ACxYpB;AACA,AAEe,SAAS,QAAQ,GAAG;EACjC,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;;EAEnB,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;IACjC,KAAK,GAAG,MAAM,CAAC;GAChB,MAAM,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;IACtC,KAAK,GAAG,IAAI,CAAC;GACd,MAAM;IACL,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC;KACnC,CAAC,OAAO,CAAC,EAAE;MACV,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;KAC7F;GACF;;EAED,IAAI,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC;;EAEtB,IAAI,CAAC,EAAE;IACL,IAAI,eAAe,GAAG,IAAI,CAAC;IAC3B,IAAI;MACF,eAAe,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KAC/D,CAAC,OAAO,CAAC,EAAE;;KAEX;;IAED,IAAI,eAAe,KAAK,kBAAkB,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE;MACrD,OAAO;KACR;GACF;;EAED,KAAK,CAAC,OAAO,GAAGA,SAAO,CAAC;;;CACzB,DC/BD;AACAA,SAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC;AAC5BA,SAAO,CAAC,OAAO,GAAGA,SAAO,CAAC;;;;;;;;","file":"es6-promise.js"} \ No newline at end of file +{"version":3,"sources":["config/versionTemplate.txt","lib/es6-promise/utils.js","lib/es6-promise/asap.js","lib/es6-promise/then.js","lib/es6-promise/promise/resolve.js","lib/es6-promise/-internal.js","lib/es6-promise/enumerator.js","lib/es6-promise/promise/all.js","lib/es6-promise/promise/race.js","lib/es6-promise/promise/reject.js","lib/es6-promise/promise.js","lib/es6-promise/polyfill.js","lib/es6-promise.js"],"sourcesContent":["/*!\n * @overview es6-promise - a tiny implementation of Promises/A+.\n * @copyright Copyright (c) 2014 Yehuda Katz, Tom Dale, Stefan Penner and contributors 
(Conversion to ES6 API by Jake Archibald)\n * @license Licensed under MIT license\n * See https://raw.githubusercontent.com/stefanpenner/es6-promise/master/LICENSE\n * @version v4.2.8+1e68dce6\n */\n","export function objectOrFunction(x) {\n var type = typeof x;\n return x !== null && (type === 'object' || type === 'function');\n}\n\nexport function isFunction(x) {\n return typeof x === 'function';\n}\n\nexport function isMaybeThenable(x) {\n return x !== null && typeof x === 'object';\n}\n\nvar _isArray = void 0;\nif (Array.isArray) {\n _isArray = Array.isArray;\n} else {\n _isArray = function (x) {\n return Object.prototype.toString.call(x) === '[object Array]';\n };\n}\n\nexport var isArray = _isArray;","var len = 0;\nvar vertxNext = void 0;\nvar customSchedulerFn = void 0;\n\nexport var asap = function asap(callback, arg) {\n queue[len] = callback;\n queue[len + 1] = arg;\n len += 2;\n if (len === 2) {\n // If len is 2, that means that we need to schedule an async flush.\n // If additional callbacks are queued before the queue is flushed, they\n // will be processed by this flush that we are scheduling.\n if (customSchedulerFn) {\n customSchedulerFn(flush);\n } else {\n scheduleFlush();\n }\n }\n};\n\nexport function setScheduler(scheduleFn) {\n customSchedulerFn = scheduleFn;\n}\n\nexport function setAsap(asapFn) {\n asap = asapFn;\n}\n\nvar browserWindow = typeof window !== 'undefined' ? window : undefined;\nvar browserGlobal = browserWindow || {};\nvar BrowserMutationObserver = browserGlobal.MutationObserver || browserGlobal.WebKitMutationObserver;\nvar isNode = typeof self === 'undefined' && typeof process !== 'undefined' && {}.toString.call(process) === '[object process]';\n\n// test for web worker but not in IE10\nvar isWorker = typeof Uint8ClampedArray !== 'undefined' && typeof importScripts !== 'undefined' && typeof MessageChannel !== 'undefined';\n\n// node\nfunction useNextTick() {\n // node version 0.10.x displays a deprecation warning when nextTick is used recursively\n // see https://github.com/cujojs/when/issues/410 for details\n return function () {\n return process.nextTick(flush);\n };\n}\n\n// vertx\nfunction useVertxTimer() {\n if (typeof vertxNext !== 'undefined') {\n return function () {\n vertxNext(flush);\n };\n }\n\n return useSetTimeout();\n}\n\nfunction useMutationObserver() {\n var iterations = 0;\n var observer = new BrowserMutationObserver(flush);\n var node = document.createTextNode('');\n observer.observe(node, { characterData: true });\n\n return function () {\n node.data = iterations = ++iterations % 2;\n };\n}\n\n// web worker\nfunction useMessageChannel() {\n var channel = new MessageChannel();\n channel.port1.onmessage = flush;\n return function () {\n return channel.port2.postMessage(0);\n };\n}\n\nfunction useSetTimeout() {\n // Store setTimeout reference so es6-promise will be unaffected by\n // other code modifying setTimeout (like sinon.useFakeTimers())\n var globalSetTimeout = setTimeout;\n return function () {\n return globalSetTimeout(flush, 1);\n };\n}\n\nvar queue = new Array(1000);\nfunction flush() {\n for (var i = 0; i < len; i += 2) {\n var callback = queue[i];\n var arg = queue[i + 1];\n\n callback(arg);\n\n queue[i] = undefined;\n queue[i + 1] = undefined;\n }\n\n len = 0;\n}\n\nfunction attemptVertx() {\n try {\n var vertx = Function('return this')().require('vertx');\n vertxNext = vertx.runOnLoop || vertx.runOnContext;\n return useVertxTimer();\n } catch (e) {\n return useSetTimeout();\n }\n}\n\nvar scheduleFlush = void 0;\n// 
Decide what async method to use to triggering processing of queued callbacks:\nif (isNode) {\n scheduleFlush = useNextTick();\n} else if (BrowserMutationObserver) {\n scheduleFlush = useMutationObserver();\n} else if (isWorker) {\n scheduleFlush = useMessageChannel();\n} else if (browserWindow === undefined && typeof require === 'function') {\n scheduleFlush = attemptVertx();\n} else {\n scheduleFlush = useSetTimeout();\n}","import { invokeCallback, subscribe, FULFILLED, REJECTED, noop, makePromise, PROMISE_ID } from './-internal';\n\nimport { asap } from './asap';\n\nexport default function then(onFulfillment, onRejection) {\n var parent = this;\n\n var child = new this.constructor(noop);\n\n if (child[PROMISE_ID] === undefined) {\n makePromise(child);\n }\n\n var _state = parent._state;\n\n\n if (_state) {\n var callback = arguments[_state - 1];\n asap(function () {\n return invokeCallback(_state, child, callback, parent._result);\n });\n } else {\n subscribe(parent, child, onFulfillment, onRejection);\n }\n\n return child;\n}","import { noop, resolve as _resolve } from '../-internal';\n\n/**\n `Promise.resolve` returns a promise that will become resolved with the\n passed `value`. It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n resolve(1);\n });\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n ```javascript\n let promise = Promise.resolve(1);\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n @method resolve\n @static\n @param {Any} value value that the returned promise will be resolved with\n Useful for tooling.\n @return {Promise} a promise that will become fulfilled with the given\n `value`\n*/\nexport default function resolve(object) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (object && typeof object === 'object' && object.constructor === Constructor) {\n return object;\n }\n\n var promise = new Constructor(noop);\n _resolve(promise, object);\n return promise;\n}","import { objectOrFunction, isFunction } from './utils';\n\nimport { asap } from './asap';\n\nimport originalThen from './then';\nimport originalResolve from './promise/resolve';\n\nexport var PROMISE_ID = Math.random().toString(36).substring(2);\n\nfunction noop() {}\n\nvar PENDING = void 0;\nvar FULFILLED = 1;\nvar REJECTED = 2;\n\nfunction selfFulfillment() {\n return new TypeError(\"You cannot resolve a promise with itself\");\n}\n\nfunction cannotReturnOwn() {\n return new TypeError('A promises callback cannot return that same promise.');\n}\n\nfunction tryThen(then, value, fulfillmentHandler, rejectionHandler) {\n try {\n then.call(value, fulfillmentHandler, rejectionHandler);\n } catch (e) {\n return e;\n }\n}\n\nfunction handleForeignThenable(promise, thenable, then) {\n asap(function (promise) {\n var sealed = false;\n var error = tryThen(then, thenable, function (value) {\n if (sealed) {\n return;\n }\n sealed = true;\n if (thenable !== value) {\n resolve(promise, value);\n } else {\n fulfill(promise, value);\n }\n }, function (reason) {\n if (sealed) {\n return;\n }\n sealed = true;\n\n reject(promise, reason);\n }, 'Settle: ' + (promise._label || ' unknown promise'));\n\n if (!sealed && error) {\n sealed = true;\n reject(promise, error);\n }\n }, promise);\n}\n\nfunction handleOwnThenable(promise, thenable) {\n if (thenable._state === FULFILLED) {\n fulfill(promise, thenable._result);\n } else if (thenable._state === 
REJECTED) {\n reject(promise, thenable._result);\n } else {\n subscribe(thenable, undefined, function (value) {\n return resolve(promise, value);\n }, function (reason) {\n return reject(promise, reason);\n });\n }\n}\n\nfunction handleMaybeThenable(promise, maybeThenable, then) {\n if (maybeThenable.constructor === promise.constructor && then === originalThen && maybeThenable.constructor.resolve === originalResolve) {\n handleOwnThenable(promise, maybeThenable);\n } else {\n if (then === undefined) {\n fulfill(promise, maybeThenable);\n } else if (isFunction(then)) {\n handleForeignThenable(promise, maybeThenable, then);\n } else {\n fulfill(promise, maybeThenable);\n }\n }\n}\n\nfunction resolve(promise, value) {\n if (promise === value) {\n reject(promise, selfFulfillment());\n } else if (objectOrFunction(value)) {\n var then = void 0;\n try {\n then = value.then;\n } catch (error) {\n reject(promise, error);\n return;\n }\n handleMaybeThenable(promise, value, then);\n } else {\n fulfill(promise, value);\n }\n}\n\nfunction publishRejection(promise) {\n if (promise._onerror) {\n promise._onerror(promise._result);\n }\n\n publish(promise);\n}\n\nfunction fulfill(promise, value) {\n if (promise._state !== PENDING) {\n return;\n }\n\n promise._result = value;\n promise._state = FULFILLED;\n\n if (promise._subscribers.length !== 0) {\n asap(publish, promise);\n }\n}\n\nfunction reject(promise, reason) {\n if (promise._state !== PENDING) {\n return;\n }\n promise._state = REJECTED;\n promise._result = reason;\n\n asap(publishRejection, promise);\n}\n\nfunction subscribe(parent, child, onFulfillment, onRejection) {\n var _subscribers = parent._subscribers;\n var length = _subscribers.length;\n\n\n parent._onerror = null;\n\n _subscribers[length] = child;\n _subscribers[length + FULFILLED] = onFulfillment;\n _subscribers[length + REJECTED] = onRejection;\n\n if (length === 0 && parent._state) {\n asap(publish, parent);\n }\n}\n\nfunction publish(promise) {\n var subscribers = promise._subscribers;\n var settled = promise._state;\n\n if (subscribers.length === 0) {\n return;\n }\n\n var child = void 0,\n callback = void 0,\n detail = promise._result;\n\n for (var i = 0; i < subscribers.length; i += 3) {\n child = subscribers[i];\n callback = subscribers[i + settled];\n\n if (child) {\n invokeCallback(settled, child, callback, detail);\n } else {\n callback(detail);\n }\n }\n\n promise._subscribers.length = 0;\n}\n\nfunction invokeCallback(settled, promise, callback, detail) {\n var hasCallback = isFunction(callback),\n value = void 0,\n error = void 0,\n succeeded = true;\n\n if (hasCallback) {\n try {\n value = callback(detail);\n } catch (e) {\n succeeded = false;\n error = e;\n }\n\n if (promise === value) {\n reject(promise, cannotReturnOwn());\n return;\n }\n } else {\n value = detail;\n }\n\n if (promise._state !== PENDING) {\n // noop\n } else if (hasCallback && succeeded) {\n resolve(promise, value);\n } else if (succeeded === false) {\n reject(promise, error);\n } else if (settled === FULFILLED) {\n fulfill(promise, value);\n } else if (settled === REJECTED) {\n reject(promise, value);\n }\n}\n\nfunction initializePromise(promise, resolver) {\n try {\n resolver(function resolvePromise(value) {\n resolve(promise, value);\n }, function rejectPromise(reason) {\n reject(promise, reason);\n });\n } catch (e) {\n reject(promise, e);\n }\n}\n\nvar id = 0;\nfunction nextId() {\n return id++;\n}\n\nfunction makePromise(promise) {\n promise[PROMISE_ID] = id++;\n promise._state = undefined;\n 
promise._result = undefined;\n promise._subscribers = [];\n}\n\nexport { nextId, makePromise, noop, resolve, reject, fulfill, subscribe, publish, publishRejection, initializePromise, invokeCallback, FULFILLED, REJECTED, PENDING, handleMaybeThenable };","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isArray, isMaybeThenable } from './utils';\nimport { noop, reject, fulfill, subscribe, FULFILLED, REJECTED, PENDING, handleMaybeThenable } from './-internal';\n\nimport then from './then';\nimport Promise from './promise';\nimport originalResolve from './promise/resolve';\nimport originalThen from './then';\nimport { makePromise, PROMISE_ID } from './-internal';\n\nfunction validationError() {\n return new Error('Array Methods must be provided an Array');\n};\n\nvar Enumerator = function () {\n function Enumerator(Constructor, input) {\n this._instanceConstructor = Constructor;\n this.promise = new Constructor(noop);\n\n if (!this.promise[PROMISE_ID]) {\n makePromise(this.promise);\n }\n\n if (isArray(input)) {\n this.length = input.length;\n this._remaining = input.length;\n\n this._result = new Array(this.length);\n\n if (this.length === 0) {\n fulfill(this.promise, this._result);\n } else {\n this.length = this.length || 0;\n this._enumerate(input);\n if (this._remaining === 0) {\n fulfill(this.promise, this._result);\n }\n }\n } else {\n reject(this.promise, validationError());\n }\n }\n\n Enumerator.prototype._enumerate = function _enumerate(input) {\n for (var i = 0; this._state === PENDING && i < input.length; i++) {\n this._eachEntry(input[i], i);\n }\n };\n\n Enumerator.prototype._eachEntry = function _eachEntry(entry, i) {\n var c = this._instanceConstructor;\n var resolve = c.resolve;\n\n\n if (resolve === originalResolve) {\n var _then = void 0;\n var error = void 0;\n var didError = false;\n try {\n _then = entry.then;\n } catch (e) {\n didError = true;\n error = e;\n }\n\n if (_then === originalThen && entry._state !== PENDING) {\n this._settledAt(entry._state, i, entry._result);\n } else if (typeof _then !== 'function') {\n this._remaining--;\n this._result[i] = entry;\n } else if (c === Promise) {\n var promise = new c(noop);\n if (didError) {\n reject(promise, error);\n } else {\n handleMaybeThenable(promise, entry, _then);\n }\n this._willSettleAt(promise, i);\n } else {\n this._willSettleAt(new c(function (resolve) {\n return resolve(entry);\n }), i);\n }\n } else {\n this._willSettleAt(resolve(entry), i);\n }\n };\n\n Enumerator.prototype._settledAt = function _settledAt(state, i, value) {\n var promise = this.promise;\n\n\n if (promise._state === PENDING) {\n this._remaining--;\n\n if (state === REJECTED) {\n reject(promise, value);\n } else {\n this._result[i] = value;\n }\n }\n\n if (this._remaining === 0) {\n fulfill(promise, this._result);\n }\n };\n\n Enumerator.prototype._willSettleAt = function _willSettleAt(promise, i) {\n var enumerator = this;\n\n subscribe(promise, undefined, function (value) {\n return enumerator._settledAt(FULFILLED, i, value);\n }, function (reason) {\n return enumerator._settledAt(REJECTED, i, reason);\n });\n };\n\n return Enumerator;\n}();\n\nexport default Enumerator;\n;","import Enumerator from '../enumerator';\n\n/**\n `Promise.all` accepts an array of promises, and returns a new promise which\n is fulfilled with an array of fulfillment values for the passed promises, or\n rejected with the reason of the first passed 
promise to be rejected. It casts all\n elements of the passed iterable to promises as it runs this algorithm.\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = resolve(2);\n let promise3 = resolve(3);\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // The array here would be [ 1, 2, 3 ];\n });\n ```\n\n If any of the `promises` given to `all` are rejected, the first promise\n that is rejected will be given as an argument to the returned promises's\n rejection handler. For example:\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = reject(new Error(\"2\"));\n let promise3 = reject(new Error(\"3\"));\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // Code here never runs because there are rejected promises!\n }, function(error) {\n // error.message === \"2\"\n });\n ```\n\n @method all\n @static\n @param {Array} entries array of promises\n @param {String} label optional string for labeling the promise.\n Useful for tooling.\n @return {Promise} promise that is fulfilled when all `promises` have been\n fulfilled, or rejected if any of them become rejected.\n @static\n*/\nexport default function all(entries) {\n return new Enumerator(this, entries).promise;\n}","import { isArray } from \"../utils\";\n\n/**\n `Promise.race` returns a new promise which is settled in the same way as the\n first passed promise to settle.\n\n Example:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 2');\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // result === 'promise 2' because it was resolved before promise1\n // was resolved.\n });\n ```\n\n `Promise.race` is deterministic in that only the state of the first\n settled promise matters. 
For example, even if other promises given to the\n `promises` array argument are resolved, but the first settled promise has\n become rejected before the other promises became fulfilled, the returned\n promise will become rejected:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n reject(new Error('promise 2'));\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // Code here never runs\n }, function(reason){\n // reason.message === 'promise 2' because promise 2 became rejected before\n // promise 1 became fulfilled\n });\n ```\n\n An example real-world use case is implementing timeouts:\n\n ```javascript\n Promise.race([ajax('foo.json'), timeout(5000)])\n ```\n\n @method race\n @static\n @param {Array} promises array of promises to observe\n Useful for tooling.\n @return {Promise} a promise which settles in the same way as the first passed\n promise to settle.\n*/\nexport default function race(entries) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (!isArray(entries)) {\n return new Constructor(function (_, reject) {\n return reject(new TypeError('You must pass an array to race.'));\n });\n } else {\n return new Constructor(function (resolve, reject) {\n var length = entries.length;\n for (var i = 0; i < length; i++) {\n Constructor.resolve(entries[i]).then(resolve, reject);\n }\n });\n }\n}","import { noop, reject as _reject } from '../-internal';\n\n/**\n `Promise.reject` returns a promise rejected with the passed `reason`.\n It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n reject(new Error('WHOOPS'));\n });\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n ```javascript\n let promise = Promise.reject(new Error('WHOOPS'));\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n @method reject\n @static\n @param {Any} reason value that the returned promise will be rejected with.\n Useful for tooling.\n @return {Promise} a promise rejected with the given `reason`.\n*/\nexport default function reject(reason) {\n /*jshint validthis:true */\n var Constructor = this;\n var promise = new Constructor(noop);\n _reject(promise, reason);\n return promise;\n}","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isFunction } from './utils';\nimport { noop, nextId, PROMISE_ID, initializePromise } from './-internal';\nimport { asap, setAsap, setScheduler } from './asap';\n\nimport all from './promise/all';\nimport race from './promise/race';\nimport Resolve from './promise/resolve';\nimport Reject from './promise/reject';\nimport then from './then';\n\nfunction needsResolver() {\n throw new TypeError('You must pass a resolver function as the first argument to the promise constructor');\n}\n\nfunction needsNew() {\n throw new TypeError(\"Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.\");\n}\n\n/**\n Promise objects represent the eventual 
result of an asynchronous operation. The\n primary way of interacting with a promise is through its `then` method, which\n registers callbacks to receive either a promise's eventual value or the reason\n why the promise cannot be fulfilled.\n\n Terminology\n -----------\n\n - `promise` is an object or function with a `then` method whose behavior conforms to this specification.\n - `thenable` is an object or function that defines a `then` method.\n - `value` is any legal JavaScript value (including undefined, a thenable, or a promise).\n - `exception` is a value that is thrown using the throw statement.\n - `reason` is a value that indicates why a promise was rejected.\n - `settled` the final resting state of a promise, fulfilled or rejected.\n\n A promise can be in one of three states: pending, fulfilled, or rejected.\n\n Promises that are fulfilled have a fulfillment value and are in the fulfilled\n state. Promises that are rejected have a rejection reason and are in the\n rejected state. A fulfillment value is never a thenable.\n\n Promises can also be said to *resolve* a value. If this value is also a\n promise, then the original promise's settled state will match the value's\n settled state. So a promise that *resolves* a promise that rejects will\n itself reject, and a promise that *resolves* a promise that fulfills will\n itself fulfill.\n\n\n Basic Usage:\n ------------\n\n ```js\n let promise = new Promise(function(resolve, reject) {\n // on success\n resolve(value);\n\n // on failure\n reject(reason);\n });\n\n promise.then(function(value) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Advanced Usage:\n ---------------\n\n Promises shine when abstracting away asynchronous interactions such as\n `XMLHttpRequest`s.\n\n ```js\n function getJSON(url) {\n return new Promise(function(resolve, reject){\n let xhr = new XMLHttpRequest();\n\n xhr.open('GET', url);\n xhr.onreadystatechange = handler;\n xhr.responseType = 'json';\n xhr.setRequestHeader('Accept', 'application/json');\n xhr.send();\n\n function handler() {\n if (this.readyState === this.DONE) {\n if (this.status === 200) {\n resolve(this.response);\n } else {\n reject(new Error('getJSON: `' + url + '` failed with status: [' + this.status + ']'));\n }\n }\n };\n });\n }\n\n getJSON('/posts.json').then(function(json) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Unlike callbacks, promises are great composable primitives.\n\n ```js\n Promise.all([\n getJSON('/posts'),\n getJSON('/comments')\n ]).then(function(values){\n values[0] // => postsJSON\n values[1] // => commentsJSON\n\n return values;\n });\n ```\n\n @class Promise\n @param {Function} resolver\n Useful for tooling.\n @constructor\n*/\n\nvar Promise = function () {\n function Promise(resolver) {\n this[PROMISE_ID] = nextId();\n this._result = this._state = undefined;\n this._subscribers = [];\n\n if (noop !== resolver) {\n typeof resolver !== 'function' && needsResolver();\n this instanceof Promise ? initializePromise(this, resolver) : needsNew();\n }\n }\n\n /**\n The primary way of interacting with a promise is through its `then` method,\n which registers callbacks to receive either a promise's eventual value or the\n reason why the promise cannot be fulfilled.\n ```js\n findUser().then(function(user){\n // user is available\n }, function(reason){\n // user is unavailable, and you are given the reason why\n });\n ```\n Chaining\n --------\n The return value of `then` is itself a promise. 
This second, 'downstream'\n promise is resolved with the return value of the first promise's fulfillment\n or rejection handler, or rejected if the handler throws an exception.\n ```js\n findUser().then(function (user) {\n return user.name;\n }, function (reason) {\n return 'default name';\n }).then(function (userName) {\n // If `findUser` fulfilled, `userName` will be the user's name, otherwise it\n // will be `'default name'`\n });\n findUser().then(function (user) {\n throw new Error('Found user, but still unhappy');\n }, function (reason) {\n throw new Error('`findUser` rejected and we're unhappy');\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // if `findUser` fulfilled, `reason` will be 'Found user, but still unhappy'.\n // If `findUser` rejected, `reason` will be '`findUser` rejected and we're unhappy'.\n });\n ```\n If the downstream promise does not specify a rejection handler, rejection reasons will be propagated further downstream.\n ```js\n findUser().then(function (user) {\n throw new PedagogicalException('Upstream error');\n }).then(function (value) {\n // never reached\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // The `PedgagocialException` is propagated all the way down to here\n });\n ```\n Assimilation\n ------------\n Sometimes the value you want to propagate to a downstream promise can only be\n retrieved asynchronously. This can be achieved by returning a promise in the\n fulfillment or rejection handler. The downstream promise will then be pending\n until the returned promise is settled. This is called *assimilation*.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // The user's comments are now available\n });\n ```\n If the assimliated promise rejects, then the downstream promise will also reject.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // If `findCommentsByAuthor` fulfills, we'll have the value here\n }, function (reason) {\n // If `findCommentsByAuthor` rejects, we'll have the reason here\n });\n ```\n Simple Example\n --------------\n Synchronous Example\n ```javascript\n let result;\n try {\n result = findResult();\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n findResult(function(result, err){\n if (err) {\n // failure\n } else {\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findResult().then(function(result){\n // success\n }, function(reason){\n // failure\n });\n ```\n Advanced Example\n --------------\n Synchronous Example\n ```javascript\n let author, books;\n try {\n author = findAuthor();\n books = findBooksByAuthor(author);\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n function foundBooks(books) {\n }\n function failure(reason) {\n }\n findAuthor(function(author, err){\n if (err) {\n failure(err);\n // failure\n } else {\n try {\n findBoooksByAuthor(author, function(books, err) {\n if (err) {\n failure(err);\n } else {\n try {\n foundBooks(books);\n } catch(reason) {\n failure(reason);\n }\n }\n });\n } catch(error) {\n failure(err);\n }\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findAuthor().\n then(findBooksByAuthor).\n then(function(books){\n // found books\n }).catch(function(reason){\n // something went wrong\n });\n ```\n @method then\n @param {Function} onFulfilled\n @param {Function} onRejected\n Useful for tooling.\n @return 
{Promise}\n */\n\n /**\n `catch` is simply sugar for `then(undefined, onRejection)` which makes it the same\n as the catch block of a try/catch statement.\n ```js\n function findAuthor(){\n throw new Error('couldn't find that author');\n }\n // synchronous\n try {\n findAuthor();\n } catch(reason) {\n // something went wrong\n }\n // async with promises\n findAuthor().catch(function(reason){\n // something went wrong\n });\n ```\n @method catch\n @param {Function} onRejection\n Useful for tooling.\n @return {Promise}\n */\n\n\n Promise.prototype.catch = function _catch(onRejection) {\n return this.then(null, onRejection);\n };\n\n /**\n `finally` will be invoked regardless of the promise's fate just as native\n try/catch/finally behaves\n \n Synchronous example:\n \n ```js\n findAuthor() {\n if (Math.random() > 0.5) {\n throw new Error();\n }\n return new Author();\n }\n \n try {\n return findAuthor(); // succeed or fail\n } catch(error) {\n return findOtherAuther();\n } finally {\n // always runs\n // doesn't affect the return value\n }\n ```\n \n Asynchronous example:\n \n ```js\n findAuthor().catch(function(reason){\n return findOtherAuther();\n }).finally(function(){\n // author was either found, or not\n });\n ```\n \n @method finally\n @param {Function} callback\n @return {Promise}\n */\n\n\n Promise.prototype.finally = function _finally(callback) {\n var promise = this;\n var constructor = promise.constructor;\n\n if (isFunction(callback)) {\n return promise.then(function (value) {\n return constructor.resolve(callback()).then(function () {\n return value;\n });\n }, function (reason) {\n return constructor.resolve(callback()).then(function () {\n throw reason;\n });\n });\n }\n\n return promise.then(callback, callback);\n };\n\n return Promise;\n}();\n\nPromise.prototype.then = then;\nexport default Promise;\nPromise.all = all;\nPromise.race = race;\nPromise.resolve = Resolve;\nPromise.reject = Reject;\nPromise._setScheduler = setScheduler;\nPromise._setAsap = setAsap;\nPromise._asap = asap;","/*global self*/\nimport Promise from './promise';\n\nexport default function polyfill() {\n var local = void 0;\n\n if (typeof global !== 'undefined') {\n local = global;\n } else if (typeof self !== 'undefined') {\n local = self;\n } else {\n try {\n local = Function('return this')();\n } catch (e) {\n throw new Error('polyfill failed because global object is unavailable in this environment');\n }\n }\n\n var P = local.Promise;\n\n if (P) {\n var promiseToString = null;\n try {\n promiseToString = Object.prototype.toString.call(P.resolve());\n } catch (e) {\n // silently ignored\n }\n\n if (promiseToString === '[object Promise]' && !P.cast) {\n return;\n }\n }\n\n local.Promise = Promise;\n}","import Promise from './es6-promise/promise';\nimport polyfill from './es6-promise/polyfill';\n\n// Strange compat..\nPromise.polyfill = polyfill;\nPromise.Promise = Promise;\nexport default 
Promise;"],"names":["resolve","_resolve","then","originalThen","originalResolve","Promise","reject","_reject","Resolve","Reject"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACNO,SAAS,gBAAgB,CAAC,CAAC,EAAE;EAClC,IAAI,IAAI,GAAG,OAAO,CAAC,CAAC;EACpB,OAAO,CAAC,KAAK,IAAI,KAAK,IAAI,KAAK,QAAQ,IAAI,IAAI,KAAK,UAAU,CAAC,CAAC;CACjE;;AAED,AAAO,SAAS,UAAU,CAAC,CAAC,EAAE;EAC5B,OAAO,OAAO,CAAC,KAAK,UAAU,CAAC;CAChC;;AAED,AAEC;;AAED,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC;AACtB,IAAI,KAAK,CAAC,OAAO,EAAE;EACjB,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC;CAC1B,MAAM;EACL,QAAQ,GAAG,UAAU,CAAC,EAAE;IACtB,OAAO,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,gBAAgB,CAAC;GAC/D,CAAC;CACH;;AAED,AAAO,IAAI,OAAO,GAAG,QAAQ;;ACtB7B,IAAI,GAAG,GAAG,CAAC,CAAC;AACZ,IAAI,SAAS,GAAG,KAAK,CAAC,CAAC;AACvB,IAAI,iBAAiB,GAAG,KAAK,CAAC,CAAC;;AAE/B,AAAO,IAAI,IAAI,GAAG,SAAS,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;EAC7C,KAAK,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC;EACtB,KAAK,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;EACrB,GAAG,IAAI,CAAC,CAAC;EACT,IAAI,GAAG,KAAK,CAAC,EAAE;;;;IAIb,IAAI,iBAAiB,EAAE;MACrB,iBAAiB,CAAC,KAAK,CAAC,CAAC;KAC1B,MAAM;MACL,aAAa,EAAE,CAAC;KACjB;GACF;CACF,CAAC;;AAEF,AAAO,SAAS,YAAY,CAAC,UAAU,EAAE;EACvC,iBAAiB,GAAG,UAAU,CAAC;CAChC;;AAED,AAAO,SAAS,OAAO,CAAC,MAAM,EAAE;EAC9B,IAAI,GAAG,MAAM,CAAC;CACf;;AAED,IAAI,aAAa,GAAG,OAAO,MAAM,KAAK,WAAW,GAAG,MAAM,GAAG,SAAS,CAAC;AACvE,IAAI,aAAa,GAAG,aAAa,IAAI,EAAE,CAAC;AACxC,IAAI,uBAAuB,GAAG,aAAa,CAAC,gBAAgB,IAAI,aAAa,CAAC,sBAAsB,CAAC;AACrG,IAAI,MAAM,GAAG,OAAO,IAAI,KAAK,WAAW,IAAI,OAAO,OAAO,KAAK,WAAW,IAAI,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,kBAAkB,CAAC;;;AAG/H,IAAI,QAAQ,GAAG,OAAO,iBAAiB,KAAK,WAAW,IAAI,OAAO,aAAa,KAAK,WAAW,IAAI,OAAO,cAAc,KAAK,WAAW,CAAC;;;AAGzI,SAAS,WAAW,GAAG;;;EAGrB,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;GAChC,CAAC;CACH;;;AAGD,SAAS,aAAa,GAAG;EACvB,IAAI,OAAO,SAAS,KAAK,WAAW,EAAE;IACpC,OAAO,YAAY;MACjB,SAAS,CAAC,KAAK,CAAC,CAAC;KAClB,CAAC;GACH;;EAED,OAAO,aAAa,EAAE,CAAC;CACxB;;AAED,SAAS,mBAAmB,GAAG;EAC7B,IAAI,UAAU,GAAG,CAAC,CAAC;EACnB,IAAI,QAAQ,GAAG,IAAI,uBAAuB,CAAC,KAAK,CAAC,CAAC;EAClD,IAAI,IAAI,GAAG,QAAQ,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC;EACvC,QAAQ,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;;EAEhD,OAAO,YAAY;IACjB,IAAI,CAAC,IAAI,GAAG,UAAU,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC;GAC3C,CAAC;CACH;;;AAGD,SAAS,iBAAiB,GAAG;EAC3B,IAAI,OAAO,GAAG,IAAI,cAAc,EAAE,CAAC;EACnC,OAAO,CAAC,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC;EAChC,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;GACrC,CAAC;CACH;;AAED,SAAS,aAAa,GAAG;;;EAGvB,IAAI,gBAAgB,GAAG,UAAU,CAAC;EAClC,OAAO,YAAY;IACjB,OAAO,gBAAgB,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;GACnC,CAAC;CACH;;AAED,IAAI,KAAK,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC;AAC5B,SAAS,KAAK,GAAG;EACf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE;IAC/B,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,GAAG,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;;IAEvB,QAAQ,CAAC,GAAG,CAAC,CAAC;;IAEd,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC;IACrB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;GAC1B;;EAED,GAAG,GAAG,CAAC,CAAC;CACT;;AAED,SAAS,YAAY,GAAG;EACtB,IAAI;IACF,IAAI,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;IACvD,SAAS,GAAG,KAAK,CAAC,SAAS,IAAI,KAAK,CAAC,YAAY,CAAC;IAClD,OAAO,aAAa,EAAE,CAAC;GACxB,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,aAAa,EAAE,CAAC;GACxB;CACF;;AAED,IAAI,aAAa,GAAG,KAAK,CAAC,CAAC;;AAE3B,IAAI,MAAM,EAAE;EACV,aAAa,GAAG,WAAW,EAAE,CAAC;CAC/B,MAAM,IAAI,uBAAuB,EAAE;EAClC,aAAa,GAAG,mBAAmB,EAAE,CAAC;CACvC,MAAM,IAAI,QAAQ,EAAE;EACnB,aAAa,GAAG,iBAAiB,EAAE,CAAC;CACrC,MAAM,IAAI,aAAa,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;EACvE,aAAa,GAAG,YAAY,EAAE,CAAC;CAChC,MAAM;EACL,aA
Aa,GAAG,aAAa,EAAE,CAAC;;;CACjC,DCtHc,SAAS,IAAI,CAAC,aAAa,EAAE,WAAW,EAAE;EACvD,IAAI,MAAM,GAAG,IAAI,CAAC;;EAElB,IAAI,KAAK,GAAG,IAAI,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;;EAEvC,IAAI,KAAK,CAAC,UAAU,CAAC,KAAK,SAAS,EAAE;IACnC,WAAW,CAAC,KAAK,CAAC,CAAC;GACpB;;EAED,IAAI,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;;;EAG3B,IAAI,MAAM,EAAE;IACV,IAAI,QAAQ,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IACrC,IAAI,CAAC,YAAY;MACf,OAAO,cAAc,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;KAChE,CAAC,CAAC;GACJ,MAAM;IACL,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,CAAC,CAAC;GACtD;;EAED,OAAO,KAAK,CAAC;;;CACd,DCxBD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+BA,AAAe,SAASA,SAAO,CAAC,MAAM,EAAE;;EAEtC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,CAAC,WAAW,KAAK,WAAW,EAAE;IAC9E,OAAO,MAAM,CAAC;GACf;;EAED,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,OAAQ,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EAC1B,OAAO,OAAO,CAAC;;;CAChB,DCrCM,IAAI,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;;AAEhE,SAAS,IAAI,GAAG,EAAE;;AAElB,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC;AACrB,IAAI,SAAS,GAAG,CAAC,CAAC;AAClB,IAAI,QAAQ,GAAG,CAAC,CAAC;;AAEjB,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,0CAA0C,CAAC,CAAC;CAClE;;AAED,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,sDAAsD,CAAC,CAAC;CAC9E;;AAED,SAAS,OAAO,CAACC,OAAI,EAAE,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,EAAE;EAClE,IAAI;IACFA,OAAI,CAAC,IAAI,CAAC,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;GACxD,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,CAAC;GACV;CACF;;AAED,SAAS,qBAAqB,CAAC,OAAO,EAAE,QAAQ,EAAEA,OAAI,EAAE;EACtD,IAAI,CAAC,UAAU,OAAO,EAAE;IACtB,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,IAAI,KAAK,GAAG,OAAO,CAACA,OAAI,EAAE,QAAQ,EAAE,UAAU,KAAK,EAAE;MACnD,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;MACd,IAAI,QAAQ,KAAK,KAAK,EAAE;QACtB,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB,MAAM;QACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB;KACF,EAAE,UAAU,MAAM,EAAE;MACnB,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;;MAEd,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,EAAE,UAAU,IAAI,OAAO,CAAC,MAAM,IAAI,kBAAkB,CAAC,CAAC,CAAC;;IAExD,IAAI,CAAC,MAAM,IAAI,KAAK,EAAE;MACpB,MAAM,GAAG,IAAI,CAAC;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACxB;GACF,EAAE,OAAO,CAAC,CAAC;CACb;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI,QAAQ,CAAC,MAAM,KAAK,SAAS,EAAE;IACjC,OAAO,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACpC,MAAM,IAAI,QAAQ,CAAC,MAAM,KAAK,QAAQ,EAAE;IACvC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACnC,MAAM;IACL,SAAS,CAAC,QAAQ,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC9C,OAAO,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KAChC,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KAChC,CAAC,CAAC;GACJ;CACF;;AAED,SAAS,mBAAmB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,EAAE;EACzD,IAAI,aAAa,CAAC,WAAW,KAAK,OAAO,CAAC,WAAW,IAAIA,OAAI,KAAKC,IAAY,IAAI,aAAa,CAAC,WAAW,CAAC,OAAO,KAAKC,SAAe,EAAE;IACvI,iBAAiB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;GAC3C,MAAM;IACL,IAAIF,OAAI,KAAK,SAAS,EAAE;MACtB,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC,MAAM,IAAI,UAAU,CAACA,OAAI,CAAC,EAAE;MAC3B,qBAAqB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,CAAC,CAAC;KACrD,MAAM;MACL,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC;GACF;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,KAAK,KAAK,EAAE;IACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;GACpC,MAAM,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;IAClC,IAAIA,OAAI,GAAG,KAAK,CAAC,CAAC;IAClB,IAAI;MACFA,OAAI,GAAG,KAAK,CAAC,IAAI,CAAC;KACnB,CAAC,OAAO,KAAK,EAAE;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;MACvB,OAAO;KACR;IACD,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAEA,OAAI,CAAC,CAAC;GAC3C,MAAM;IACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB;CACF;;AAED,SAAS,gBAAgB,CAAC,OAAO,EAAE;EACjC,IAAI
,OAAO,CAAC,QAAQ,EAAE;IACpB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;GACnC;;EAED,OAAO,CAAC,OAAO,CAAC,CAAC;CAClB;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;;EAED,OAAO,CAAC,OAAO,GAAG,KAAK,CAAC;EACxB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;;EAE3B,IAAI,OAAO,CAAC,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;IACrC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;EACD,OAAO,CAAC,MAAM,GAAG,QAAQ,CAAC;EAC1B,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC;;EAEzB,IAAI,CAAC,gBAAgB,EAAE,OAAO,CAAC,CAAC;CACjC;;AAED,SAAS,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,EAAE;EAC5D,IAAI,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;EACvC,IAAI,MAAM,GAAG,YAAY,CAAC,MAAM,CAAC;;;EAGjC,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC;;EAEvB,YAAY,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC;EAC7B,YAAY,CAAC,MAAM,GAAG,SAAS,CAAC,GAAG,aAAa,CAAC;EACjD,YAAY,CAAC,MAAM,GAAG,QAAQ,CAAC,GAAG,WAAW,CAAC;;EAE9C,IAAI,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,MAAM,EAAE;IACjC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;GACvB;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE;EACxB,IAAI,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;EACvC,IAAI,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC;;EAE7B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;IAC5B,OAAO;GACR;;EAED,IAAI,KAAK,GAAG,KAAK,CAAC;MACd,QAAQ,GAAG,KAAK,CAAC;MACjB,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;;EAE7B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,IAAI,CAAC,EAAE;IAC9C,KAAK,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;IACvB,QAAQ,GAAG,WAAW,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC;;IAEpC,IAAI,KAAK,EAAE;MACT,cAAc,CAAC,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;KAClD,MAAM;MACL,QAAQ,CAAC,MAAM,CAAC,CAAC;KAClB;GACF;;EAED,OAAO,CAAC,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC;CACjC;;AAED,SAAS,cAAc,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE;EAC1D,IAAI,WAAW,GAAG,UAAU,CAAC,QAAQ,CAAC;MAClC,KAAK,GAAG,KAAK,CAAC;MACd,KAAK,GAAG,KAAK,CAAC;MACd,SAAS,GAAG,IAAI,CAAC;;EAErB,IAAI,WAAW,EAAE;IACf,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC;KAC1B,CAAC,OAAO,CAAC,EAAE;MACV,SAAS,GAAG,KAAK,CAAC;MAClB,KAAK,GAAG,CAAC,CAAC;KACX;;IAED,IAAI,OAAO,KAAK,KAAK,EAAE;MACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;MACnC,OAAO;KACR;GACF,MAAM;IACL,KAAK,GAAG,MAAM,CAAC;GAChB;;EAED,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;;GAE/B,MAAM,IAAI,WAAW,IAAI,SAAS,EAAE;IACnC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,SAAS,KAAK,KAAK,EAAE;IAC9B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB,MAAM,IAAI,OAAO,KAAK,SAAS,EAAE;IAChC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,OAAO,KAAK,QAAQ,EAAE;IAC/B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI;IACF,QAAQ,CAAC,SAAS,cAAc,CAAC,KAAK,EAAE;MACtC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACzB,EAAE,SAAS,aAAa,CAAC,MAAM,EAAE;MAChC,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,CAAC,CAAC;GACJ,CAAC,OAAO,CAAC,EAAE;IACV,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;GACpB;CACF;;AAED,IAAI,EAAE,GAAG,CAAC,CAAC;AACX,SAAS,MAAM,GAAG;EAChB,OAAO,EAAE,EAAE,CAAC;CACb;;AAED,SAAS,WAAW,CAAC,OAAO,EAAE;EAC5B,OAAO,CAAC,UAAU,CAAC,GAAG,EAAE,EAAE,CAAC;EAC3B,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;EAC3B,OAAO,CAAC,OAAO,GAAG,SAAS,CAAC;EAC5B,OAAO,CAAC,YAAY,GAAG,EAAE,CAAC;CAC3B;;AChOD,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;CAC7D,AAAC;;AAEF,IAAI,UAAU,GAAG,YAAY;EAC3B,SAAS,UAAU,CAAC,WAAW,EAAE,KAAK,EAAE;IACtC,IAAI,CAAC,oBAAoB,GAAG,WAAW,CAAC;IACxC,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;;IAErC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;MAC7B,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;KAC3B;;IAED,IAAI,OAAO,CAAC,KAAK,CAAC,EAAE;MAClB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC;MAC3B,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,MAAM,CAAC;;MAE/B,IAAI,CAAC,OAAO,
GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;;MAEtC,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACrB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;OACrC,MAAM;QACL,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;QACvB,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;UACzB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;SACrC;OACF;KACF,MAAM;MACL,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;KACzC;GACF;;EAED,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE;IAC3D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,KAAK,OAAO,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;MAChE,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC9B;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE;IAC9D,IAAI,CAAC,GAAG,IAAI,CAAC,oBAAoB,CAAC;IAClC,IAAIF,UAAO,GAAG,CAAC,CAAC,OAAO,CAAC;;;IAGxB,IAAIA,UAAO,KAAKI,SAAe,EAAE;MAC/B,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;MACnB,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;MACnB,IAAI,QAAQ,GAAG,KAAK,CAAC;MACrB,IAAI;QACF,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC;OACpB,CAAC,OAAO,CAAC,EAAE;QACV,QAAQ,GAAG,IAAI,CAAC;QAChB,KAAK,GAAG,CAAC,CAAC;OACX;;MAED,IAAI,KAAK,KAAKD,IAAY,IAAI,KAAK,CAAC,MAAM,KAAK,OAAO,EAAE;QACtD,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;OACjD,MAAM,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;QACtC,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB,MAAM,IAAI,CAAC,KAAKE,SAAO,EAAE;QACxB,IAAI,OAAO,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC;QAC1B,IAAI,QAAQ,EAAE;UACZ,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;SACxB,MAAM;UACL,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;SAC5C;QACD,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;OAChC,MAAM;QACL,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,UAAUL,UAAO,EAAE;UAC1C,OAAOA,UAAO,CAAC,KAAK,CAAC,CAAC;SACvB,CAAC,EAAE,CAAC,CAAC,CAAC;OACR;KACF,MAAM;MACL,IAAI,CAAC,aAAa,CAACA,UAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC;KACvC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE,KAAK,EAAE;IACrE,IAAI,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;;;IAG3B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;MAC9B,IAAI,CAAC,UAAU,EAAE,CAAC;;MAElB,IAAI,KAAK,KAAK,QAAQ,EAAE;QACtB,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACxB,MAAM;QACL,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB;KACF;;IAED,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;MACzB,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;KAChC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,aAAa,GAAG,SAAS,aAAa,CAAC,OAAO,EAAE,CAAC,EAAE;IACtE,IAAI,UAAU,GAAG,IAAI,CAAC;;IAEtB,SAAS,CAAC,OAAO,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC7C,OAAO,UAAU,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC;KACnD,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,UAAU,CAAC,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;KACnD,CAAC,CAAC;GACJ,CAAC;;EAEF,OAAO,UAAU,CAAC;CACnB,EAAE;;ACrHH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+CA,AAAe,SAAS,GAAG,CAAC,OAAO,EAAE;EACnC,OAAO,IAAI,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC,OAAO,CAAC;;;CAC9C,DCjDD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiEA,AAAe,SAAS,IAAI,CAAC,OAAO,EAAE;;EAEpC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;IACrB,OAAO,IAAI,WAAW,CAAC,UAAU,CAAC,EAAE,MAAM,EAAE;MAC1C,OAAO,MAAM,CAAC,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAC,CAAC;KACjE,CAAC,CAAC;GACJ,MAAM;IACL,OAAO,IAAI,WAAW,CAAC,UAAU,OAAO,EAAE,MAAM,EAAE;MAChD,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;MAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE;QAC/B,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;OACvD;KACF,CAAC,CAAC;GACJ;;;CACF,DCjFD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCA,AAAe,SAASM,QAAM,CAAC,MAAM,EAAE;;EAErC,IAAI,WAA
W,GAAG,IAAI,CAAC;EACvB,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,MAAO,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EACzB,OAAO,OAAO,CAAC;;;CAChB,DC9BD,SAAS,aAAa,GAAG;EACvB,MAAM,IAAI,SAAS,CAAC,oFAAoF,CAAC,CAAC;CAC3G;;AAED,SAAS,QAAQ,GAAG;EAClB,MAAM,IAAI,SAAS,CAAC,uHAAuH,CAAC,CAAC;CAC9I;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0GD,IAAIF,SAAO,GAAG,YAAY;EACxB,SAAS,OAAO,CAAC,QAAQ,EAAE;IACzB,IAAI,CAAC,UAAU,CAAC,GAAG,MAAM,EAAE,CAAC;IAC5B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;IACvC,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;;IAEvB,IAAI,IAAI,KAAK,QAAQ,EAAE;MACrB,OAAO,QAAQ,KAAK,UAAU,IAAI,aAAa,EAAE,CAAC;MAClD,IAAI,YAAY,OAAO,GAAG,iBAAiB,CAAC,IAAI,EAAE,QAAQ,CAAC,GAAG,QAAQ,EAAE,CAAC;KAC1E;GACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA4LD,OAAO,CAAC,SAAS,CAAC,KAAK,GAAG,SAAS,MAAM,CAAC,WAAW,EAAE;IACrD,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;GACrC,CAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA0CF,OAAO,CAAC,SAAS,CAAC,OAAO,GAAG,SAAS,QAAQ,CAAC,QAAQ,EAAE;IACtD,IAAI,OAAO,GAAG,IAAI,CAAC;IACnB,IAAI,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;;IAEtC,IAAI,UAAU,CAAC,QAAQ,CAAC,EAAE;MACxB,OAAO,OAAO,CAAC,IAAI,CAAC,UAAU,KAAK,EAAE;QACnC,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;UACtD,OAAO,KAAK,CAAC;SACd,CAAC,CAAC;OACJ,EAAE,UAAU,MAAM,EAAE;QACnB,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;UACtD,MAAM,MAAM,CAAC;SACd,CAAC,CAAC;OACJ,CAAC,CAAC;KACJ;;IAED,OAAO,OAAO,CAAC,IAAI,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;GACzC,CAAC;;EAEF,OAAO,OAAO,CAAC;CAChB,EAAE,CAAC;;AAEJA,SAAO,CAAC,SAAS,CAAC,IAAI,GAAG,IAAI,CAAC;AAC9B,AACAA,SAAO,CAAC,GAAG,GAAG,GAAG,CAAC;AAClBA,SAAO,CAAC,IAAI,GAAG,IAAI,CAAC;AACpBA,SAAO,CAAC,OAAO,GAAGG,SAAO,CAAC;AAC1BH,SAAO,CAAC,MAAM,GAAGI,QAAM,CAAC;AACxBJ,SAAO,CAAC,aAAa,GAAG,YAAY,CAAC;AACrCA,SAAO,CAAC,QAAQ,GAAG,OAAO,CAAC;AAC3BA,SAAO,CAAC,KAAK,GAAG,IAAI;;AC5YpB;AACA,AAEe,SAAS,QAAQ,GAAG;EACjC,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;;EAEnB,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;IACjC,KAAK,GAAG,MAAM,CAAC;GAChB,MAAM,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;IACtC,KAAK,GAAG,IAAI,CAAC;GACd,MAAM;IACL,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC;KACnC,CAAC,OAAO,CAAC,EAAE;MACV,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;KAC7F;GACF;;EAED,IAAI,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC;;EAEtB,IAAI,CAAC,EAAE;IACL,IAAI,eAAe,GAAG,IAAI,CAAC;IAC3B,IAAI;MACF,eAAe,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KAC/D,CAAC,OAAO,CAAC,EAAE;;KAEX;;IAED,IAAI,eAAe,KAAK,kBAAkB,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE;MACrD,OAAO;KACR;GACF;;EAED,KAAK,CAAC,OAAO,GAAGA,SAAO,CAAC;;;CACzB,DC/BD;AACAA,SAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC;AAC5BA,SAAO,CAAC,OAAO,GAAGA,SAAO,CAAC;;;;;;;;","file":"es6-promise.js"} \ No newline at end of file diff --git a/node_modules/es6-promise/dist/es6-promise.min.js b/node_modules/es6-promise/dist/es6-promise.min.js index 1d9dc4877a573..6af5903ab5813 100644 --- a/node_modules/es6-promise/dist/es6-promise.min.js +++ b/node_modules/es6-promise/dist/es6-promise.min.js @@ -1 +1 @@ -!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):t.ES6Promise=e()}(this,function(){"use strict";function t(t){var e=typeof t;return null!==t&&("object"===e||"function"===e)}function e(t){return"function"==typeof t}function n(t){B=t}function r(t){G=t}function o(){return function(){return process.nextTick(a)}}function i(){return"undefined"!=typeof z?function(){z(a)}:c()}function 
s(){var t=0,e=new J(a),n=document.createTextNode("");return e.observe(n,{characterData:!0}),function(){n.data=t=++t%2}}function u(){var t=new MessageChannel;return t.port1.onmessage=a,function(){return t.port2.postMessage(0)}}function c(){var t=setTimeout;return function(){return t(a,1)}}function a(){for(var t=0;t postsJSON\n values[1] // => commentsJSON\n\n return values;\n });\n ```\n\n @class Promise\n @param {Function} resolver\n Useful for tooling.\n @constructor\n*/\n\nvar Promise = function () {\n function Promise(resolver) {\n this[PROMISE_ID] = nextId();\n this._result = this._state = undefined;\n this._subscribers = [];\n\n if (noop !== resolver) {\n typeof resolver !== 'function' && needsResolver();\n this instanceof Promise ? initializePromise(this, resolver) : needsNew();\n }\n }\n\n /**\n The primary way of interacting with a promise is through its `then` method,\n which registers callbacks to receive either a promise's eventual value or the\n reason why the promise cannot be fulfilled.\n ```js\n findUser().then(function(user){\n // user is available\n }, function(reason){\n // user is unavailable, and you are given the reason why\n });\n ```\n Chaining\n --------\n The return value of `then` is itself a promise. This second, 'downstream'\n promise is resolved with the return value of the first promise's fulfillment\n or rejection handler, or rejected if the handler throws an exception.\n ```js\n findUser().then(function (user) {\n return user.name;\n }, function (reason) {\n return 'default name';\n }).then(function (userName) {\n // If `findUser` fulfilled, `userName` will be the user's name, otherwise it\n // will be `'default name'`\n });\n findUser().then(function (user) {\n throw new Error('Found user, but still unhappy');\n }, function (reason) {\n throw new Error('`findUser` rejected and we're unhappy');\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // if `findUser` fulfilled, `reason` will be 'Found user, but still unhappy'.\n // If `findUser` rejected, `reason` will be '`findUser` rejected and we're unhappy'.\n });\n ```\n If the downstream promise does not specify a rejection handler, rejection reasons will be propagated further downstream.\n ```js\n findUser().then(function (user) {\n throw new PedagogicalException('Upstream error');\n }).then(function (value) {\n // never reached\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // The `PedgagocialException` is propagated all the way down to here\n });\n ```\n Assimilation\n ------------\n Sometimes the value you want to propagate to a downstream promise can only be\n retrieved asynchronously. This can be achieved by returning a promise in the\n fulfillment or rejection handler. The downstream promise will then be pending\n until the returned promise is settled. 
This is called *assimilation*.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // The user's comments are now available\n });\n ```\n If the assimliated promise rejects, then the downstream promise will also reject.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // If `findCommentsByAuthor` fulfills, we'll have the value here\n }, function (reason) {\n // If `findCommentsByAuthor` rejects, we'll have the reason here\n });\n ```\n Simple Example\n --------------\n Synchronous Example\n ```javascript\n let result;\n try {\n result = findResult();\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n findResult(function(result, err){\n if (err) {\n // failure\n } else {\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findResult().then(function(result){\n // success\n }, function(reason){\n // failure\n });\n ```\n Advanced Example\n --------------\n Synchronous Example\n ```javascript\n let author, books;\n try {\n author = findAuthor();\n books = findBooksByAuthor(author);\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n function foundBooks(books) {\n }\n function failure(reason) {\n }\n findAuthor(function(author, err){\n if (err) {\n failure(err);\n // failure\n } else {\n try {\n findBoooksByAuthor(author, function(books, err) {\n if (err) {\n failure(err);\n } else {\n try {\n foundBooks(books);\n } catch(reason) {\n failure(reason);\n }\n }\n });\n } catch(error) {\n failure(err);\n }\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findAuthor().\n then(findBooksByAuthor).\n then(function(books){\n // found books\n }).catch(function(reason){\n // something went wrong\n });\n ```\n @method then\n @param {Function} onFulfilled\n @param {Function} onRejected\n Useful for tooling.\n @return {Promise}\n */\n\n /**\n `catch` is simply sugar for `then(undefined, onRejection)` which makes it the same\n as the catch block of a try/catch statement.\n ```js\n function findAuthor(){\n throw new Error('couldn't find that author');\n }\n // synchronous\n try {\n findAuthor();\n } catch(reason) {\n // something went wrong\n }\n // async with promises\n findAuthor().catch(function(reason){\n // something went wrong\n });\n ```\n @method catch\n @param {Function} onRejection\n Useful for tooling.\n @return {Promise}\n */\n\n\n Promise.prototype.catch = function _catch(onRejection) {\n return this.then(null, onRejection);\n };\n\n /**\n `finally` will be invoked regardless of the promise's fate just as native\n try/catch/finally behaves\n \n Synchronous example:\n \n ```js\n findAuthor() {\n if (Math.random() > 0.5) {\n throw new Error();\n }\n return new Author();\n }\n \n try {\n return findAuthor(); // succeed or fail\n } catch(error) {\n return findOtherAuther();\n } finally {\n // always runs\n // doesn't affect the return value\n }\n ```\n \n Asynchronous example:\n \n ```js\n findAuthor().catch(function(reason){\n return findOtherAuther();\n }).finally(function(){\n // author was either found, or not\n });\n ```\n \n @method finally\n @param {Function} callback\n @return {Promise}\n */\n\n\n Promise.prototype.finally = function _finally(callback) {\n var promise = this;\n var constructor = promise.constructor;\n\n return promise.then(function (value) {\n return constructor.resolve(callback()).then(function () {\n return value;\n });\n }, function (reason) {\n return 
constructor.resolve(callback()).then(function () {\n throw reason;\n });\n });\n };\n\n return Promise;\n}();\n\nPromise.prototype.then = then;\nexport default Promise;\nPromise.all = all;\nPromise.race = race;\nPromise.resolve = Resolve;\nPromise.reject = Reject;\nPromise._setScheduler = setScheduler;\nPromise._setAsap = setAsap;\nPromise._asap = asap;","/*global self*/\nimport Promise from './promise';\n\nexport default function polyfill() {\n var local = void 0;\n\n if (typeof global !== 'undefined') {\n local = global;\n } else if (typeof self !== 'undefined') {\n local = self;\n } else {\n try {\n local = Function('return this')();\n } catch (e) {\n throw new Error('polyfill failed because global object is unavailable in this environment');\n }\n }\n\n var P = local.Promise;\n\n if (P) {\n var promiseToString = null;\n try {\n promiseToString = Object.prototype.toString.call(P.resolve());\n } catch (e) {\n // silently ignored\n }\n\n if (promiseToString === '[object Promise]' && !P.cast) {\n return;\n }\n }\n\n local.Promise = Promise;\n}","import Promise from './es6-promise/promise';\nimport polyfill from './es6-promise/polyfill';\n\n// Strange compat..\nPromise.polyfill = polyfill;\nPromise.Promise = Promise;\nexport default Promise;"],"names":["resolve","_resolve","then","originalThen","originalResolve","Promise","reject","_reject","Resolve","Reject"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACNO,SAAS,gBAAgB,CAAC,CAAC,EAAE;EAClC,IAAI,IAAI,GAAG,OAAO,CAAC,CAAC;EACpB,OAAO,CAAC,KAAK,IAAI,KAAK,IAAI,KAAK,QAAQ,IAAI,IAAI,KAAK,UAAU,CAAC,CAAC;CACjE;;AAED,AAAO,SAAS,UAAU,CAAC,CAAC,EAAE;EAC5B,OAAO,OAAO,CAAC,KAAK,UAAU,CAAC;CAChC;;AAED,AAEC;;AAED,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC;AACtB,IAAI,KAAK,CAAC,OAAO,EAAE;EACjB,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC;CAC1B,MAAM;EACL,QAAQ,GAAG,UAAU,CAAC,EAAE;IACtB,OAAO,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,gBAAgB,CAAC;GAC/D,CAAC;CACH;;AAED,AAAO,IAAI,OAAO,GAAG,QAAQ;;ACtB7B,IAAI,GAAG,GAAG,CAAC,CAAC;AACZ,IAAI,SAAS,GAAG,KAAK,CAAC,CAAC;AACvB,IAAI,iBAAiB,GAAG,KAAK,CAAC,CAAC;;AAE/B,AAAO,IAAI,IAAI,GAAG,SAAS,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;EAC7C,KAAK,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC;EACtB,KAAK,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;EACrB,GAAG,IAAI,CAAC,CAAC;EACT,IAAI,GAAG,KAAK,CAAC,EAAE;;;;IAIb,IAAI,iBAAiB,EAAE;MACrB,iBAAiB,CAAC,KAAK,CAAC,CAAC;KAC1B,MAAM;MACL,aAAa,EAAE,CAAC;KACjB;GACF;CACF,CAAC;;AAEF,AAAO,SAAS,YAAY,CAAC,UAAU,EAAE;EACvC,iBAAiB,GAAG,UAAU,CAAC;CAChC;;AAED,AAAO,SAAS,OAAO,CAAC,MAAM,EAAE;EAC9B,IAAI,GAAG,MAAM,CAAC;CACf;;AAED,IAAI,aAAa,GAAG,OAAO,MAAM,KAAK,WAAW,GAAG,MAAM,GAAG,SAAS,CAAC;AACvE,IAAI,aAAa,GAAG,aAAa,IAAI,EAAE,CAAC;AACxC,IAAI,uBAAuB,GAAG,aAAa,CAAC,gBAAgB,IAAI,aAAa,CAAC,sBAAsB,CAAC;AACrG,IAAI,MAAM,GAAG,OAAO,IAAI,KAAK,WAAW,IAAI,OAAO,OAAO,KAAK,WAAW,IAAI,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,kBAAkB,CAAC;;;AAG/H,IAAI,QAAQ,GAAG,OAAO,iBAAiB,KAAK,WAAW,IAAI,OAAO,aAAa,KAAK,WAAW,IAAI,OAAO,cAAc,KAAK,WAAW,CAAC;;;AAGzI,SAAS,WAAW,GAAG;;;EAGrB,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;GAChC,CAAC;CACH;;;AAGD,SAAS,aAAa,GAAG;EACvB,IAAI,OAAO,SAAS,KAAK,WAAW,EAAE;IACpC,OAAO,YAAY;MACjB,SAAS,CAAC,KAAK,CAAC,CAAC;KAClB,CAAC;GACH;;EAED,OAAO,aAAa,EAAE,CAAC;CACxB;;AAED,SAAS,mBAAmB,GAAG;EAC7B,IAAI,UAAU,GAAG,CAAC,CAAC;EACnB,IAAI,QAAQ,GAAG,IAAI,uBAAuB,CAAC,KAAK,CAAC,CAAC;EAClD,IAAI,IAAI,GAAG,QAAQ,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC;EACvC,QAAQ,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;;EAEhD,OAAO,YAAY;IACjB,IAAI,CAAC,IAAI,GAAG,UAAU,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC;GAC3C,CAAC;CACH;;;AAGD,SAAS,iBAAiB,GAAG;EAC3B,IAAI,OAAO,GAAG,IAAI,cAAc,EAAE,CA
AC;EACnC,OAAO,CAAC,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC;EAChC,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;GACrC,CAAC;CACH;;AAED,SAAS,aAAa,GAAG;;;EAGvB,IAAI,gBAAgB,GAAG,UAAU,CAAC;EAClC,OAAO,YAAY;IACjB,OAAO,gBAAgB,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;GACnC,CAAC;CACH;;AAED,IAAI,KAAK,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC;AAC5B,SAAS,KAAK,GAAG;EACf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE;IAC/B,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,GAAG,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;;IAEvB,QAAQ,CAAC,GAAG,CAAC,CAAC;;IAEd,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC;IACrB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;GAC1B;;EAED,GAAG,GAAG,CAAC,CAAC;CACT;;AAED,SAAS,YAAY,GAAG;EACtB,IAAI;IACF,IAAI,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;IACvD,SAAS,GAAG,KAAK,CAAC,SAAS,IAAI,KAAK,CAAC,YAAY,CAAC;IAClD,OAAO,aAAa,EAAE,CAAC;GACxB,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,aAAa,EAAE,CAAC;GACxB;CACF;;AAED,IAAI,aAAa,GAAG,KAAK,CAAC,CAAC;;AAE3B,IAAI,MAAM,EAAE;EACV,aAAa,GAAG,WAAW,EAAE,CAAC;CAC/B,MAAM,IAAI,uBAAuB,EAAE;EAClC,aAAa,GAAG,mBAAmB,EAAE,CAAC;CACvC,MAAM,IAAI,QAAQ,EAAE;EACnB,aAAa,GAAG,iBAAiB,EAAE,CAAC;CACrC,MAAM,IAAI,aAAa,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;EACvE,aAAa,GAAG,YAAY,EAAE,CAAC;CAChC,MAAM;EACL,aAAa,GAAG,aAAa,EAAE,CAAC;;;CACjC,DCtHc,SAAS,IAAI,CAAC,aAAa,EAAE,WAAW,EAAE;EACvD,IAAI,MAAM,GAAG,IAAI,CAAC;;EAElB,IAAI,KAAK,GAAG,IAAI,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;;EAEvC,IAAI,KAAK,CAAC,UAAU,CAAC,KAAK,SAAS,EAAE;IACnC,WAAW,CAAC,KAAK,CAAC,CAAC;GACpB;;EAED,IAAI,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;;;EAG3B,IAAI,MAAM,EAAE;IACV,IAAI,QAAQ,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IACrC,IAAI,CAAC,YAAY;MACf,OAAO,cAAc,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;KAChE,CAAC,CAAC;GACJ,MAAM;IACL,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,CAAC,CAAC;GACtD;;EAED,OAAO,KAAK,CAAC;;;CACd,DCxBD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+BA,AAAe,SAASA,SAAO,CAAC,MAAM,EAAE;;EAEtC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,CAAC,WAAW,KAAK,WAAW,EAAE;IAC9E,OAAO,MAAM,CAAC;GACf;;EAED,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,OAAQ,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EAC1B,OAAO,OAAO,CAAC;;;CAChB,DCrCM,IAAI,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;;AAEhE,SAAS,IAAI,GAAG,EAAE;;AAElB,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC;AACrB,IAAI,SAAS,GAAG,CAAC,CAAC;AAClB,IAAI,QAAQ,GAAG,CAAC,CAAC;;AAEjB,IAAI,eAAe,GAAG,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC;;AAEtC,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,0CAA0C,CAAC,CAAC;CAClE;;AAED,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,sDAAsD,CAAC,CAAC;CAC9E;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE;EACxB,IAAI;IACF,OAAO,OAAO,CAAC,IAAI,CAAC;GACrB,CAAC,OAAO,KAAK,EAAE;IACd,eAAe,CAAC,KAAK,GAAG,KAAK,CAAC;IAC9B,OAAO,eAAe,CAAC;GACxB;CACF;;AAED,SAAS,OAAO,CAACC,OAAI,EAAE,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,EAAE;EAClE,IAAI;IACFA,OAAI,CAAC,IAAI,CAAC,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;GACxD,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,CAAC;GACV;CACF;;AAED,SAAS,qBAAqB,CAAC,OAAO,EAAE,QAAQ,EAAEA,OAAI,EAAE;EACtD,IAAI,CAAC,UAAU,OAAO,EAAE;IACtB,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,IAAI,KAAK,GAAG,OAAO,CAACA,OAAI,EAAE,QAAQ,EAAE,UAAU,KAAK,EAAE;MACnD,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;MACd,IAAI,QAAQ,KAAK,KAAK,EAAE;QACtB,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB,MAAM;QACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB;KACF,EAAE,UAAU,MAAM,EAAE;MACnB,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;;MAEd,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,EAAE,UAAU,IAAI,OAAO,CAAC,MAAM,IAAI,kBAAkB,CAAC,CAAC,CAAC;;IAExD,IAAI,CAAC,MAAM,IAAI,KAAK,EAAE;MACpB,MAAM,G
AAG,IAAI,CAAC;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACxB;GACF,EAAE,OAAO,CAAC,CAAC;CACb;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI,QAAQ,CAAC,MAAM,KAAK,SAAS,EAAE;IACjC,OAAO,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACpC,MAAM,IAAI,QAAQ,CAAC,MAAM,KAAK,QAAQ,EAAE;IACvC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACnC,MAAM;IACL,SAAS,CAAC,QAAQ,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC9C,OAAO,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KAChC,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KAChC,CAAC,CAAC;GACJ;CACF;;AAED,SAAS,mBAAmB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,EAAE;EACzD,IAAI,aAAa,CAAC,WAAW,KAAK,OAAO,CAAC,WAAW,IAAIA,OAAI,KAAKC,IAAY,IAAI,aAAa,CAAC,WAAW,CAAC,OAAO,KAAKC,SAAe,EAAE;IACvI,iBAAiB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;GAC3C,MAAM;IACL,IAAIF,OAAI,KAAK,eAAe,EAAE;MAC5B,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,KAAK,CAAC,CAAC;MACvC,eAAe,CAAC,KAAK,GAAG,IAAI,CAAC;KAC9B,MAAM,IAAIA,OAAI,KAAK,SAAS,EAAE;MAC7B,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC,MAAM,IAAI,UAAU,CAACA,OAAI,CAAC,EAAE;MAC3B,qBAAqB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,CAAC,CAAC;KACrD,MAAM;MACL,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC;GACF;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,KAAK,KAAK,EAAE;IACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;GACpC,MAAM,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;IAClC,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAE,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC;GACrD,MAAM;IACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB;CACF;;AAED,SAAS,gBAAgB,CAAC,OAAO,EAAE;EACjC,IAAI,OAAO,CAAC,QAAQ,EAAE;IACpB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;GACnC;;EAED,OAAO,CAAC,OAAO,CAAC,CAAC;CAClB;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;;EAED,OAAO,CAAC,OAAO,GAAG,KAAK,CAAC;EACxB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;;EAE3B,IAAI,OAAO,CAAC,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;IACrC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;EACD,OAAO,CAAC,MAAM,GAAG,QAAQ,CAAC;EAC1B,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC;;EAEzB,IAAI,CAAC,gBAAgB,EAAE,OAAO,CAAC,CAAC;CACjC;;AAED,SAAS,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,EAAE;EAC5D,IAAI,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;EACvC,IAAI,MAAM,GAAG,YAAY,CAAC,MAAM,CAAC;;;EAGjC,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC;;EAEvB,YAAY,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC;EAC7B,YAAY,CAAC,MAAM,GAAG,SAAS,CAAC,GAAG,aAAa,CAAC;EACjD,YAAY,CAAC,MAAM,GAAG,QAAQ,CAAC,GAAG,WAAW,CAAC;;EAE9C,IAAI,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,MAAM,EAAE;IACjC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;GACvB;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE;EACxB,IAAI,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;EACvC,IAAI,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC;;EAE7B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;IAC5B,OAAO;GACR;;EAED,IAAI,KAAK,GAAG,KAAK,CAAC;MACd,QAAQ,GAAG,KAAK,CAAC;MACjB,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;;EAE7B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,IAAI,CAAC,EAAE;IAC9C,KAAK,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;IACvB,QAAQ,GAAG,WAAW,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC;;IAEpC,IAAI,KAAK,EAAE;MACT,cAAc,CAAC,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;KAClD,MAAM;MACL,QAAQ,CAAC,MAAM,CAAC,CAAC;KAClB;GACF;;EAED,OAAO,CAAC,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC;CACjC;;AAED,SAAS,QAAQ,CAAC,QAAQ,EAAE,MAAM,EAAE;EAClC,IAAI;IACF,OAAO,QAAQ,CAAC,MAAM,CAAC,CAAC;GACzB,CAAC,OAAO,CAAC,EAAE;IACV,eAAe,CAAC,KAAK,GAAG,CAAC,CAAC;IAC1B,OAAO,eAAe,CAAC;GACxB;CACF;;AAED,SAAS,cAAc,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE;EAC1D,IAAI,WAAW,GAAG,UAAU,CAAC,QAAQ,CAAC;MAClC,KAAK,GAAG,KAAK,CAAC;MACd,KAAK,GAAG,KAAK,CAAC;MACd,SAAS,GAAG,KAAK,CAAC;MAClB,MAAM,GAAG,KAAK,CAAC,CAAC;;EAEpB,IAAI,WAAW,EAAE;IACf,KAAK,GAAG,QAAQ,CAAC,QAAQ,EAAE,MA
AM,CAAC,CAAC;;IAEnC,IAAI,KAAK,KAAK,eAAe,EAAE;MAC7B,MAAM,GAAG,IAAI,CAAC;MACd,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC;MACpB,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC;KACpB,MAAM;MACL,SAAS,GAAG,IAAI,CAAC;KAClB;;IAED,IAAI,OAAO,KAAK,KAAK,EAAE;MACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;MACnC,OAAO;KACR;GACF,MAAM;IACL,KAAK,GAAG,MAAM,CAAC;IACf,SAAS,GAAG,IAAI,CAAC;GAClB;;EAED,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;;GAE/B,MAAM,IAAI,WAAW,IAAI,SAAS,EAAE;IACnC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,MAAM,EAAE;IACjB,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB,MAAM,IAAI,OAAO,KAAK,SAAS,EAAE;IAChC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,OAAO,KAAK,QAAQ,EAAE;IAC/B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI;IACF,QAAQ,CAAC,SAAS,cAAc,CAAC,KAAK,EAAE;MACtC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACzB,EAAE,SAAS,aAAa,CAAC,MAAM,EAAE;MAChC,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,CAAC,CAAC;GACJ,CAAC,OAAO,CAAC,EAAE;IACV,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;GACpB;CACF;;AAED,IAAI,EAAE,GAAG,CAAC,CAAC;AACX,SAAS,MAAM,GAAG;EAChB,OAAO,EAAE,EAAE,CAAC;CACb;;AAED,SAAS,WAAW,CAAC,OAAO,EAAE;EAC5B,OAAO,CAAC,UAAU,CAAC,GAAG,EAAE,EAAE,CAAC;EAC3B,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;EAC3B,OAAO,CAAC,OAAO,GAAG,SAAS,CAAC;EAC5B,OAAO,CAAC,YAAY,GAAG,EAAE,CAAC;CAC3B;;ACrPD,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;CAC7D,AAAC;;AAEF,IAAI,UAAU,GAAG,YAAY;EAC3B,SAAS,UAAU,CAAC,WAAW,EAAE,KAAK,EAAE;IACtC,IAAI,CAAC,oBAAoB,GAAG,WAAW,CAAC;IACxC,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;;IAErC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;MAC7B,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;KAC3B;;IAED,IAAI,OAAO,CAAC,KAAK,CAAC,EAAE;MAClB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC;MAC3B,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,MAAM,CAAC;;MAE/B,IAAI,CAAC,OAAO,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;;MAEtC,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACrB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;OACrC,MAAM;QACL,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;QACvB,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;UACzB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;SACrC;OACF;KACF,MAAM;MACL,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;KACzC;GACF;;EAED,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE;IAC3D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,KAAK,OAAO,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;MAChE,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC9B;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE;IAC9D,IAAI,CAAC,GAAG,IAAI,CAAC,oBAAoB,CAAC;IAClC,IAAIF,UAAO,GAAG,CAAC,CAAC,OAAO,CAAC;;;IAGxB,IAAIA,UAAO,KAAKI,SAAe,EAAE;MAC/B,IAAI,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC;;MAE3B,IAAI,KAAK,KAAKD,IAAY,IAAI,KAAK,CAAC,MAAM,KAAK,OAAO,EAAE;QACtD,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;OACjD,MAAM,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;QACtC,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB,MAAM,IAAI,CAAC,KAAKE,SAAO,EAAE;QACxB,IAAI,OAAO,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC;QAC1B,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAC3C,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;OAChC,MAAM;QACL,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,UAAUL,UAAO,EAAE;UAC1C,OAAOA,UAAO,CAAC,KAAK,CAAC,CAAC;SACvB,CAAC,EAAE,CAAC,CAAC,CAAC;OACR;KACF,MAAM;MACL,IAAI,CAAC,aAAa,CAACA,UAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC;KACvC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE,KAAK,EAAE;IACrE,IAAI,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;;;IAG3B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;MAC9B,I
AAI,CAAC,UAAU,EAAE,CAAC;;MAElB,IAAI,KAAK,KAAK,QAAQ,EAAE;QACtB,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACxB,MAAM;QACL,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB;KACF;;IAED,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;MACzB,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;KAChC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,aAAa,GAAG,SAAS,aAAa,CAAC,OAAO,EAAE,CAAC,EAAE;IACtE,IAAI,UAAU,GAAG,IAAI,CAAC;;IAEtB,SAAS,CAAC,OAAO,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC7C,OAAO,UAAU,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC;KACnD,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,UAAU,CAAC,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;KACnD,CAAC,CAAC;GACJ,CAAC;;EAEF,OAAO,UAAU,CAAC;CACnB,EAAE;;ACzGH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+CA,AAAe,SAAS,GAAG,CAAC,OAAO,EAAE;EACnC,OAAO,IAAI,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC,OAAO,CAAC;;;CAC9C,DCjDD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiEA,AAAe,SAAS,IAAI,CAAC,OAAO,EAAE;;EAEpC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;IACrB,OAAO,IAAI,WAAW,CAAC,UAAU,CAAC,EAAE,MAAM,EAAE;MAC1C,OAAO,MAAM,CAAC,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAC,CAAC;KACjE,CAAC,CAAC;GACJ,MAAM;IACL,OAAO,IAAI,WAAW,CAAC,UAAU,OAAO,EAAE,MAAM,EAAE;MAChD,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;MAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE;QAC/B,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;OACvD;KACF,CAAC,CAAC;GACJ;;;CACF,DCjFD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCA,AAAe,SAASM,QAAM,CAAC,MAAM,EAAE;;EAErC,IAAI,WAAW,GAAG,IAAI,CAAC;EACvB,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,MAAO,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EACzB,OAAO,OAAO,CAAC;;;CAChB,DC9BD,SAAS,aAAa,GAAG;EACvB,MAAM,IAAI,SAAS,CAAC,oFAAoF,CAAC,CAAC;CAC3G;;AAED,SAAS,QAAQ,GAAG;EAClB,MAAM,IAAI,SAAS,CAAC,uHAAuH,CAAC,CAAC;CAC9I;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0GD,IAAIF,SAAO,GAAG,YAAY;EACxB,SAAS,OAAO,CAAC,QAAQ,EAAE;IACzB,IAAI,CAAC,UAAU,CAAC,GAAG,MAAM,EAAE,CAAC;IAC5B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;IACvC,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;;IAEvB,IAAI,IAAI,KAAK,QAAQ,EAAE;MACrB,OAAO,QAAQ,KAAK,UAAU,IAAI,aAAa,EAAE,CAAC;MAClD,IAAI,YAAY,OAAO,GAAG,iBAAiB,CAAC,IAAI,EAAE,QAAQ,CAAC,GAAG,QAAQ,EAAE,CAAC;KAC1E;GACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA4LD,OAAO,CAAC,SAAS,CAAC,KAAK,GAAG,SAAS,MAAM,CAAC,WAAW,EAAE;IACrD,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;GACrC,CAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA0CF,OAAO,CAAC,SAAS,CAAC,OAAO,GAAG,SAAS,QAAQ,CAAC,QAAQ,EAAE;IACtD,IAAI,OAAO,GAAG,IAAI,CAAC;IACnB,IAAI,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;;IAEtC,OAAO,OAAO,CAAC,IAAI,CAAC,UAAU,KAAK,EAAE;MACnC,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;QACtD,OAAO,KAAK,CAAC;OACd,CAAC,CAAC;KACJ,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;QACtD,MAAM,MAAM,CAAC;OACd,CAAC,CAAC;KACJ,CAAC,CAAC;GACJ,CAAC;;EAEF,OAAO,OAAO,CAAC;CAChB,EAAE,CAAC;;AAEJA,SAAO,CAAC,SAAS,CAAC,IAAI,GAAG,IAAI,CAAC;AAC9B,AACAA,SAAO,CAAC,GAAG,GAAG,GAAG,CAAC;AAClBA,SAAO,CAAC,IAAI,GAAG,IAAI,CAAC;AACpBA,SAAO,CAAC,OAAO,GAAGG,SAAO,CAAC;AAC1BH,SAAO,CAAC,MAAM,GAAGI,QAAM,CAAC;AACxBJ,SAAO,CAAC,aAAa,GAAG,YAAY,CAAC;AACrCA,SAAO,CAAC,QAAQ,GAAG,OAAO,CAAC;AAC3BA,SAAO,CAAC,KAAK,GAAG,IAAI;;ACxYpB;AACA,AAEe,SAAS,QAAQ,GAAG;EACjC,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;;EAEnB,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;IACjC,KAAK,GAAG,MAAM,CAAC;GAChB,MAAM,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;IAC
tC,KAAK,GAAG,IAAI,CAAC;GACd,MAAM;IACL,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC;KACnC,CAAC,OAAO,CAAC,EAAE;MACV,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;KAC7F;GACF;;EAED,IAAI,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC;;EAEtB,IAAI,CAAC,EAAE;IACL,IAAI,eAAe,GAAG,IAAI,CAAC;IAC3B,IAAI;MACF,eAAe,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KAC/D,CAAC,OAAO,CAAC,EAAE;;KAEX;;IAED,IAAI,eAAe,KAAK,kBAAkB,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE;MACrD,OAAO;KACR;GACF;;EAED,KAAK,CAAC,OAAO,GAAGA,SAAO,CAAC;;;CACzB,DC/BD;AACAA,SAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC;AAC5BA,SAAO,CAAC,OAAO,GAAGA,SAAO,CAAC;;;;;;;;","file":"es6-promise.min.js"} \ No newline at end of file +{"version":3,"sources":["config/versionTemplate.txt","lib/es6-promise/utils.js","lib/es6-promise/asap.js","lib/es6-promise/then.js","lib/es6-promise/promise/resolve.js","lib/es6-promise/-internal.js","lib/es6-promise/enumerator.js","lib/es6-promise/promise/all.js","lib/es6-promise/promise/race.js","lib/es6-promise/promise/reject.js","lib/es6-promise/promise.js","lib/es6-promise/polyfill.js","lib/es6-promise.js"],"sourcesContent":["/*!\n * @overview es6-promise - a tiny implementation of Promises/A+.\n * @copyright Copyright (c) 2014 Yehuda Katz, Tom Dale, Stefan Penner and contributors (Conversion to ES6 API by Jake Archibald)\n * @license Licensed under MIT license\n * See https://raw.githubusercontent.com/stefanpenner/es6-promise/master/LICENSE\n * @version v4.2.8+1e68dce6\n */\n","export function objectOrFunction(x) {\n var type = typeof x;\n return x !== null && (type === 'object' || type === 'function');\n}\n\nexport function isFunction(x) {\n return typeof x === 'function';\n}\n\nexport function isMaybeThenable(x) {\n return x !== null && typeof x === 'object';\n}\n\nvar _isArray = void 0;\nif (Array.isArray) {\n _isArray = Array.isArray;\n} else {\n _isArray = function (x) {\n return Object.prototype.toString.call(x) === '[object Array]';\n };\n}\n\nexport var isArray = _isArray;","var len = 0;\nvar vertxNext = void 0;\nvar customSchedulerFn = void 0;\n\nexport var asap = function asap(callback, arg) {\n queue[len] = callback;\n queue[len + 1] = arg;\n len += 2;\n if (len === 2) {\n // If len is 2, that means that we need to schedule an async flush.\n // If additional callbacks are queued before the queue is flushed, they\n // will be processed by this flush that we are scheduling.\n if (customSchedulerFn) {\n customSchedulerFn(flush);\n } else {\n scheduleFlush();\n }\n }\n};\n\nexport function setScheduler(scheduleFn) {\n customSchedulerFn = scheduleFn;\n}\n\nexport function setAsap(asapFn) {\n asap = asapFn;\n}\n\nvar browserWindow = typeof window !== 'undefined' ? 
window : undefined;\nvar browserGlobal = browserWindow || {};\nvar BrowserMutationObserver = browserGlobal.MutationObserver || browserGlobal.WebKitMutationObserver;\nvar isNode = typeof self === 'undefined' && typeof process !== 'undefined' && {}.toString.call(process) === '[object process]';\n\n// test for web worker but not in IE10\nvar isWorker = typeof Uint8ClampedArray !== 'undefined' && typeof importScripts !== 'undefined' && typeof MessageChannel !== 'undefined';\n\n// node\nfunction useNextTick() {\n // node version 0.10.x displays a deprecation warning when nextTick is used recursively\n // see https://github.com/cujojs/when/issues/410 for details\n return function () {\n return process.nextTick(flush);\n };\n}\n\n// vertx\nfunction useVertxTimer() {\n if (typeof vertxNext !== 'undefined') {\n return function () {\n vertxNext(flush);\n };\n }\n\n return useSetTimeout();\n}\n\nfunction useMutationObserver() {\n var iterations = 0;\n var observer = new BrowserMutationObserver(flush);\n var node = document.createTextNode('');\n observer.observe(node, { characterData: true });\n\n return function () {\n node.data = iterations = ++iterations % 2;\n };\n}\n\n// web worker\nfunction useMessageChannel() {\n var channel = new MessageChannel();\n channel.port1.onmessage = flush;\n return function () {\n return channel.port2.postMessage(0);\n };\n}\n\nfunction useSetTimeout() {\n // Store setTimeout reference so es6-promise will be unaffected by\n // other code modifying setTimeout (like sinon.useFakeTimers())\n var globalSetTimeout = setTimeout;\n return function () {\n return globalSetTimeout(flush, 1);\n };\n}\n\nvar queue = new Array(1000);\nfunction flush() {\n for (var i = 0; i < len; i += 2) {\n var callback = queue[i];\n var arg = queue[i + 1];\n\n callback(arg);\n\n queue[i] = undefined;\n queue[i + 1] = undefined;\n }\n\n len = 0;\n}\n\nfunction attemptVertx() {\n try {\n var vertx = Function('return this')().require('vertx');\n vertxNext = vertx.runOnLoop || vertx.runOnContext;\n return useVertxTimer();\n } catch (e) {\n return useSetTimeout();\n }\n}\n\nvar scheduleFlush = void 0;\n// Decide what async method to use to triggering processing of queued callbacks:\nif (isNode) {\n scheduleFlush = useNextTick();\n} else if (BrowserMutationObserver) {\n scheduleFlush = useMutationObserver();\n} else if (isWorker) {\n scheduleFlush = useMessageChannel();\n} else if (browserWindow === undefined && typeof require === 'function') {\n scheduleFlush = attemptVertx();\n} else {\n scheduleFlush = useSetTimeout();\n}","import { invokeCallback, subscribe, FULFILLED, REJECTED, noop, makePromise, PROMISE_ID } from './-internal';\n\nimport { asap } from './asap';\n\nexport default function then(onFulfillment, onRejection) {\n var parent = this;\n\n var child = new this.constructor(noop);\n\n if (child[PROMISE_ID] === undefined) {\n makePromise(child);\n }\n\n var _state = parent._state;\n\n\n if (_state) {\n var callback = arguments[_state - 1];\n asap(function () {\n return invokeCallback(_state, child, callback, parent._result);\n });\n } else {\n subscribe(parent, child, onFulfillment, onRejection);\n }\n\n return child;\n}","import { noop, resolve as _resolve } from '../-internal';\n\n/**\n `Promise.resolve` returns a promise that will become resolved with the\n passed `value`. 
It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n resolve(1);\n });\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n ```javascript\n let promise = Promise.resolve(1);\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n @method resolve\n @static\n @param {Any} value value that the returned promise will be resolved with\n Useful for tooling.\n @return {Promise} a promise that will become fulfilled with the given\n `value`\n*/\nexport default function resolve(object) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (object && typeof object === 'object' && object.constructor === Constructor) {\n return object;\n }\n\n var promise = new Constructor(noop);\n _resolve(promise, object);\n return promise;\n}","import { objectOrFunction, isFunction } from './utils';\n\nimport { asap } from './asap';\n\nimport originalThen from './then';\nimport originalResolve from './promise/resolve';\n\nexport var PROMISE_ID = Math.random().toString(36).substring(2);\n\nfunction noop() {}\n\nvar PENDING = void 0;\nvar FULFILLED = 1;\nvar REJECTED = 2;\n\nfunction selfFulfillment() {\n return new TypeError(\"You cannot resolve a promise with itself\");\n}\n\nfunction cannotReturnOwn() {\n return new TypeError('A promises callback cannot return that same promise.');\n}\n\nfunction tryThen(then, value, fulfillmentHandler, rejectionHandler) {\n try {\n then.call(value, fulfillmentHandler, rejectionHandler);\n } catch (e) {\n return e;\n }\n}\n\nfunction handleForeignThenable(promise, thenable, then) {\n asap(function (promise) {\n var sealed = false;\n var error = tryThen(then, thenable, function (value) {\n if (sealed) {\n return;\n }\n sealed = true;\n if (thenable !== value) {\n resolve(promise, value);\n } else {\n fulfill(promise, value);\n }\n }, function (reason) {\n if (sealed) {\n return;\n }\n sealed = true;\n\n reject(promise, reason);\n }, 'Settle: ' + (promise._label || ' unknown promise'));\n\n if (!sealed && error) {\n sealed = true;\n reject(promise, error);\n }\n }, promise);\n}\n\nfunction handleOwnThenable(promise, thenable) {\n if (thenable._state === FULFILLED) {\n fulfill(promise, thenable._result);\n } else if (thenable._state === REJECTED) {\n reject(promise, thenable._result);\n } else {\n subscribe(thenable, undefined, function (value) {\n return resolve(promise, value);\n }, function (reason) {\n return reject(promise, reason);\n });\n }\n}\n\nfunction handleMaybeThenable(promise, maybeThenable, then) {\n if (maybeThenable.constructor === promise.constructor && then === originalThen && maybeThenable.constructor.resolve === originalResolve) {\n handleOwnThenable(promise, maybeThenable);\n } else {\n if (then === undefined) {\n fulfill(promise, maybeThenable);\n } else if (isFunction(then)) {\n handleForeignThenable(promise, maybeThenable, then);\n } else {\n fulfill(promise, maybeThenable);\n }\n }\n}\n\nfunction resolve(promise, value) {\n if (promise === value) {\n reject(promise, selfFulfillment());\n } else if (objectOrFunction(value)) {\n var then = void 0;\n try {\n then = value.then;\n } catch (error) {\n reject(promise, error);\n return;\n }\n handleMaybeThenable(promise, value, then);\n } else {\n fulfill(promise, value);\n }\n}\n\nfunction publishRejection(promise) {\n if (promise._onerror) {\n promise._onerror(promise._result);\n }\n\n publish(promise);\n}\n\nfunction fulfill(promise, value) {\n if 
(promise._state !== PENDING) {\n return;\n }\n\n promise._result = value;\n promise._state = FULFILLED;\n\n if (promise._subscribers.length !== 0) {\n asap(publish, promise);\n }\n}\n\nfunction reject(promise, reason) {\n if (promise._state !== PENDING) {\n return;\n }\n promise._state = REJECTED;\n promise._result = reason;\n\n asap(publishRejection, promise);\n}\n\nfunction subscribe(parent, child, onFulfillment, onRejection) {\n var _subscribers = parent._subscribers;\n var length = _subscribers.length;\n\n\n parent._onerror = null;\n\n _subscribers[length] = child;\n _subscribers[length + FULFILLED] = onFulfillment;\n _subscribers[length + REJECTED] = onRejection;\n\n if (length === 0 && parent._state) {\n asap(publish, parent);\n }\n}\n\nfunction publish(promise) {\n var subscribers = promise._subscribers;\n var settled = promise._state;\n\n if (subscribers.length === 0) {\n return;\n }\n\n var child = void 0,\n callback = void 0,\n detail = promise._result;\n\n for (var i = 0; i < subscribers.length; i += 3) {\n child = subscribers[i];\n callback = subscribers[i + settled];\n\n if (child) {\n invokeCallback(settled, child, callback, detail);\n } else {\n callback(detail);\n }\n }\n\n promise._subscribers.length = 0;\n}\n\nfunction invokeCallback(settled, promise, callback, detail) {\n var hasCallback = isFunction(callback),\n value = void 0,\n error = void 0,\n succeeded = true;\n\n if (hasCallback) {\n try {\n value = callback(detail);\n } catch (e) {\n succeeded = false;\n error = e;\n }\n\n if (promise === value) {\n reject(promise, cannotReturnOwn());\n return;\n }\n } else {\n value = detail;\n }\n\n if (promise._state !== PENDING) {\n // noop\n } else if (hasCallback && succeeded) {\n resolve(promise, value);\n } else if (succeeded === false) {\n reject(promise, error);\n } else if (settled === FULFILLED) {\n fulfill(promise, value);\n } else if (settled === REJECTED) {\n reject(promise, value);\n }\n}\n\nfunction initializePromise(promise, resolver) {\n try {\n resolver(function resolvePromise(value) {\n resolve(promise, value);\n }, function rejectPromise(reason) {\n reject(promise, reason);\n });\n } catch (e) {\n reject(promise, e);\n }\n}\n\nvar id = 0;\nfunction nextId() {\n return id++;\n}\n\nfunction makePromise(promise) {\n promise[PROMISE_ID] = id++;\n promise._state = undefined;\n promise._result = undefined;\n promise._subscribers = [];\n}\n\nexport { nextId, makePromise, noop, resolve, reject, fulfill, subscribe, publish, publishRejection, initializePromise, invokeCallback, FULFILLED, REJECTED, PENDING, handleMaybeThenable };","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isArray, isMaybeThenable } from './utils';\nimport { noop, reject, fulfill, subscribe, FULFILLED, REJECTED, PENDING, handleMaybeThenable } from './-internal';\n\nimport then from './then';\nimport Promise from './promise';\nimport originalResolve from './promise/resolve';\nimport originalThen from './then';\nimport { makePromise, PROMISE_ID } from './-internal';\n\nfunction validationError() {\n return new Error('Array Methods must be provided an Array');\n};\n\nvar Enumerator = function () {\n function Enumerator(Constructor, input) {\n this._instanceConstructor = Constructor;\n this.promise = new Constructor(noop);\n\n if (!this.promise[PROMISE_ID]) {\n makePromise(this.promise);\n }\n\n if (isArray(input)) {\n this.length = input.length;\n this._remaining = 
input.length;\n\n this._result = new Array(this.length);\n\n if (this.length === 0) {\n fulfill(this.promise, this._result);\n } else {\n this.length = this.length || 0;\n this._enumerate(input);\n if (this._remaining === 0) {\n fulfill(this.promise, this._result);\n }\n }\n } else {\n reject(this.promise, validationError());\n }\n }\n\n Enumerator.prototype._enumerate = function _enumerate(input) {\n for (var i = 0; this._state === PENDING && i < input.length; i++) {\n this._eachEntry(input[i], i);\n }\n };\n\n Enumerator.prototype._eachEntry = function _eachEntry(entry, i) {\n var c = this._instanceConstructor;\n var resolve = c.resolve;\n\n\n if (resolve === originalResolve) {\n var _then = void 0;\n var error = void 0;\n var didError = false;\n try {\n _then = entry.then;\n } catch (e) {\n didError = true;\n error = e;\n }\n\n if (_then === originalThen && entry._state !== PENDING) {\n this._settledAt(entry._state, i, entry._result);\n } else if (typeof _then !== 'function') {\n this._remaining--;\n this._result[i] = entry;\n } else if (c === Promise) {\n var promise = new c(noop);\n if (didError) {\n reject(promise, error);\n } else {\n handleMaybeThenable(promise, entry, _then);\n }\n this._willSettleAt(promise, i);\n } else {\n this._willSettleAt(new c(function (resolve) {\n return resolve(entry);\n }), i);\n }\n } else {\n this._willSettleAt(resolve(entry), i);\n }\n };\n\n Enumerator.prototype._settledAt = function _settledAt(state, i, value) {\n var promise = this.promise;\n\n\n if (promise._state === PENDING) {\n this._remaining--;\n\n if (state === REJECTED) {\n reject(promise, value);\n } else {\n this._result[i] = value;\n }\n }\n\n if (this._remaining === 0) {\n fulfill(promise, this._result);\n }\n };\n\n Enumerator.prototype._willSettleAt = function _willSettleAt(promise, i) {\n var enumerator = this;\n\n subscribe(promise, undefined, function (value) {\n return enumerator._settledAt(FULFILLED, i, value);\n }, function (reason) {\n return enumerator._settledAt(REJECTED, i, reason);\n });\n };\n\n return Enumerator;\n}();\n\nexport default Enumerator;\n;","import Enumerator from '../enumerator';\n\n/**\n `Promise.all` accepts an array of promises, and returns a new promise which\n is fulfilled with an array of fulfillment values for the passed promises, or\n rejected with the reason of the first passed promise to be rejected. It casts all\n elements of the passed iterable to promises as it runs this algorithm.\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = resolve(2);\n let promise3 = resolve(3);\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // The array here would be [ 1, 2, 3 ];\n });\n ```\n\n If any of the `promises` given to `all` are rejected, the first promise\n that is rejected will be given as an argument to the returned promises's\n rejection handler. 
For example:\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = reject(new Error(\"2\"));\n let promise3 = reject(new Error(\"3\"));\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // Code here never runs because there are rejected promises!\n }, function(error) {\n // error.message === \"2\"\n });\n ```\n\n @method all\n @static\n @param {Array} entries array of promises\n @param {String} label optional string for labeling the promise.\n Useful for tooling.\n @return {Promise} promise that is fulfilled when all `promises` have been\n fulfilled, or rejected if any of them become rejected.\n @static\n*/\nexport default function all(entries) {\n return new Enumerator(this, entries).promise;\n}","import { isArray } from \"../utils\";\n\n/**\n `Promise.race` returns a new promise which is settled in the same way as the\n first passed promise to settle.\n\n Example:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 2');\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // result === 'promise 2' because it was resolved before promise1\n // was resolved.\n });\n ```\n\n `Promise.race` is deterministic in that only the state of the first\n settled promise matters. For example, even if other promises given to the\n `promises` array argument are resolved, but the first settled promise has\n become rejected before the other promises became fulfilled, the returned\n promise will become rejected:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n reject(new Error('promise 2'));\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // Code here never runs\n }, function(reason){\n // reason.message === 'promise 2' because promise 2 became rejected before\n // promise 1 became fulfilled\n });\n ```\n\n An example real-world use case is implementing timeouts:\n\n ```javascript\n Promise.race([ajax('foo.json'), timeout(5000)])\n ```\n\n @method race\n @static\n @param {Array} promises array of promises to observe\n Useful for tooling.\n @return {Promise} a promise which settles in the same way as the first passed\n promise to settle.\n*/\nexport default function race(entries) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (!isArray(entries)) {\n return new Constructor(function (_, reject) {\n return reject(new TypeError('You must pass an array to race.'));\n });\n } else {\n return new Constructor(function (resolve, reject) {\n var length = entries.length;\n for (var i = 0; i < length; i++) {\n Constructor.resolve(entries[i]).then(resolve, reject);\n }\n });\n }\n}","import { noop, reject as _reject } from '../-internal';\n\n/**\n `Promise.reject` returns a promise rejected with the passed `reason`.\n It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n reject(new Error('WHOOPS'));\n });\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n 
```javascript\n let promise = Promise.reject(new Error('WHOOPS'));\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n @method reject\n @static\n @param {Any} reason value that the returned promise will be rejected with.\n Useful for tooling.\n @return {Promise} a promise rejected with the given `reason`.\n*/\nexport default function reject(reason) {\n /*jshint validthis:true */\n var Constructor = this;\n var promise = new Constructor(noop);\n _reject(promise, reason);\n return promise;\n}","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isFunction } from './utils';\nimport { noop, nextId, PROMISE_ID, initializePromise } from './-internal';\nimport { asap, setAsap, setScheduler } from './asap';\n\nimport all from './promise/all';\nimport race from './promise/race';\nimport Resolve from './promise/resolve';\nimport Reject from './promise/reject';\nimport then from './then';\n\nfunction needsResolver() {\n throw new TypeError('You must pass a resolver function as the first argument to the promise constructor');\n}\n\nfunction needsNew() {\n throw new TypeError(\"Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.\");\n}\n\n/**\n Promise objects represent the eventual result of an asynchronous operation. The\n primary way of interacting with a promise is through its `then` method, which\n registers callbacks to receive either a promise's eventual value or the reason\n why the promise cannot be fulfilled.\n\n Terminology\n -----------\n\n - `promise` is an object or function with a `then` method whose behavior conforms to this specification.\n - `thenable` is an object or function that defines a `then` method.\n - `value` is any legal JavaScript value (including undefined, a thenable, or a promise).\n - `exception` is a value that is thrown using the throw statement.\n - `reason` is a value that indicates why a promise was rejected.\n - `settled` the final resting state of a promise, fulfilled or rejected.\n\n A promise can be in one of three states: pending, fulfilled, or rejected.\n\n Promises that are fulfilled have a fulfillment value and are in the fulfilled\n state. Promises that are rejected have a rejection reason and are in the\n rejected state. A fulfillment value is never a thenable.\n\n Promises can also be said to *resolve* a value. If this value is also a\n promise, then the original promise's settled state will match the value's\n settled state. 
So a promise that *resolves* a promise that rejects will\n itself reject, and a promise that *resolves* a promise that fulfills will\n itself fulfill.\n\n\n Basic Usage:\n ------------\n\n ```js\n let promise = new Promise(function(resolve, reject) {\n // on success\n resolve(value);\n\n // on failure\n reject(reason);\n });\n\n promise.then(function(value) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Advanced Usage:\n ---------------\n\n Promises shine when abstracting away asynchronous interactions such as\n `XMLHttpRequest`s.\n\n ```js\n function getJSON(url) {\n return new Promise(function(resolve, reject){\n let xhr = new XMLHttpRequest();\n\n xhr.open('GET', url);\n xhr.onreadystatechange = handler;\n xhr.responseType = 'json';\n xhr.setRequestHeader('Accept', 'application/json');\n xhr.send();\n\n function handler() {\n if (this.readyState === this.DONE) {\n if (this.status === 200) {\n resolve(this.response);\n } else {\n reject(new Error('getJSON: `' + url + '` failed with status: [' + this.status + ']'));\n }\n }\n };\n });\n }\n\n getJSON('/posts.json').then(function(json) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Unlike callbacks, promises are great composable primitives.\n\n ```js\n Promise.all([\n getJSON('/posts'),\n getJSON('/comments')\n ]).then(function(values){\n values[0] // => postsJSON\n values[1] // => commentsJSON\n\n return values;\n });\n ```\n\n @class Promise\n @param {Function} resolver\n Useful for tooling.\n @constructor\n*/\n\nvar Promise = function () {\n function Promise(resolver) {\n this[PROMISE_ID] = nextId();\n this._result = this._state = undefined;\n this._subscribers = [];\n\n if (noop !== resolver) {\n typeof resolver !== 'function' && needsResolver();\n this instanceof Promise ? initializePromise(this, resolver) : needsNew();\n }\n }\n\n /**\n The primary way of interacting with a promise is through its `then` method,\n which registers callbacks to receive either a promise's eventual value or the\n reason why the promise cannot be fulfilled.\n ```js\n findUser().then(function(user){\n // user is available\n }, function(reason){\n // user is unavailable, and you are given the reason why\n });\n ```\n Chaining\n --------\n The return value of `then` is itself a promise. 
This second, 'downstream'\n promise is resolved with the return value of the first promise's fulfillment\n or rejection handler, or rejected if the handler throws an exception.\n ```js\n findUser().then(function (user) {\n return user.name;\n }, function (reason) {\n return 'default name';\n }).then(function (userName) {\n // If `findUser` fulfilled, `userName` will be the user's name, otherwise it\n // will be `'default name'`\n });\n findUser().then(function (user) {\n throw new Error('Found user, but still unhappy');\n }, function (reason) {\n throw new Error('`findUser` rejected and we're unhappy');\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // if `findUser` fulfilled, `reason` will be 'Found user, but still unhappy'.\n // If `findUser` rejected, `reason` will be '`findUser` rejected and we're unhappy'.\n });\n ```\n If the downstream promise does not specify a rejection handler, rejection reasons will be propagated further downstream.\n ```js\n findUser().then(function (user) {\n throw new PedagogicalException('Upstream error');\n }).then(function (value) {\n // never reached\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // The `PedgagocialException` is propagated all the way down to here\n });\n ```\n Assimilation\n ------------\n Sometimes the value you want to propagate to a downstream promise can only be\n retrieved asynchronously. This can be achieved by returning a promise in the\n fulfillment or rejection handler. The downstream promise will then be pending\n until the returned promise is settled. This is called *assimilation*.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // The user's comments are now available\n });\n ```\n If the assimliated promise rejects, then the downstream promise will also reject.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // If `findCommentsByAuthor` fulfills, we'll have the value here\n }, function (reason) {\n // If `findCommentsByAuthor` rejects, we'll have the reason here\n });\n ```\n Simple Example\n --------------\n Synchronous Example\n ```javascript\n let result;\n try {\n result = findResult();\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n findResult(function(result, err){\n if (err) {\n // failure\n } else {\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findResult().then(function(result){\n // success\n }, function(reason){\n // failure\n });\n ```\n Advanced Example\n --------------\n Synchronous Example\n ```javascript\n let author, books;\n try {\n author = findAuthor();\n books = findBooksByAuthor(author);\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n function foundBooks(books) {\n }\n function failure(reason) {\n }\n findAuthor(function(author, err){\n if (err) {\n failure(err);\n // failure\n } else {\n try {\n findBoooksByAuthor(author, function(books, err) {\n if (err) {\n failure(err);\n } else {\n try {\n foundBooks(books);\n } catch(reason) {\n failure(reason);\n }\n }\n });\n } catch(error) {\n failure(err);\n }\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findAuthor().\n then(findBooksByAuthor).\n then(function(books){\n // found books\n }).catch(function(reason){\n // something went wrong\n });\n ```\n @method then\n @param {Function} onFulfilled\n @param {Function} onRejected\n Useful for tooling.\n @return 
{Promise}\n */\n\n /**\n `catch` is simply sugar for `then(undefined, onRejection)` which makes it the same\n as the catch block of a try/catch statement.\n ```js\n function findAuthor(){\n throw new Error('couldn't find that author');\n }\n // synchronous\n try {\n findAuthor();\n } catch(reason) {\n // something went wrong\n }\n // async with promises\n findAuthor().catch(function(reason){\n // something went wrong\n });\n ```\n @method catch\n @param {Function} onRejection\n Useful for tooling.\n @return {Promise}\n */\n\n\n Promise.prototype.catch = function _catch(onRejection) {\n return this.then(null, onRejection);\n };\n\n /**\n `finally` will be invoked regardless of the promise's fate just as native\n try/catch/finally behaves\n \n Synchronous example:\n \n ```js\n findAuthor() {\n if (Math.random() > 0.5) {\n throw new Error();\n }\n return new Author();\n }\n \n try {\n return findAuthor(); // succeed or fail\n } catch(error) {\n return findOtherAuther();\n } finally {\n // always runs\n // doesn't affect the return value\n }\n ```\n \n Asynchronous example:\n \n ```js\n findAuthor().catch(function(reason){\n return findOtherAuther();\n }).finally(function(){\n // author was either found, or not\n });\n ```\n \n @method finally\n @param {Function} callback\n @return {Promise}\n */\n\n\n Promise.prototype.finally = function _finally(callback) {\n var promise = this;\n var constructor = promise.constructor;\n\n if (isFunction(callback)) {\n return promise.then(function (value) {\n return constructor.resolve(callback()).then(function () {\n return value;\n });\n }, function (reason) {\n return constructor.resolve(callback()).then(function () {\n throw reason;\n });\n });\n }\n\n return promise.then(callback, callback);\n };\n\n return Promise;\n}();\n\nPromise.prototype.then = then;\nexport default Promise;\nPromise.all = all;\nPromise.race = race;\nPromise.resolve = Resolve;\nPromise.reject = Reject;\nPromise._setScheduler = setScheduler;\nPromise._setAsap = setAsap;\nPromise._asap = asap;","/*global self*/\nimport Promise from './promise';\n\nexport default function polyfill() {\n var local = void 0;\n\n if (typeof global !== 'undefined') {\n local = global;\n } else if (typeof self !== 'undefined') {\n local = self;\n } else {\n try {\n local = Function('return this')();\n } catch (e) {\n throw new Error('polyfill failed because global object is unavailable in this environment');\n }\n }\n\n var P = local.Promise;\n\n if (P) {\n var promiseToString = null;\n try {\n promiseToString = Object.prototype.toString.call(P.resolve());\n } catch (e) {\n // silently ignored\n }\n\n if (promiseToString === '[object Promise]' && !P.cast) {\n return;\n }\n }\n\n local.Promise = Promise;\n}","import Promise from './es6-promise/promise';\nimport polyfill from './es6-promise/polyfill';\n\n// Strange compat..\nPromise.polyfill = polyfill;\nPromise.Promise = Promise;\nexport default 
Promise;"],"names":["resolve","_resolve","then","originalThen","originalResolve","Promise","reject","_reject","Resolve","Reject"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACNO,SAAS,gBAAgB,CAAC,CAAC,EAAE;EAClC,IAAI,IAAI,GAAG,OAAO,CAAC,CAAC;EACpB,OAAO,CAAC,KAAK,IAAI,KAAK,IAAI,KAAK,QAAQ,IAAI,IAAI,KAAK,UAAU,CAAC,CAAC;CACjE;;AAED,AAAO,SAAS,UAAU,CAAC,CAAC,EAAE;EAC5B,OAAO,OAAO,CAAC,KAAK,UAAU,CAAC;CAChC;;AAED,AAEC;;AAED,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC;AACtB,IAAI,KAAK,CAAC,OAAO,EAAE;EACjB,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC;CAC1B,MAAM;EACL,QAAQ,GAAG,UAAU,CAAC,EAAE;IACtB,OAAO,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,gBAAgB,CAAC;GAC/D,CAAC;CACH;;AAED,AAAO,IAAI,OAAO,GAAG,QAAQ;;ACtB7B,IAAI,GAAG,GAAG,CAAC,CAAC;AACZ,IAAI,SAAS,GAAG,KAAK,CAAC,CAAC;AACvB,IAAI,iBAAiB,GAAG,KAAK,CAAC,CAAC;;AAE/B,AAAO,IAAI,IAAI,GAAG,SAAS,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;EAC7C,KAAK,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC;EACtB,KAAK,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;EACrB,GAAG,IAAI,CAAC,CAAC;EACT,IAAI,GAAG,KAAK,CAAC,EAAE;;;;IAIb,IAAI,iBAAiB,EAAE;MACrB,iBAAiB,CAAC,KAAK,CAAC,CAAC;KAC1B,MAAM;MACL,aAAa,EAAE,CAAC;KACjB;GACF;CACF,CAAC;;AAEF,AAAO,SAAS,YAAY,CAAC,UAAU,EAAE;EACvC,iBAAiB,GAAG,UAAU,CAAC;CAChC;;AAED,AAAO,SAAS,OAAO,CAAC,MAAM,EAAE;EAC9B,IAAI,GAAG,MAAM,CAAC;CACf;;AAED,IAAI,aAAa,GAAG,OAAO,MAAM,KAAK,WAAW,GAAG,MAAM,GAAG,SAAS,CAAC;AACvE,IAAI,aAAa,GAAG,aAAa,IAAI,EAAE,CAAC;AACxC,IAAI,uBAAuB,GAAG,aAAa,CAAC,gBAAgB,IAAI,aAAa,CAAC,sBAAsB,CAAC;AACrG,IAAI,MAAM,GAAG,OAAO,IAAI,KAAK,WAAW,IAAI,OAAO,OAAO,KAAK,WAAW,IAAI,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,kBAAkB,CAAC;;;AAG/H,IAAI,QAAQ,GAAG,OAAO,iBAAiB,KAAK,WAAW,IAAI,OAAO,aAAa,KAAK,WAAW,IAAI,OAAO,cAAc,KAAK,WAAW,CAAC;;;AAGzI,SAAS,WAAW,GAAG;;;EAGrB,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;GAChC,CAAC;CACH;;;AAGD,SAAS,aAAa,GAAG;EACvB,IAAI,OAAO,SAAS,KAAK,WAAW,EAAE;IACpC,OAAO,YAAY;MACjB,SAAS,CAAC,KAAK,CAAC,CAAC;KAClB,CAAC;GACH;;EAED,OAAO,aAAa,EAAE,CAAC;CACxB;;AAED,SAAS,mBAAmB,GAAG;EAC7B,IAAI,UAAU,GAAG,CAAC,CAAC;EACnB,IAAI,QAAQ,GAAG,IAAI,uBAAuB,CAAC,KAAK,CAAC,CAAC;EAClD,IAAI,IAAI,GAAG,QAAQ,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC;EACvC,QAAQ,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;;EAEhD,OAAO,YAAY;IACjB,IAAI,CAAC,IAAI,GAAG,UAAU,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC;GAC3C,CAAC;CACH;;;AAGD,SAAS,iBAAiB,GAAG;EAC3B,IAAI,OAAO,GAAG,IAAI,cAAc,EAAE,CAAC;EACnC,OAAO,CAAC,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC;EAChC,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;GACrC,CAAC;CACH;;AAED,SAAS,aAAa,GAAG;;;EAGvB,IAAI,gBAAgB,GAAG,UAAU,CAAC;EAClC,OAAO,YAAY;IACjB,OAAO,gBAAgB,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;GACnC,CAAC;CACH;;AAED,IAAI,KAAK,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC;AAC5B,SAAS,KAAK,GAAG;EACf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE;IAC/B,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,GAAG,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;;IAEvB,QAAQ,CAAC,GAAG,CAAC,CAAC;;IAEd,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC;IACrB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;GAC1B;;EAED,GAAG,GAAG,CAAC,CAAC;CACT;;AAED,SAAS,YAAY,GAAG;EACtB,IAAI;IACF,IAAI,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;IACvD,SAAS,GAAG,KAAK,CAAC,SAAS,IAAI,KAAK,CAAC,YAAY,CAAC;IAClD,OAAO,aAAa,EAAE,CAAC;GACxB,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,aAAa,EAAE,CAAC;GACxB;CACF;;AAED,IAAI,aAAa,GAAG,KAAK,CAAC,CAAC;;AAE3B,IAAI,MAAM,EAAE;EACV,aAAa,GAAG,WAAW,EAAE,CAAC;CAC/B,MAAM,IAAI,uBAAuB,EAAE;EAClC,aAAa,GAAG,mBAAmB,EAAE,CAAC;CACvC,MAAM,IAAI,QAAQ,EAAE;EACnB,aAAa,GAAG,iBAAiB,EAAE,CAAC;CACrC,MAAM,IAAI,aAAa,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;EACvE,aAAa,GAAG,YAAY,EAAE,CAAC;CAChC,MAAM;EACL,aA
Aa,GAAG,aAAa,EAAE,CAAC;;;CACjC,DCtHc,SAAS,IAAI,CAAC,aAAa,EAAE,WAAW,EAAE;EACvD,IAAI,MAAM,GAAG,IAAI,CAAC;;EAElB,IAAI,KAAK,GAAG,IAAI,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;;EAEvC,IAAI,KAAK,CAAC,UAAU,CAAC,KAAK,SAAS,EAAE;IACnC,WAAW,CAAC,KAAK,CAAC,CAAC;GACpB;;EAED,IAAI,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;;;EAG3B,IAAI,MAAM,EAAE;IACV,IAAI,QAAQ,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IACrC,IAAI,CAAC,YAAY;MACf,OAAO,cAAc,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;KAChE,CAAC,CAAC;GACJ,MAAM;IACL,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,CAAC,CAAC;GACtD;;EAED,OAAO,KAAK,CAAC;;;CACd,DCxBD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+BA,AAAe,SAASA,SAAO,CAAC,MAAM,EAAE;;EAEtC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,CAAC,WAAW,KAAK,WAAW,EAAE;IAC9E,OAAO,MAAM,CAAC;GACf;;EAED,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,OAAQ,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EAC1B,OAAO,OAAO,CAAC;;;CAChB,DCrCM,IAAI,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;;AAEhE,SAAS,IAAI,GAAG,EAAE;;AAElB,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC;AACrB,IAAI,SAAS,GAAG,CAAC,CAAC;AAClB,IAAI,QAAQ,GAAG,CAAC,CAAC;;AAEjB,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,0CAA0C,CAAC,CAAC;CAClE;;AAED,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,sDAAsD,CAAC,CAAC;CAC9E;;AAED,SAAS,OAAO,CAACC,OAAI,EAAE,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,EAAE;EAClE,IAAI;IACFA,OAAI,CAAC,IAAI,CAAC,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;GACxD,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,CAAC;GACV;CACF;;AAED,SAAS,qBAAqB,CAAC,OAAO,EAAE,QAAQ,EAAEA,OAAI,EAAE;EACtD,IAAI,CAAC,UAAU,OAAO,EAAE;IACtB,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,IAAI,KAAK,GAAG,OAAO,CAACA,OAAI,EAAE,QAAQ,EAAE,UAAU,KAAK,EAAE;MACnD,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;MACd,IAAI,QAAQ,KAAK,KAAK,EAAE;QACtB,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB,MAAM;QACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB;KACF,EAAE,UAAU,MAAM,EAAE;MACnB,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;;MAEd,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,EAAE,UAAU,IAAI,OAAO,CAAC,MAAM,IAAI,kBAAkB,CAAC,CAAC,CAAC;;IAExD,IAAI,CAAC,MAAM,IAAI,KAAK,EAAE;MACpB,MAAM,GAAG,IAAI,CAAC;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACxB;GACF,EAAE,OAAO,CAAC,CAAC;CACb;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI,QAAQ,CAAC,MAAM,KAAK,SAAS,EAAE;IACjC,OAAO,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACpC,MAAM,IAAI,QAAQ,CAAC,MAAM,KAAK,QAAQ,EAAE;IACvC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACnC,MAAM;IACL,SAAS,CAAC,QAAQ,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC9C,OAAO,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KAChC,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KAChC,CAAC,CAAC;GACJ;CACF;;AAED,SAAS,mBAAmB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,EAAE;EACzD,IAAI,aAAa,CAAC,WAAW,KAAK,OAAO,CAAC,WAAW,IAAIA,OAAI,KAAKC,IAAY,IAAI,aAAa,CAAC,WAAW,CAAC,OAAO,KAAKC,SAAe,EAAE;IACvI,iBAAiB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;GAC3C,MAAM;IACL,IAAIF,OAAI,KAAK,SAAS,EAAE;MACtB,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC,MAAM,IAAI,UAAU,CAACA,OAAI,CAAC,EAAE;MAC3B,qBAAqB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,CAAC,CAAC;KACrD,MAAM;MACL,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC;GACF;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,KAAK,KAAK,EAAE;IACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;GACpC,MAAM,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;IAClC,IAAIA,OAAI,GAAG,KAAK,CAAC,CAAC;IAClB,IAAI;MACFA,OAAI,GAAG,KAAK,CAAC,IAAI,CAAC;KACnB,CAAC,OAAO,KAAK,EAAE;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;MACvB,OAAO;KACR;IACD,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAEA,OAAI,CAAC,CAAC;GAC3C,MAAM;IACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB;CACF;;AAED,SAAS,gBAAgB,CAAC,OAAO,EAAE;EACjC,IAAI
,OAAO,CAAC,QAAQ,EAAE;IACpB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;GACnC;;EAED,OAAO,CAAC,OAAO,CAAC,CAAC;CAClB;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;;EAED,OAAO,CAAC,OAAO,GAAG,KAAK,CAAC;EACxB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;;EAE3B,IAAI,OAAO,CAAC,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;IACrC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;EACD,OAAO,CAAC,MAAM,GAAG,QAAQ,CAAC;EAC1B,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC;;EAEzB,IAAI,CAAC,gBAAgB,EAAE,OAAO,CAAC,CAAC;CACjC;;AAED,SAAS,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,EAAE;EAC5D,IAAI,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;EACvC,IAAI,MAAM,GAAG,YAAY,CAAC,MAAM,CAAC;;;EAGjC,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC;;EAEvB,YAAY,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC;EAC7B,YAAY,CAAC,MAAM,GAAG,SAAS,CAAC,GAAG,aAAa,CAAC;EACjD,YAAY,CAAC,MAAM,GAAG,QAAQ,CAAC,GAAG,WAAW,CAAC;;EAE9C,IAAI,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,MAAM,EAAE;IACjC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;GACvB;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE;EACxB,IAAI,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;EACvC,IAAI,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC;;EAE7B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;IAC5B,OAAO;GACR;;EAED,IAAI,KAAK,GAAG,KAAK,CAAC;MACd,QAAQ,GAAG,KAAK,CAAC;MACjB,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;;EAE7B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,IAAI,CAAC,EAAE;IAC9C,KAAK,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;IACvB,QAAQ,GAAG,WAAW,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC;;IAEpC,IAAI,KAAK,EAAE;MACT,cAAc,CAAC,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;KAClD,MAAM;MACL,QAAQ,CAAC,MAAM,CAAC,CAAC;KAClB;GACF;;EAED,OAAO,CAAC,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC;CACjC;;AAED,SAAS,cAAc,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE;EAC1D,IAAI,WAAW,GAAG,UAAU,CAAC,QAAQ,CAAC;MAClC,KAAK,GAAG,KAAK,CAAC;MACd,KAAK,GAAG,KAAK,CAAC;MACd,SAAS,GAAG,IAAI,CAAC;;EAErB,IAAI,WAAW,EAAE;IACf,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC;KAC1B,CAAC,OAAO,CAAC,EAAE;MACV,SAAS,GAAG,KAAK,CAAC;MAClB,KAAK,GAAG,CAAC,CAAC;KACX;;IAED,IAAI,OAAO,KAAK,KAAK,EAAE;MACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;MACnC,OAAO;KACR;GACF,MAAM;IACL,KAAK,GAAG,MAAM,CAAC;GAChB;;EAED,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;;GAE/B,MAAM,IAAI,WAAW,IAAI,SAAS,EAAE;IACnC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,SAAS,KAAK,KAAK,EAAE;IAC9B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB,MAAM,IAAI,OAAO,KAAK,SAAS,EAAE;IAChC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,OAAO,KAAK,QAAQ,EAAE;IAC/B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI;IACF,QAAQ,CAAC,SAAS,cAAc,CAAC,KAAK,EAAE;MACtC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACzB,EAAE,SAAS,aAAa,CAAC,MAAM,EAAE;MAChC,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,CAAC,CAAC;GACJ,CAAC,OAAO,CAAC,EAAE;IACV,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;GACpB;CACF;;AAED,IAAI,EAAE,GAAG,CAAC,CAAC;AACX,SAAS,MAAM,GAAG;EAChB,OAAO,EAAE,EAAE,CAAC;CACb;;AAED,SAAS,WAAW,CAAC,OAAO,EAAE;EAC5B,OAAO,CAAC,UAAU,CAAC,GAAG,EAAE,EAAE,CAAC;EAC3B,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;EAC3B,OAAO,CAAC,OAAO,GAAG,SAAS,CAAC;EAC5B,OAAO,CAAC,YAAY,GAAG,EAAE,CAAC;CAC3B;;AChOD,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;CAC7D,AAAC;;AAEF,IAAI,UAAU,GAAG,YAAY;EAC3B,SAAS,UAAU,CAAC,WAAW,EAAE,KAAK,EAAE;IACtC,IAAI,CAAC,oBAAoB,GAAG,WAAW,CAAC;IACxC,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;;IAErC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;MAC7B,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;KAC3B;;IAED,IAAI,OAAO,CAAC,KAAK,CAAC,EAAE;MAClB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC;MAC3B,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,MAAM,CAAC;;MAE/B,IAAI,CAAC,OAAO,
GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;;MAEtC,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACrB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;OACrC,MAAM;QACL,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;QACvB,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;UACzB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;SACrC;OACF;KACF,MAAM;MACL,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;KACzC;GACF;;EAED,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE;IAC3D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,KAAK,OAAO,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;MAChE,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC9B;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE;IAC9D,IAAI,CAAC,GAAG,IAAI,CAAC,oBAAoB,CAAC;IAClC,IAAIF,UAAO,GAAG,CAAC,CAAC,OAAO,CAAC;;;IAGxB,IAAIA,UAAO,KAAKI,SAAe,EAAE;MAC/B,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;MACnB,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;MACnB,IAAI,QAAQ,GAAG,KAAK,CAAC;MACrB,IAAI;QACF,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC;OACpB,CAAC,OAAO,CAAC,EAAE;QACV,QAAQ,GAAG,IAAI,CAAC;QAChB,KAAK,GAAG,CAAC,CAAC;OACX;;MAED,IAAI,KAAK,KAAKD,IAAY,IAAI,KAAK,CAAC,MAAM,KAAK,OAAO,EAAE;QACtD,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;OACjD,MAAM,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;QACtC,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB,MAAM,IAAI,CAAC,KAAKE,SAAO,EAAE;QACxB,IAAI,OAAO,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC;QAC1B,IAAI,QAAQ,EAAE;UACZ,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;SACxB,MAAM;UACL,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;SAC5C;QACD,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;OAChC,MAAM;QACL,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,UAAUL,UAAO,EAAE;UAC1C,OAAOA,UAAO,CAAC,KAAK,CAAC,CAAC;SACvB,CAAC,EAAE,CAAC,CAAC,CAAC;OACR;KACF,MAAM;MACL,IAAI,CAAC,aAAa,CAACA,UAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC;KACvC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE,KAAK,EAAE;IACrE,IAAI,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;;;IAG3B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;MAC9B,IAAI,CAAC,UAAU,EAAE,CAAC;;MAElB,IAAI,KAAK,KAAK,QAAQ,EAAE;QACtB,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACxB,MAAM;QACL,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB;KACF;;IAED,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;MACzB,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;KAChC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,aAAa,GAAG,SAAS,aAAa,CAAC,OAAO,EAAE,CAAC,EAAE;IACtE,IAAI,UAAU,GAAG,IAAI,CAAC;;IAEtB,SAAS,CAAC,OAAO,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC7C,OAAO,UAAU,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC;KACnD,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,UAAU,CAAC,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;KACnD,CAAC,CAAC;GACJ,CAAC;;EAEF,OAAO,UAAU,CAAC;CACnB,EAAE;;ACrHH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+CA,AAAe,SAAS,GAAG,CAAC,OAAO,EAAE;EACnC,OAAO,IAAI,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC,OAAO,CAAC;;;CAC9C,DCjDD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiEA,AAAe,SAAS,IAAI,CAAC,OAAO,EAAE;;EAEpC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;IACrB,OAAO,IAAI,WAAW,CAAC,UAAU,CAAC,EAAE,MAAM,EAAE;MAC1C,OAAO,MAAM,CAAC,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAC,CAAC;KACjE,CAAC,CAAC;GACJ,MAAM;IACL,OAAO,IAAI,WAAW,CAAC,UAAU,OAAO,EAAE,MAAM,EAAE;MAChD,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;MAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE;QAC/B,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;OACvD;KACF,CAAC,CAAC;GACJ;;;CACF,DCjFD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCA,AAAe,SAASM,QAAM,CAAC,MAAM,EAAE;;EAErC,IAAI,WAA
W,GAAG,IAAI,CAAC;EACvB,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,MAAO,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EACzB,OAAO,OAAO,CAAC;;;CAChB,DC9BD,SAAS,aAAa,GAAG;EACvB,MAAM,IAAI,SAAS,CAAC,oFAAoF,CAAC,CAAC;CAC3G;;AAED,SAAS,QAAQ,GAAG;EAClB,MAAM,IAAI,SAAS,CAAC,uHAAuH,CAAC,CAAC;CAC9I;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0GD,IAAIF,SAAO,GAAG,YAAY;EACxB,SAAS,OAAO,CAAC,QAAQ,EAAE;IACzB,IAAI,CAAC,UAAU,CAAC,GAAG,MAAM,EAAE,CAAC;IAC5B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;IACvC,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;;IAEvB,IAAI,IAAI,KAAK,QAAQ,EAAE;MACrB,OAAO,QAAQ,KAAK,UAAU,IAAI,aAAa,EAAE,CAAC;MAClD,IAAI,YAAY,OAAO,GAAG,iBAAiB,CAAC,IAAI,EAAE,QAAQ,CAAC,GAAG,QAAQ,EAAE,CAAC;KAC1E;GACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA4LD,OAAO,CAAC,SAAS,CAAC,KAAK,GAAG,SAAS,MAAM,CAAC,WAAW,EAAE;IACrD,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;GACrC,CAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA0CF,OAAO,CAAC,SAAS,CAAC,OAAO,GAAG,SAAS,QAAQ,CAAC,QAAQ,EAAE;IACtD,IAAI,OAAO,GAAG,IAAI,CAAC;IACnB,IAAI,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;;IAEtC,IAAI,UAAU,CAAC,QAAQ,CAAC,EAAE;MACxB,OAAO,OAAO,CAAC,IAAI,CAAC,UAAU,KAAK,EAAE;QACnC,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;UACtD,OAAO,KAAK,CAAC;SACd,CAAC,CAAC;OACJ,EAAE,UAAU,MAAM,EAAE;QACnB,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;UACtD,MAAM,MAAM,CAAC;SACd,CAAC,CAAC;OACJ,CAAC,CAAC;KACJ;;IAED,OAAO,OAAO,CAAC,IAAI,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;GACzC,CAAC;;EAEF,OAAO,OAAO,CAAC;CAChB,EAAE,CAAC;;AAEJA,SAAO,CAAC,SAAS,CAAC,IAAI,GAAG,IAAI,CAAC;AAC9B,AACAA,SAAO,CAAC,GAAG,GAAG,GAAG,CAAC;AAClBA,SAAO,CAAC,IAAI,GAAG,IAAI,CAAC;AACpBA,SAAO,CAAC,OAAO,GAAGG,SAAO,CAAC;AAC1BH,SAAO,CAAC,MAAM,GAAGI,QAAM,CAAC;AACxBJ,SAAO,CAAC,aAAa,GAAG,YAAY,CAAC;AACrCA,SAAO,CAAC,QAAQ,GAAG,OAAO,CAAC;AAC3BA,SAAO,CAAC,KAAK,GAAG,IAAI;;AC5YpB;AACA,AAEe,SAAS,QAAQ,GAAG;EACjC,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;;EAEnB,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;IACjC,KAAK,GAAG,MAAM,CAAC;GAChB,MAAM,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;IACtC,KAAK,GAAG,IAAI,CAAC;GACd,MAAM;IACL,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC;KACnC,CAAC,OAAO,CAAC,EAAE;MACV,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;KAC7F;GACF;;EAED,IAAI,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC;;EAEtB,IAAI,CAAC,EAAE;IACL,IAAI,eAAe,GAAG,IAAI,CAAC;IAC3B,IAAI;MACF,eAAe,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KAC/D,CAAC,OAAO,CAAC,EAAE;;KAEX;;IAED,IAAI,eAAe,KAAK,kBAAkB,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE;MACrD,OAAO;KACR;GACF;;EAED,KAAK,CAAC,OAAO,GAAGA,SAAO,CAAC;;;CACzB,DC/BD;AACAA,SAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC;AAC5BA,SAAO,CAAC,OAAO,GAAGA,SAAO,CAAC;;;;;;;;","file":"es6-promise.min.js"} \ No newline at end of file diff --git a/node_modules/es6-promise/es6-promise.d.ts b/node_modules/es6-promise/es6-promise.d.ts index cce5b36071e6c..e4200dfd0d0f6 100644 --- a/node_modules/es6-promise/es6-promise.d.ts +++ b/node_modules/es6-promise/es6-promise.d.ts @@ -34,11 +34,15 @@ export class Promise implements Thenable { catch (onRejected?: (error: any) => U | Thenable): Promise; /** - * onSettled is invoked when/if the "promise" settles (either rejects or fulfills); + * onSettled is invoked when/if the "promise" settles (either rejects or fulfills). + * The returned promise is settled when the `Thenable` returned by `onFinally` settles; + * it is rejected if `onFinally` throws or rejects; otherwise it assumes the state of the + * original Promise. 
* * @param onFinally called when/if "promise" settles + */ - finally (onFinally?: (callback: any) => U | Thenable): Promise; + finally (onFinally?: () => any | Thenable): Promise; /** * Make a new promise from the thenable. diff --git a/node_modules/es6-promise/lib/es6-promise/-internal.js b/node_modules/es6-promise/lib/es6-promise/-internal.js index 925776f55f07a..6bd75a82c309d 100644 --- a/node_modules/es6-promise/lib/es6-promise/-internal.js +++ b/node_modules/es6-promise/lib/es6-promise/-internal.js @@ -18,8 +18,6 @@ const PENDING = void 0; const FULFILLED = 1; const REJECTED = 2; -const TRY_CATCH_ERROR = { error: null }; - function selfFulfillment() { return new TypeError("You cannot resolve a promise with itself"); } @@ -28,15 +26,6 @@ function cannotReturnOwn() { return new TypeError('A promises callback cannot return that same promise.'); } -function getThen(promise) { - try { - return promise.then; - } catch(error) { - TRY_CATCH_ERROR.error = error; - return TRY_CATCH_ERROR; - } -} - function tryThen(then, value, fulfillmentHandler, rejectionHandler) { try { then.call(value, fulfillmentHandler, rejectionHandler); @@ -47,8 +36,8 @@ function tryThen(then, value, fulfillmentHandler, rejectionHandler) { function handleForeignThenable(promise, thenable, then) { asap(promise => { - var sealed = false; - var error = tryThen(then, thenable, value => { + let sealed = false; + let error = tryThen(then, thenable, value => { if (sealed) { return; } sealed = true; if (thenable !== value) { @@ -87,10 +76,7 @@ function handleMaybeThenable(promise, maybeThenable, then) { maybeThenable.constructor.resolve === originalResolve) { handleOwnThenable(promise, maybeThenable); } else { - if (then === TRY_CATCH_ERROR) { - reject(promise, TRY_CATCH_ERROR.error); - TRY_CATCH_ERROR.error = null; - } else if (then === undefined) { + if (then === undefined) { fulfill(promise, maybeThenable); } else if (isFunction(then)) { handleForeignThenable(promise, maybeThenable, then); @@ -104,7 +90,14 @@ function resolve(promise, value) { if (promise === value) { reject(promise, selfFulfillment()); } else if (objectOrFunction(value)) { - handleMaybeThenable(promise, value, getThen(value)); + let then; + try { + then = value.then; + } catch (error) { + reject(promise, error); + return; + } + handleMaybeThenable(promise, value, then); } else { fulfill(promise, value); } @@ -174,46 +167,31 @@ function publish(promise) { promise._subscribers.length = 0; } - -function tryCatch(callback, detail) { - try { - return callback(detail); - } catch(e) { - TRY_CATCH_ERROR.error = e; - return TRY_CATCH_ERROR; - } -} - function invokeCallback(settled, promise, callback, detail) { let hasCallback = isFunction(callback), - value, error, succeeded, failed; + value, error, succeeded = true; if (hasCallback) { - value = tryCatch(callback, detail); - - if (value === TRY_CATCH_ERROR) { - failed = true; - error = value.error; - value.error = null; - } else { - succeeded = true; + try { + value = callback(detail); + } catch (e) { + succeeded = false; + error = e; } if (promise === value) { reject(promise, cannotReturnOwn()); return; } - } else { value = detail; - succeeded = true; } if (promise._state !== PENDING) { // noop } else if (hasCallback && succeeded) { resolve(promise, value); - } else if (failed) { + } else if (succeeded === false) { reject(promise, error); } else if (settled === FULFILLED) { fulfill(promise, value); @@ -249,7 +227,6 @@ function makePromise(promise) { export { nextId, makePromise, - getThen, noop, resolve, reject, 
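The `-internal.js` hunk above drops the module-level `TRY_CATCH_ERROR` sentinel and the `getThen`/`tryCatch` helpers in favour of plain inline `try`/`catch`. The practical effect is easiest to see with a thenable whose `then` getter throws; the snippet below is only an illustration of that behaviour (it is not part of the diff) and works the same against the patched es6-promise build or a native `Promise`:

```js
// A "thenable" whose `then` accessor throws when the resolver reads it.
const poisoned = {
  get then() {
    throw new Error('boom');
  }
};

// resolve() now reads `value.then` inside its own try/catch and rejects the
// promise with whatever was thrown, instead of routing the error through the
// reusable TRY_CATCH_ERROR object the old code shared across calls.
Promise.resolve(poisoned).then(
  () => console.log('never reached'),
  err => console.log('rejected with:', err.message) // => rejected with: boom
);
```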
diff --git a/node_modules/es6-promise/lib/es6-promise/enumerator.js b/node_modules/es6-promise/lib/es6-promise/enumerator.js index b0eaa9300eb06..be2e0938a9987 100644 --- a/node_modules/es6-promise/lib/es6-promise/enumerator.js +++ b/node_modules/es6-promise/lib/es6-promise/enumerator.js @@ -10,7 +10,6 @@ import { FULFILLED, REJECTED, PENDING, - getThen, handleMaybeThenable } from './-internal'; @@ -63,7 +62,15 @@ export default class Enumerator { let { resolve } = c; if (resolve === originalResolve) { - let then = getThen(entry); + let then; + let error; + let didError = false; + try { + then = entry.then; + } catch (e) { + didError = true; + error = e; + } if (then === originalThen && entry._state !== PENDING) { @@ -73,7 +80,11 @@ export default class Enumerator { this._result[i] = entry; } else if (c === Promise) { let promise = new c(noop); - handleMaybeThenable(promise, entry, then); + if (didError) { + reject(promise, error); + } else { + handleMaybeThenable(promise, entry, then); + } this._willSettleAt(promise, i); } else { this._willSettleAt(new c(resolve => resolve(entry)), i); diff --git a/node_modules/es6-promise/lib/es6-promise/promise.js b/node_modules/es6-promise/lib/es6-promise/promise.js index 8c21b6f2b3ab4..ae1703638d710 100644 --- a/node_modules/es6-promise/lib/es6-promise/promise.js +++ b/node_modules/es6-promise/lib/es6-promise/promise.js @@ -410,8 +410,12 @@ class Promise { let promise = this; let constructor = promise.constructor; - return promise.then(value => constructor.resolve(callback()).then(() => value), - reason => constructor.resolve(callback()).then(() => { throw reason; })); + if ( isFunction(callback) ) { + return promise.then(value => constructor.resolve(callback()).then(() => value), + reason => constructor.resolve(callback()).then(() => { throw reason; })); + } + + return promise.then(callback, callback); } } diff --git a/node_modules/es6-promise/package.json b/node_modules/es6-promise/package.json index 5fe4bcbbfb329..9095197841815 100644 --- a/node_modules/es6-promise/package.json +++ b/node_modules/es6-promise/package.json @@ -1,8 +1,8 @@ { "_from": "es6-promise@^4.0.3", - "_id": "es6-promise@4.2.4", + "_id": "es6-promise@4.2.8", "_inBundle": false, - "_integrity": "sha512-/NdNZVJg+uZgtm9eS3O6lrOLYmQag2DjdEXuPaHlZ6RuVqgqaVZfgYCepEIKsLqwdQArOPtC3XzRLqGGfT8KQQ==", + "_integrity": "sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==", "_location": "/es6-promise", "_phantomChildren": {}, "_requested": { @@ -18,10 +18,10 @@ "_requiredBy": [ "/es6-promisify" ], - "_resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.4.tgz", - "_shasum": "dc4221c2b16518760bd8c39a52d8f356fc00ed29", + "_resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", + "_shasum": "4eb21594c972bc40553d276e510539143db53e0a", "_spec": "es6-promise@^4.0.3", - "_where": "/Users/rebecca/code/npm/node_modules/es6-promisify", + "_where": "/Users/isaacs/dev/npm/cli/node_modules/es6-promisify", "author": { "name": "Yehuda Katz, Tom Dale, Stefan Penner and contributors", "url": "Conversion to ES6 API by Jake Archibald" @@ -72,12 +72,13 @@ "auto.js", "!dist/test" ], - "homepage": "https://github.com/stefanpenner/es6-promise#readme", + "homepage": "https://github.com/stefanpenner/es6-promise", + "jsdelivr": "dist/es6-promise.auto.min.js", "keywords": [ - "promises", - "promise", + "futures", "polyfill", - "futures" + "promise", + "promises" ], "license": "MIT", "main": "dist/es6-promise.js", @@ -100,5 +101,6 @@ 
"main": "dist/es6-promise.js" }, "typings": "es6-promise.d.ts", - "version": "4.2.4" + "unpkg": "dist/es6-promise.auto.min.js", + "version": "4.2.8" } diff --git a/node_modules/execa/node_modules/get-stream/buffer-stream.js b/node_modules/execa/node_modules/get-stream/buffer-stream.js new file mode 100644 index 0000000000000..ae45d3d9e7417 --- /dev/null +++ b/node_modules/execa/node_modules/get-stream/buffer-stream.js @@ -0,0 +1,51 @@ +'use strict'; +const PassThrough = require('stream').PassThrough; + +module.exports = opts => { + opts = Object.assign({}, opts); + + const array = opts.array; + let encoding = opts.encoding; + const buffer = encoding === 'buffer'; + let objectMode = false; + + if (array) { + objectMode = !(encoding || buffer); + } else { + encoding = encoding || 'utf8'; + } + + if (buffer) { + encoding = null; + } + + let len = 0; + const ret = []; + const stream = new PassThrough({objectMode}); + + if (encoding) { + stream.setEncoding(encoding); + } + + stream.on('data', chunk => { + ret.push(chunk); + + if (objectMode) { + len = ret.length; + } else { + len += chunk.length; + } + }); + + stream.getBufferedValue = () => { + if (array) { + return ret; + } + + return buffer ? Buffer.concat(ret, len) : ret.join(''); + }; + + stream.getBufferedLength = () => len; + + return stream; +}; diff --git a/node_modules/execa/node_modules/get-stream/index.js b/node_modules/execa/node_modules/get-stream/index.js new file mode 100644 index 0000000000000..2dc5ee96af2d9 --- /dev/null +++ b/node_modules/execa/node_modules/get-stream/index.js @@ -0,0 +1,51 @@ +'use strict'; +const bufferStream = require('./buffer-stream'); + +function getStream(inputStream, opts) { + if (!inputStream) { + return Promise.reject(new Error('Expected a stream')); + } + + opts = Object.assign({maxBuffer: Infinity}, opts); + + const maxBuffer = opts.maxBuffer; + let stream; + let clean; + + const p = new Promise((resolve, reject) => { + const error = err => { + if (err) { // null check + err.bufferedData = stream.getBufferedValue(); + } + + reject(err); + }; + + stream = bufferStream(opts); + inputStream.once('error', error); + inputStream.pipe(stream); + + stream.on('data', () => { + if (stream.getBufferedLength() > maxBuffer) { + reject(new Error('maxBuffer exceeded')); + } + }); + stream.once('error', error); + stream.on('end', resolve); + + clean = () => { + // some streams doesn't implement the `stream.Readable` interface correctly + if (inputStream.unpipe) { + inputStream.unpipe(stream); + } + }; + }); + + p.then(clean, clean); + + return p.then(() => stream.getBufferedValue()); +} + +module.exports = getStream; +module.exports.buffer = (stream, opts) => getStream(stream, Object.assign({}, opts, {encoding: 'buffer'})); +module.exports.array = (stream, opts) => getStream(stream, Object.assign({}, opts, {array: true})); diff --git a/node_modules/is-builtin-module/license b/node_modules/execa/node_modules/get-stream/license similarity index 100% rename from node_modules/is-builtin-module/license rename to node_modules/execa/node_modules/get-stream/license diff --git a/node_modules/execa/node_modules/get-stream/package.json b/node_modules/execa/node_modules/get-stream/package.json new file mode 100644 index 0000000000000..987588836554a --- /dev/null +++ b/node_modules/execa/node_modules/get-stream/package.json @@ -0,0 +1,80 @@ +{ + "_from": "get-stream@^3.0.0", + "_id": "get-stream@3.0.0", + "_inBundle": false, + "_integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=", + "_location": "/execa/get-stream", + 
"_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "get-stream@^3.0.0", + "name": "get-stream", + "escapedName": "get-stream", + "rawSpec": "^3.0.0", + "saveSpec": null, + "fetchSpec": "^3.0.0" + }, + "_requiredBy": [ + "/execa" + ], + "_resolved": "http://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "_shasum": "8e943d1358dc37555054ecbe2edb05aa174ede14", + "_spec": "get-stream@^3.0.0", + "_where": "/Users/zkat/Documents/code/work/npm/node_modules/execa", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/get-stream/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Get a stream as a string, buffer, or array", + "devDependencies": { + "ava": "*", + "into-stream": "^3.0.0", + "xo": "*" + }, + "engines": { + "node": ">=4" + }, + "files": [ + "index.js", + "buffer-stream.js" + ], + "homepage": "https://github.com/sindresorhus/get-stream#readme", + "keywords": [ + "get", + "stream", + "promise", + "concat", + "string", + "str", + "text", + "buffer", + "read", + "data", + "consume", + "readable", + "readablestream", + "array", + "object", + "obj" + ], + "license": "MIT", + "name": "get-stream", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/get-stream.git" + }, + "scripts": { + "test": "xo && ava" + }, + "version": "3.0.0", + "xo": { + "esnext": true + } +} diff --git a/node_modules/execa/node_modules/get-stream/readme.md b/node_modules/execa/node_modules/get-stream/readme.md new file mode 100644 index 0000000000000..73b188fb420f2 --- /dev/null +++ b/node_modules/execa/node_modules/get-stream/readme.md @@ -0,0 +1,117 @@ +# get-stream [![Build Status](https://travis-ci.org/sindresorhus/get-stream.svg?branch=master)](https://travis-ci.org/sindresorhus/get-stream) + +> Get a stream as a string, buffer, or array + + +## Install + +``` +$ npm install --save get-stream +``` + + +## Usage + +```js +const fs = require('fs'); +const getStream = require('get-stream'); +const stream = fs.createReadStream('unicorn.txt'); + +getStream(stream).then(str => { + console.log(str); + /* + ,,))))))));, + __)))))))))))))), + \|/ -\(((((''''((((((((. + -*-==//////(('' . `)))))), + /|\ ))| o ;-. '((((( ,(, + ( `| / ) ;))))' ,_))^;(~ + | | | ,))((((_ _____------~~~-. %,;(;(>';'~ + o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~ + ; ''''```` `: `:::|\,__,%% );`'; ~ + | _ ) / `:|`----' `-' + ______/\/~ | / / + /~;;.____/;;' / ___--,-( `;;;/ + / // _;______;'------~~~~~ /;;/\ / + // | | / ; \;;,\ + (<_ | ; /',/-----' _> + \_| ||_ //~;~~~~~~~~~ + `\_| (,~~ + \~\ + ~~ + */ +}); +``` + + +## API + +The methods returns a promise that resolves when the `end` event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode. + +### getStream(stream, [options]) + +Get the `stream` as a string. + +#### options + +##### encoding + +Type: `string`
    +Default: `utf8` + +[Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream. + +##### maxBuffer + +Type: `number`
    +Default: `Infinity` + +Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected. + +### getStream.buffer(stream, [options]) + +Get the `stream` as a buffer. + +It honors the `maxBuffer` option as above, but it refers to byte length rather than string length. + +### getStream.array(stream, [options]) + +Get the `stream` as an array of values. + +It honors both the `maxBuffer` and `encoding` options. The behavior changes slightly based on the encoding chosen: + +- When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes). + +- When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array. + +- When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array. + + +## Errors + +If the input stream emits an `error` event, the promise will be rejected with the error. The buffered data will be attached to the `bufferedData` property of the error. + +```js +getStream(streamThatErrorsAtTheEnd('unicorn')) + .catch(err => { + console.log(err.bufferedData); + //=> 'unicorn' + }); +``` + + +## FAQ + +### How is this different from [`concat-stream`](https://github.com/maxogden/concat-stream)? + +This module accepts a stream instead of being one and returns a promise instead of using a callback. The API is simpler and it only supports returning a string, buffer, or array. It doesn't have a fragile type inference. You explicitly choose what you want. And it doesn't depend on the huge `readable-stream` package. 
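Since the readme above describes `maxBuffer` and the `buffer`/`array` variants only in prose, here is a small usage sketch of that get-stream@3 API; `unicorn.txt` is just a placeholder path and the limits are arbitrary:

```js
'use strict';
const fs = require('fs');
const getStream = require('get-stream');

const file = 'unicorn.txt'; // placeholder input; any readable stream works

// String result; rejects with "maxBuffer exceeded" past ~1 MB of characters.
getStream(fs.createReadStream(file), {maxBuffer: 1024 * 1024})
  .then(str => console.log('characters:', str.length))
  .catch(err => {
    // Stream errors carry the partially read data (see the Errors section above).
    console.error(err.message, err.bufferedData);
  });

// Buffer result; maxBuffer counts bytes here rather than characters.
getStream.buffer(fs.createReadStream(file))
  .then(buf => console.log('bytes:', buf.length));

// Array of strings; each emitted chunk becomes one element.
getStream.array(fs.createReadStream(file), {encoding: 'utf8'})
  .then(chunks => console.log('chunks:', chunks.length));
```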
+ + +## Related + +- [get-stdin](https://github.com/sindresorhus/get-stdin) - Get stdin as a string or buffer + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/figgy-pudding/package.json b/node_modules/figgy-pudding/package.json index 00fc27248205f..4f268f6ff01ea 100644 --- a/node_modules/figgy-pudding/package.json +++ b/node_modules/figgy-pudding/package.json @@ -20,7 +20,10 @@ "/", "/cacache", "/libnpmhook", - "/libnpmhook/npm-registry-fetch" + "/libnpmorg", + "/libnpmteam", + "/npm-registry-fetch", + "/pacote" ], "_resolved": "https://registry.npmjs.org/figgy-pudding/-/figgy-pudding-3.5.1.tgz", "_shasum": "862470112901c727a0e495a80744bd5baa1d6790", diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/.travis.yml b/node_modules/flush-write-stream/node_modules/readable-stream/.travis.yml new file mode 100644 index 0000000000000..40992555bf5cc --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/.travis.yml @@ -0,0 +1,55 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 + env: TASK=test + - node_js: 9 + env: TASK=test +script: "npm run $TASK" +env: + global: + - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= + - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/flush-write-stream/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 0000000000000..f478d58dca85b --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. 
+ +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/GOVERNANCE.md b/node_modules/flush-write-stream/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 0000000000000..16ffb93f24bec --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. 
+ +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. 
diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/LICENSE b/node_modules/flush-write-stream/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000000000..2873b3b2e5950 --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+""" diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/README.md b/node_modules/flush-write-stream/node_modules/readable-stream/README.md new file mode 100644 index 0000000000000..23fe3f3e3009a --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/README.md @@ -0,0 +1,58 @@ +# readable-stream + +***Node-core v8.11.1 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) + +```bash +npm install --save readable-stream +``` + +***Node-core streams for userland*** + +This package is a mirror of the Streams2 and Streams3 implementations in +Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.11.1/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. 
+ + +## Team Members + +* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> + - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> + - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D +* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/flush-write-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md new file mode 100644 index 0000000000000..83275f192e407 --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md @@ -0,0 +1,60 @@ +# streams WG Meeting 2015-01-30 + +## Links + +* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg +* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 +* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ + +## Agenda + +Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. + +* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) +* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) +* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) +* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) + +## Minutes + +### adopt a charter + +* group: +1's all around + +### What versioning scheme should be adopted? +* group: +1’s 3.0.0 +* domenic+group: pulling in patches from other sources where appropriate +* mikeal: version independently, suggesting versions for io.js +* mikeal+domenic: work with TC to notify in advance of changes +simpler stream creation + +### streamline creation of streams +* sam: streamline creation of streams +* domenic: nice simple solution posted + but, we lose the opportunity to change the model + may not be backwards incompatible (double check keys) + + **action item:** domenic will check + +### remove implicit flowing of streams on(‘data’) +* add isFlowing / isPaused +* mikeal: worrying that we’re documenting polyfill methods – confuses users +* domenic: more reflective API is probably good, with warning labels for users +* new section for mad scientists (reflective stream access) +* calvin: name the “third state” +* mikeal: maybe borrow the name from whatwg? +* domenic: we’re missing the “third state” +* consensus: kind of difficult to name the third state +* mikeal: figure out differences in states / compat +* mathias: always flow on data – eliminates third state + * explore what it breaks + +**action items:** +* ask isaac for ability to list packages by what public io.js APIs they use (esp. 
Stream) +* ask rod/build for infrastructure +* **chris**: explore the “flow on data” approach +* add isPaused/isFlowing +* add new docs section +* move isPaused to that section + + diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/duplex-browser.js b/node_modules/flush-write-stream/node_modules/readable-stream/duplex-browser.js new file mode 100644 index 0000000000000..f8b2db83dbe73 --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/duplex-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_duplex.js'); diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/duplex.js b/node_modules/flush-write-stream/node_modules/readable-stream/duplex.js new file mode 100644 index 0000000000000..46924cbfdf538 --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require('./readable').Duplex diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/flush-write-stream/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000000000..a1ca813e5acbd --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,131 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
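+//
+// A minimal sketch of constructing one directly (allowHalfOpen is handled
+// by the no-half-open enforcer below, readableObjectMode by the Readable
+// side's state; without _read/_write implementations the instance does
+// nothing useful):
+//
+//   var duplex = new Duplex({
+//     allowHalfOpen: false,      // end the writable side once the readable side ends
+//     readableObjectMode: true   // object mode for the readable side only
+//   });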
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); + + pna.nextTick(cb, err); +}; \ No newline at end of file diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/flush-write-stream/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000000000..a9c835884828d --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,47 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/flush-write-stream/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000000000..bf34ac65e1108 --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1019 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Readable; + +/**/ +var isArray = require('isarray'); +/**/ + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var debugUtil = require('util'); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = require('./internal/streams/BufferList'); +var destroyImpl = require('./internal/streams/destroy'); +var StringDecoder; + +util.inherits(Readable, Stream); + +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. 
+ this.highWaterMark = Math.floor(this.highWaterMark); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); + +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
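+//
+// A producer respecting that return value might look like this (sketch
+// only; fetchChunk() is a hypothetical stand-in for whatever supplies
+// the data):
+//
+//   if (!readable.push(fetchChunk())) {
+//     // highWaterMark reached: stop producing until _read() is called again
+//   }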
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } + } + + return needMoreData(state); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. 
+ var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. 
+// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. 
+ function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, unpipeInfo); + }return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this, unpipeInfo); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
+function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/flush-write-stream/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000000000..5d1f8b876d98c --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,214 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. 
For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); + } + + ts.writechunk = null; + ts.writecb = null; + + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + + cb(er); + + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. 
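+//
+// A minimal sketch of an implementation (the constructor above also
+// accepts the same function as `options.transform`):
+//
+//   var upper = new Transform({
+//     transform: function (chunk, encoding, cb) {
+//       cb(null, chunk.toString().toUpperCase());
+//     }
+//   });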
+Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; + + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); + + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/flush-write-stream/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000000000..b3f4e85a2f6e3 --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,687 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
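A minimal sketch of the pattern named in the header comment above — supply `_write(chunk, encoding, cb)` (here via the `write` constructor option) and let the base class handle buffering and 'drain'. Illustrative only, not part of the vendored source; the byte-counting behaviour is an assumption for the example:

```js
// Illustrative only: a Writable that counts the bytes written to it.
var Writable = require('readable-stream').Writable;

var counter = new Writable({
  write: function (chunk, encoding, cb) {
    this.bytes = (this.bytes || 0) + chunk.length;
    cb(); // call cb (optionally with an error) once the chunk is handled
  }
});

counter.on('finish', function () {
  console.log('wrote ' + counter.bytes + ' bytes');
});

counter.write('hello ');
counter.end('world');
```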
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +/**/ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +var destroyImpl = require('./internal/streams/destroy'); + +util.inherits(Writable, Stream); + +function nop() {} + +function WritableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. 
+ if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + 
asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = corkReq; + } else { + state.corkedRequestsFree = corkReq; + } +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); + +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); +}; \ No newline at end of file diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/flush-write-stream/node_modules/readable-stream/lib/internal/streams/BufferList.js new file mode 100644 index 0000000000000..aefc68bd90b9c --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/lib/internal/streams/BufferList.js @@ -0,0 +1,79 @@ +'use strict'; + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var Buffer = require('safe-buffer').Buffer; +var util = require('util'); + +function copyBuffer(src, target, offset) { + src.copy(target, offset); +} + +module.exports = function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + BufferList.prototype.push = function push(v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + }; + + BufferList.prototype.unshift = function unshift(v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + }; + + BufferList.prototype.shift = function 
shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + }; + + BufferList.prototype.clear = function clear() { + this.head = this.tail = null; + this.length = 0; + }; + + BufferList.prototype.join = function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; + }; + + BufferList.prototype.concat = function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + if (this.length === 1) return this.head.data; + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + }; + + return BufferList; +}(); + +if (util && util.inspect && util.inspect.custom) { + module.exports.prototype[util.inspect.custom] = function () { + var obj = util.inspect({ length: this.length }); + return this.constructor.name + ' ' + obj; + }; +} \ No newline at end of file diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/flush-write-stream/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 0000000000000..5a0a0d88cec6f --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,74 @@ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err && (!this._writableState || !this._writableState.errorEmitted)) { + pna.nextTick(emitErrorNT, this, err); + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + pna.nextTick(emitErrorNT, _this, err); + if (_this._writableState) { + _this._writableState.errorEmitted = true; + } + } else if (cb) { + cb(err); + } + }); + + return this; +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy +}; \ No newline at end of file diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/flush-write-stream/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 
0000000000000..9332a3fdae706 --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/flush-write-stream/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 0000000000000..ce2ad5b6ee57f --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/package.json b/node_modules/flush-write-stream/node_modules/readable-stream/package.json new file mode 100644 index 0000000000000..d6f84bde6b982 --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/package.json @@ -0,0 +1,81 @@ +{ + "_from": "readable-stream@^2.0.4", + "_id": "readable-stream@2.3.6", + "_inBundle": false, + "_integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "_location": "/flush-write-stream/readable-stream", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "readable-stream@^2.0.4", + "name": "readable-stream", + "escapedName": "readable-stream", + "rawSpec": "^2.0.4", + "saveSpec": null, + "fetchSpec": "^2.0.4" + }, + "_requiredBy": [ + "/flush-write-stream" + ], + "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "_shasum": "b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf", + "_spec": "readable-stream@^2.0.4", + "_where": "/Users/aeschright/code/cli/node_modules/flush-write-stream", + "browser": { + "util": false, + "./readable.js": "./readable-browser.js", + "./writable.js": "./writable-browser.js", + "./duplex.js": "./duplex-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "bugs": { + "url": "https://github.com/nodejs/readable-stream/issues" + }, + "bundleDependencies": false, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "deprecated": false, + "description": "Streams3, a user-land copy of the stream library from Node.js", + "devDependencies": { + "assert": "^1.4.0", + "babel-polyfill": "^6.9.1", + "buffer": "^4.9.0", + "lolex": "^2.3.2", + "nyc": "^6.4.0", + "tap": "^0.7.0", + "tape": "^4.8.0" + }, + "homepage": "https://github.com/nodejs/readable-stream#readme", + "keywords": [ + "readable", + "stream", + "pipe" + ], + "license": "MIT", + "main": "readable.js", + "name": "readable-stream", + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream.git" + }, + "scripts": { + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js" + }, + "version": "2.3.6" +} diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/passthrough.js b/node_modules/flush-write-stream/node_modules/readable-stream/passthrough.js new file mode 100644 index 0000000000000..ffd791d7ff275 --- /dev/null +++ 
b/node_modules/flush-write-stream/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = require('./readable').PassThrough diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/readable-browser.js b/node_modules/flush-write-stream/node_modules/readable-stream/readable-browser.js new file mode 100644 index 0000000000000..e50372592ee6c --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,7 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/readable.js b/node_modules/flush-write-stream/node_modules/readable-stream/readable.js new file mode 100644 index 0000000000000..ec89ec5330649 --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/readable.js @@ -0,0 +1,19 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; + exports = module.exports = Stream.Readable; + exports.Readable = Stream.Readable; + exports.Writable = Stream.Writable; + exports.Duplex = Stream.Duplex; + exports.Transform = Stream.Transform; + exports.PassThrough = Stream.PassThrough; + exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); +} diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/transform.js b/node_modules/flush-write-stream/node_modules/readable-stream/transform.js new file mode 100644 index 0000000000000..b1baba26da03d --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require('./readable').Transform diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/writable-browser.js b/node_modules/flush-write-stream/node_modules/readable-stream/writable-browser.js new file mode 100644 index 0000000000000..ebdde6a85dcb1 --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/writable-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_writable.js'); diff --git a/node_modules/flush-write-stream/node_modules/readable-stream/writable.js b/node_modules/flush-write-stream/node_modules/readable-stream/writable.js new file mode 100644 index 0000000000000..3211a6f80d1ab --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/readable-stream/writable.js @@ -0,0 +1,8 @@ +var Stream = require("stream") +var Writable = require("./lib/_stream_writable.js") + +if (process.env.READABLE_STREAM === 'disable') { + module.exports = Stream && Stream.Writable || Writable +} else { + module.exports = Writable +} diff --git a/node_modules/flush-write-stream/node_modules/string_decoder/.travis.yml b/node_modules/flush-write-stream/node_modules/string_decoder/.travis.yml new file mode 100644 index 0000000000000..3347a72546505 --- /dev/null +++ 
b/node_modules/flush-write-stream/node_modules/string_decoder/.travis.yml @@ -0,0 +1,50 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 + env: TASK=test + - node_js: 9 + env: TASK=test diff --git a/node_modules/flush-write-stream/node_modules/string_decoder/LICENSE b/node_modules/flush-write-stream/node_modules/string_decoder/LICENSE new file mode 100644 index 0000000000000..778edb20730ef --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + diff --git a/node_modules/flush-write-stream/node_modules/string_decoder/README.md b/node_modules/flush-write-stream/node_modules/string_decoder/README.md new file mode 100644 index 0000000000000..5fd58315ed588 --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. + +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/node_modules/flush-write-stream/node_modules/string_decoder/lib/string_decoder.js b/node_modules/flush-write-stream/node_modules/string_decoder/lib/string_decoder.js new file mode 100644 index 0000000000000..2e89e63f7933e --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/string_decoder/lib/string_decoder.js @@ -0,0 +1,296 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +/**/ + +var Buffer = require('safe-buffer').Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. 
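To make the byte classification above concrete, a hedged sketch (not part of the vendored file) of its user-visible effect: a UTF-8 character split across writes is withheld until its continuation bytes arrive. The `string_decoder` require path is an assumption for the example.

```js
// Illustrative only: '€' is 0xE2 0x82 0xAC in UTF-8. The lead byte 0xE2
// announces a 3-byte sequence, so the decoder buffers the partial character
// and emits nothing until the final continuation byte is written.
var StringDecoder = require('string_decoder').StringDecoder;
var decoder = new StringDecoder('utf8');

console.log(JSON.stringify(decoder.write(Buffer.from([0xe2, 0x82])))); // ""
console.log(JSON.stringify(decoder.write(Buffer.from([0xac]))));       // "€"
```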
+function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. 
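A companion sketch for the UTF-16LE case just described (illustrative only, not part of the vendored file): a buffer ending on a high surrogate is held back until the low surrogate arrives on the next write.

```js
// Illustrative only: '😀' (U+1F600) is the surrogate pair 0xD83D 0xDE00,
// i.e. the bytes 3D D8 00 DE in UTF-16LE. Splitting after the high
// surrogate defers that character's output to the next write.
var StringDecoder = require('string_decoder').StringDecoder;
var decoder = new StringDecoder('utf16le');

console.log(decoder.write(Buffer.from([0x3d, 0xd8])).length); // 0
console.log(decoder.write(Buffer.from([0x00, 0xde])));        // 😀
```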
+function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} \ No newline at end of file diff --git a/node_modules/flush-write-stream/node_modules/string_decoder/package.json b/node_modules/flush-write-stream/node_modules/string_decoder/package.json new file mode 100644 index 0000000000000..56012b0108109 --- /dev/null +++ b/node_modules/flush-write-stream/node_modules/string_decoder/package.json @@ -0,0 +1,59 @@ +{ + "_from": "string_decoder@~1.1.1", + "_id": "string_decoder@1.1.1", + "_inBundle": false, + "_integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "_location": "/flush-write-stream/string_decoder", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "string_decoder@~1.1.1", + "name": "string_decoder", + "escapedName": "string_decoder", + "rawSpec": "~1.1.1", + "saveSpec": null, + "fetchSpec": "~1.1.1" + }, + "_requiredBy": [ + "/flush-write-stream/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "_shasum": "9cf1611ba62685d7030ae9e4ba34149c3af03fc8", + "_spec": "string_decoder@~1.1.1", + "_where": "/Users/aeschright/code/cli/node_modules/flush-write-stream/node_modules/readable-stream", + "bugs": { + "url": "https://github.com/nodejs/string_decoder/issues" + }, + "bundleDependencies": false, + "dependencies": { + "safe-buffer": "~5.1.0" + }, + "deprecated": false, + "description": "The string_decoder module from Node core", + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT", + "main": "lib/string_decoder.js", + "name": "string_decoder", + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" 
+ }, + "scripts": { + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "test": "tap test/parallel/*.js && node test/verify-dependencies" + }, + "version": "1.1.1" +} diff --git a/node_modules/from2/node_modules/readable-stream/.travis.yml b/node_modules/from2/node_modules/readable-stream/.travis.yml new file mode 100644 index 0000000000000..40992555bf5cc --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/.travis.yml @@ -0,0 +1,55 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 + env: TASK=test + - node_js: 9 + env: TASK=test +script: "npm run $TASK" +env: + global: + - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= + - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/node_modules/from2/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/from2/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 0000000000000..f478d58dca85b --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. 
+ +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/from2/node_modules/readable-stream/GOVERNANCE.md b/node_modules/from2/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 0000000000000..16ffb93f24bec --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). 
+ +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/node_modules/from2/node_modules/readable-stream/LICENSE b/node_modules/from2/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000000000..2873b3b2e5950 --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/node_modules/from2/node_modules/readable-stream/README.md b/node_modules/from2/node_modules/readable-stream/README.md new file mode 100644 index 0000000000000..23fe3f3e3009a --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/README.md @@ -0,0 +1,58 @@ +# readable-stream + +***Node-core v8.11.1 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) + +```bash +npm install --save readable-stream +``` + +***Node-core streams for userland*** + +This package is a mirror of the Streams2 and Streams3 implementations in +Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.11.1/docs/api/stream.html). 
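Because the package mirrors the core streams API, a consumer can drop it in wherever the built-in `stream` module would be used. The snippet below is an illustrative sketch only (assuming readable-stream v2.x is installed as a dependency); it is not taken from the package's own docs.

```js
// Illustrative only: a tiny Readable built on readable-stream,
// used exactly like the core 'stream' module it mirrors.
var Readable = require('readable-stream').Readable;

var rs = new Readable({
  // Called whenever the consumer wants more data.
  read: function () {
    this.push('hello ');
    this.push('world\n');
    this.push(null); // no more data
  }
});

rs.pipe(process.stdout);
```

For most code the only change from using core streams is the `require()` target.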
+ +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + + +## Team Members + +* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> + - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> + - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D +* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> diff --git a/node_modules/from2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/from2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md new file mode 100644 index 0000000000000..83275f192e407 --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md @@ -0,0 +1,60 @@ +# streams WG Meeting 2015-01-30 + +## Links + +* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg +* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 +* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ + +## Agenda + +Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. + +* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) +* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) +* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) +* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) + +## Minutes + +### adopt a charter + +* group: +1's all around + +### What versioning scheme should be adopted? 
+* group: +1’s 3.0.0 +* domenic+group: pulling in patches from other sources where appropriate +* mikeal: version independently, suggesting versions for io.js +* mikeal+domenic: work with TC to notify in advance of changes +simpler stream creation + +### streamline creation of streams +* sam: streamline creation of streams +* domenic: nice simple solution posted + but, we lose the opportunity to change the model + may not be backwards incompatible (double check keys) + + **action item:** domenic will check + +### remove implicit flowing of streams on(‘data’) +* add isFlowing / isPaused +* mikeal: worrying that we’re documenting polyfill methods – confuses users +* domenic: more reflective API is probably good, with warning labels for users +* new section for mad scientists (reflective stream access) +* calvin: name the “third state” +* mikeal: maybe borrow the name from whatwg? +* domenic: we’re missing the “third state” +* consensus: kind of difficult to name the third state +* mikeal: figure out differences in states / compat +* mathias: always flow on data – eliminates third state + * explore what it breaks + +**action items:** +* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream) +* ask rod/build for infrastructure +* **chris**: explore the “flow on data” approach +* add isPaused/isFlowing +* add new docs section +* move isPaused to that section + + diff --git a/node_modules/from2/node_modules/readable-stream/duplex-browser.js b/node_modules/from2/node_modules/readable-stream/duplex-browser.js new file mode 100644 index 0000000000000..f8b2db83dbe73 --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/duplex-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_duplex.js'); diff --git a/node_modules/from2/node_modules/readable-stream/duplex.js b/node_modules/from2/node_modules/readable-stream/duplex.js new file mode 100644 index 0000000000000..46924cbfdf538 --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require('./readable').Duplex diff --git a/node_modules/from2/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/from2/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000000000..a1ca813e5acbd --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,131 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); + + pna.nextTick(cb, err); +}; \ No newline at end of file diff --git a/node_modules/from2/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/from2/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000000000..a9c835884828d --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,47 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/from2/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/from2/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000000000..bf34ac65e1108 --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1019 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
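`_stream_passthrough.js` above is the smallest possible Transform: its `_transform` simply hands every chunk back via `cb(null, chunk)`. As a point of reference before the much larger Readable implementation that follows, here is a hypothetical round trip through it (illustrative only, not part of the vendored file):

```js
// Illustrative only: data written to a PassThrough comes out unchanged,
// which makes it handy as a neutral pipeline segment or in tests.
var PassThrough = require('readable-stream').PassThrough;

var pt = new PassThrough();
pt.on('data', function (chunk) {
  console.log('echoed:', chunk.toString());
});

pt.write('written chunks are emitted as-is\n');
pt.end();
```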
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Readable; + +/**/ +var isArray = require('isarray'); +/**/ + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var debugUtil = require('util'); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = require('./internal/streams/BufferList'); +var destroyImpl = require('./internal/streams/destroy'); +var StringDecoder; + +util.inherits(Readable, Stream); + +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. 
+ this.highWaterMark = Math.floor(this.highWaterMark); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); + +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
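As the comment above notes, `push()` plays the same role for the readable side that `write()` plays for writables: it returns `false` once buffered data reaches `highWaterMark`. A hypothetical producer can key off that return value and stop generating until `_read()` is called again; the counter, the limit of 1000, and the tiny `highWaterMark` below are all invented for illustration.

```js
// Hypothetical producer that respects push()'s return value: it keeps
// pushing until the buffer reaches highWaterMark, then waits for the
// next _read() call before producing more.
var Readable = require('readable-stream').Readable;

var i = 0;
var counter = new Readable({
  highWaterMark: 16, // small buffer so back-pressure kicks in quickly
  read: function () {
    var keepGoing = true;
    while (keepGoing && i < 1000) {
      // push() returns false once the buffer is "full"; stop until
      // the consumer drains it and _read() is invoked again.
      keepGoing = this.push('chunk ' + i++ + '\n');
    }
    if (i >= 1000) this.push(null);
  }
});

counter.pipe(process.stdout);
```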
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } + } + + return needMoreData(state); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. 
+ var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. 
+// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. 
+ function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, unpipeInfo); + }return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this, unpipeInfo); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
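`pause()` and `resume()` below just flip the `flowing` flag, but from the outside they still support the classic throttling pattern. The sketch is illustrative only; the `PassThrough` source and the one-second delay are stand-ins for a real stream and real work.

```js
// Illustrative throttle: pause the flowing stream after each chunk and
// resume a moment later, exercising the pause()/resume() pair below.
var PassThrough = require('readable-stream').PassThrough;

var source = new PassThrough();

source.on('data', function (chunk) {
  console.log('processing %d bytes', chunk.length);
  source.pause(); // stop 'data' events while we "work"
  setTimeout(function () {
    source.resume(); // pick the flow back up
  }, 1000);
});

source.write('first chunk');
source.write('second chunk');
source.end();
```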
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
+function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/from2/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/from2/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000000000..5d1f8b876d98c --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,214 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. 
Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); + } + + ts.writechunk = null; + ts.writecb = null; + + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + + cb(er); + + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. 
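The contract spelled out above is easiest to see in a concrete subclass. This is an illustrative sketch (the `ToUpper` stream is invented here), not code from the upstream file:

```js
// Hypothetical Transform that upper-cases text, following the contract
// documented above: push() the transformed output, then call cb().
var Transform = require('readable-stream').Transform;
var util = require('util');

function ToUpper(options) {
  if (!(this instanceof ToUpper)) return new ToUpper(options);
  Transform.call(this, options);
}
util.inherits(ToUpper, Transform);

ToUpper.prototype._transform = function (chunk, encoding, cb) {
  // One input chunk produces exactly one output chunk here, though the
  // contract allows zero or many, as the zlib example above describes.
  this.push(chunk.toString().toUpperCase());
  cb(); // signal that this chunk has been fully consumed
};

process.stdin.pipe(new ToUpper()).pipe(process.stdout);
```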
+Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; + + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); + + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/from2/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/from2/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000000000..b3f4e85a2f6e3 --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,687 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
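From the user's side, "implement an async `._write(chunk, encoding, cb)`" looks like the sketch below; the logging sink and the `setImmediate` stand-in for slow I/O are both invented for illustration.

```js
// Illustrative sink: the subclass only supplies an asynchronous
// _write(chunk, encoding, cb); buffering and 'drain' are handled for it.
var Writable = require('readable-stream').Writable;

var sink = new Writable({
  write: function (chunk, encoding, cb) {
    // Pretend this is a slow asynchronous destination.
    setImmediate(function () {
      console.log('wrote %d bytes', chunk.length);
      cb(); // tell the machinery this chunk is done
    });
  }
});

sink.write('one');
sink.write('two');
sink.end('three', function () {
  console.log('all writes flushed');
});
```

Everything else, including queueing writes that arrive while one is in flight and emitting 'drain' when the queue empties, is handled by the machinery in this file.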
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +/**/ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +var destroyImpl = require('./internal/streams/destroy'); + +util.inherits(Writable, Stream); + +function nop() {} + +function WritableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. 
+ if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + 
asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = corkReq; + } else { + state.corkedRequestsFree = corkReq; + } +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); + +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); +}; \ No newline at end of file diff --git a/node_modules/from2/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/from2/node_modules/readable-stream/lib/internal/streams/BufferList.js new file mode 100644 index 0000000000000..aefc68bd90b9c --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/lib/internal/streams/BufferList.js @@ -0,0 +1,79 @@ +'use strict'; + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var Buffer = require('safe-buffer').Buffer; +var util = require('util'); + +function copyBuffer(src, target, offset) { + src.copy(target, offset); +} + +module.exports = function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + BufferList.prototype.push = function push(v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + }; + + BufferList.prototype.unshift = function unshift(v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + }; + + BufferList.prototype.shift = function shift() { + if (this.length === 0) return; 
+ var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + }; + + BufferList.prototype.clear = function clear() { + this.head = this.tail = null; + this.length = 0; + }; + + BufferList.prototype.join = function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; + }; + + BufferList.prototype.concat = function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + if (this.length === 1) return this.head.data; + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + }; + + return BufferList; +}(); + +if (util && util.inspect && util.inspect.custom) { + module.exports.prototype[util.inspect.custom] = function () { + var obj = util.inspect({ length: this.length }); + return this.constructor.name + ' ' + obj; + }; +} \ No newline at end of file diff --git a/node_modules/from2/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/from2/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 0000000000000..5a0a0d88cec6f --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,74 @@ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err && (!this._writableState || !this._writableState.errorEmitted)) { + pna.nextTick(emitErrorNT, this, err); + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + pna.nextTick(emitErrorNT, _this, err); + if (_this._writableState) { + _this._writableState.errorEmitted = true; + } + } else if (cb) { + cb(err); + } + }); + + return this; +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy +}; \ No newline at end of file diff --git a/node_modules/from2/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/from2/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 0000000000000..9332a3fdae706 --- /dev/null +++ 
b/node_modules/from2/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/node_modules/from2/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/from2/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 0000000000000..ce2ad5b6ee57f --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/node_modules/from2/node_modules/readable-stream/package.json b/node_modules/from2/node_modules/readable-stream/package.json new file mode 100644 index 0000000000000..699ff7e674306 --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/package.json @@ -0,0 +1,81 @@ +{ + "_from": "readable-stream@^2.0.0", + "_id": "readable-stream@2.3.6", + "_inBundle": false, + "_integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "_location": "/from2/readable-stream", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "readable-stream@^2.0.0", + "name": "readable-stream", + "escapedName": "readable-stream", + "rawSpec": "^2.0.0", + "saveSpec": null, + "fetchSpec": "^2.0.0" + }, + "_requiredBy": [ + "/from2" + ], + "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "_shasum": "b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf", + "_spec": "readable-stream@^2.0.0", + "_where": "/Users/aeschright/code/cli/node_modules/from2", + "browser": { + "util": false, + "./readable.js": "./readable-browser.js", + "./writable.js": "./writable-browser.js", + "./duplex.js": "./duplex-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "bugs": { + "url": "https://github.com/nodejs/readable-stream/issues" + }, + "bundleDependencies": false, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "deprecated": false, + "description": "Streams3, a user-land copy of the stream library from Node.js", + "devDependencies": { + "assert": "^1.4.0", + "babel-polyfill": "^6.9.1", + "buffer": "^4.9.0", + "lolex": "^2.3.2", + "nyc": "^6.4.0", + "tap": "^0.7.0", + "tape": "^4.8.0" + }, + "homepage": "https://github.com/nodejs/readable-stream#readme", + "keywords": [ + "readable", + "stream", + "pipe" + ], + "license": "MIT", + "main": "readable.js", + "name": "readable-stream", + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream.git" + }, + "scripts": { + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js" + }, + "version": "2.3.6" +} diff --git a/node_modules/from2/node_modules/readable-stream/passthrough.js b/node_modules/from2/node_modules/readable-stream/passthrough.js new file mode 100644 index 0000000000000..ffd791d7ff275 --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = require('./readable').PassThrough diff --git a/node_modules/from2/node_modules/readable-stream/readable-browser.js 
b/node_modules/from2/node_modules/readable-stream/readable-browser.js new file mode 100644 index 0000000000000..e50372592ee6c --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,7 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/node_modules/from2/node_modules/readable-stream/readable.js b/node_modules/from2/node_modules/readable-stream/readable.js new file mode 100644 index 0000000000000..ec89ec5330649 --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/readable.js @@ -0,0 +1,19 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; + exports = module.exports = Stream.Readable; + exports.Readable = Stream.Readable; + exports.Writable = Stream.Writable; + exports.Duplex = Stream.Duplex; + exports.Transform = Stream.Transform; + exports.PassThrough = Stream.PassThrough; + exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); +} diff --git a/node_modules/from2/node_modules/readable-stream/transform.js b/node_modules/from2/node_modules/readable-stream/transform.js new file mode 100644 index 0000000000000..b1baba26da03d --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require('./readable').Transform diff --git a/node_modules/from2/node_modules/readable-stream/writable-browser.js b/node_modules/from2/node_modules/readable-stream/writable-browser.js new file mode 100644 index 0000000000000..ebdde6a85dcb1 --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/writable-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_writable.js'); diff --git a/node_modules/from2/node_modules/readable-stream/writable.js b/node_modules/from2/node_modules/readable-stream/writable.js new file mode 100644 index 0000000000000..3211a6f80d1ab --- /dev/null +++ b/node_modules/from2/node_modules/readable-stream/writable.js @@ -0,0 +1,8 @@ +var Stream = require("stream") +var Writable = require("./lib/_stream_writable.js") + +if (process.env.READABLE_STREAM === 'disable') { + module.exports = Stream && Stream.Writable || Writable +} else { + module.exports = Writable +} diff --git a/node_modules/from2/node_modules/string_decoder/.travis.yml b/node_modules/from2/node_modules/string_decoder/.travis.yml new file mode 100644 index 0000000000000..3347a72546505 --- /dev/null +++ b/node_modules/from2/node_modules/string_decoder/.travis.yml @@ -0,0 +1,50 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - 
NPM_LEGACY=true + - node_js: '0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 + env: TASK=test + - node_js: 9 + env: TASK=test diff --git a/node_modules/from2/node_modules/string_decoder/LICENSE b/node_modules/from2/node_modules/string_decoder/LICENSE new file mode 100644 index 0000000000000..778edb20730ef --- /dev/null +++ b/node_modules/from2/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+""" + diff --git a/node_modules/from2/node_modules/string_decoder/README.md b/node_modules/from2/node_modules/string_decoder/README.md new file mode 100644 index 0000000000000..5fd58315ed588 --- /dev/null +++ b/node_modules/from2/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. + +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/node_modules/from2/node_modules/string_decoder/lib/string_decoder.js b/node_modules/from2/node_modules/string_decoder/lib/string_decoder.js new file mode 100644 index 0000000000000..2e89e63f7933e --- /dev/null +++ b/node_modules/from2/node_modules/string_decoder/lib/string_decoder.js @@ -0,0 +1,296 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +/**/ + +var Buffer = require('safe-buffer').Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? 
-1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. 
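A small usage sketch (not part of this diff) of the partial-character buffering described above: a multi-byte UTF-8 character split across two writes is held back until it can be completed.

```js
// Illustrative only: the decoder keeps the incomplete bytes of '€'
// (0xE2 0x82 0xAC in UTF-8) until the final byte arrives.
const { StringDecoder } = require('string_decoder')

const decoder = new StringDecoder('utf8')
console.log(JSON.stringify(decoder.write(Buffer.from([0xe2, 0x82])))) // ""
console.log(JSON.stringify(decoder.write(Buffer.from([0xac]))))       // "€"
console.log(JSON.stringify(decoder.end()))                            // ""
```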
+function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} \ No newline at end of file diff --git a/node_modules/from2/node_modules/string_decoder/package.json b/node_modules/from2/node_modules/string_decoder/package.json new file mode 100644 index 0000000000000..feec8efa23be6 --- /dev/null +++ b/node_modules/from2/node_modules/string_decoder/package.json @@ -0,0 +1,59 @@ +{ + "_from": "string_decoder@~1.1.1", + "_id": "string_decoder@1.1.1", + "_inBundle": false, + "_integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "_location": "/from2/string_decoder", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "string_decoder@~1.1.1", + "name": "string_decoder", + "escapedName": "string_decoder", + "rawSpec": "~1.1.1", + "saveSpec": null, + "fetchSpec": "~1.1.1" + }, + "_requiredBy": [ + "/from2/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "_shasum": "9cf1611ba62685d7030ae9e4ba34149c3af03fc8", + "_spec": "string_decoder@~1.1.1", + "_where": "/Users/aeschright/code/cli/node_modules/from2/node_modules/readable-stream", + "bugs": { + "url": "https://github.com/nodejs/string_decoder/issues" + }, + "bundleDependencies": false, + "dependencies": { + "safe-buffer": "~5.1.0" + }, + "deprecated": false, + "description": "The string_decoder module from Node core", + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT", + "main": "lib/string_decoder.js", + "name": "string_decoder", + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, + "scripts": { + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee 
test.tap && node test/verify-dependencies.js", + "test": "tap test/parallel/*.js && node test/verify-dependencies" + }, + "version": "1.1.1" +} diff --git a/node_modules/fs-constants/LICENSE b/node_modules/fs-constants/LICENSE new file mode 100644 index 0000000000000..cb757e5dbc9e5 --- /dev/null +++ b/node_modules/fs-constants/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2018 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/fs-constants/README.md b/node_modules/fs-constants/README.md new file mode 100644 index 0000000000000..62b33742e65c7 --- /dev/null +++ b/node_modules/fs-constants/README.md @@ -0,0 +1,26 @@ +# fs-constants + +Small module that allows you to get the fs constants across +Node and the browser. + +``` +npm install fs-constants +``` + +Previously you would use `require('constants')` for this in node but that has been +deprecated and changed to `require('fs').constants` which does not browserify. 
+ +This module uses `require('constants')` in the browser and `require('fs').constants` in node to work around this + + +## Usage + +``` js +var constants = require('fs-constants') + +console.log('constants:', constants) +``` + +## License + +MIT diff --git a/node_modules/fs-constants/browser.js b/node_modules/fs-constants/browser.js new file mode 100644 index 0000000000000..3c87638dcd5e3 --- /dev/null +++ b/node_modules/fs-constants/browser.js @@ -0,0 +1 @@ +module.exports = require('constants') diff --git a/node_modules/fs-constants/index.js b/node_modules/fs-constants/index.js new file mode 100644 index 0000000000000..2a3aadf393cbd --- /dev/null +++ b/node_modules/fs-constants/index.js @@ -0,0 +1 @@ +module.exports = require('fs').constants || require('constants') diff --git a/node_modules/fs-constants/package.json b/node_modules/fs-constants/package.json new file mode 100644 index 0000000000000..532d1ff17f370 --- /dev/null +++ b/node_modules/fs-constants/package.json @@ -0,0 +1,47 @@ +{ + "_from": "fs-constants@^1.0.0", + "_id": "fs-constants@1.0.0", + "_inBundle": false, + "_integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", + "_location": "/fs-constants", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "fs-constants@^1.0.0", + "name": "fs-constants", + "escapedName": "fs-constants", + "rawSpec": "^1.0.0", + "saveSpec": null, + "fetchSpec": "^1.0.0" + }, + "_requiredBy": [ + "/tar-stream" + ], + "_resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", + "_shasum": "6be0de9be998ce16af8afc24497b9ee9b7ccd9ad", + "_spec": "fs-constants@^1.0.0", + "_where": "/Users/zkat/Documents/code/work/npm/node_modules/tar-stream", + "author": { + "name": "Mathias Buus", + "url": "@mafintosh" + }, + "browser": "browser.js", + "bugs": { + "url": "https://github.com/mafintosh/fs-constants/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Require constants across node and the browser", + "devDependencies": {}, + "homepage": "https://github.com/mafintosh/fs-constants", + "license": "MIT", + "main": "index.js", + "name": "fs-constants", + "repository": { + "type": "git", + "url": "git+https://github.com/mafintosh/fs-constants.git" + }, + "version": "1.0.0" +} diff --git a/node_modules/fs-minipass/index.js b/node_modules/fs-minipass/index.js index 0f15c810ebf5f..cd585a83c59b8 100644 --- a/node_modules/fs-minipass/index.js +++ b/node_modules/fs-minipass/index.js @@ -6,7 +6,8 @@ const fs = require('fs') // for writev const binding = process.binding('fs') const writeBuffers = binding.writeBuffers -const FSReqWrap = binding.FSReqWrap +/* istanbul ignore next */ +const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback const _autoClose = Symbol('_autoClose') const _close = Symbol('_close') diff --git a/node_modules/fs-minipass/node_modules/minipass/LICENSE b/node_modules/fs-minipass/node_modules/minipass/LICENSE new file mode 100644 index 0000000000000..20a4762540923 --- /dev/null +++ b/node_modules/fs-minipass/node_modules/minipass/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fs-minipass/node_modules/minipass/README.md b/node_modules/fs-minipass/node_modules/minipass/README.md new file mode 100644 index 0000000000000..c989beea0e6d9 --- /dev/null +++ b/node_modules/fs-minipass/node_modules/minipass/README.md @@ -0,0 +1,606 @@ +# minipass + +A _very_ minimal implementation of a [PassThrough +stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough) + +[It's very +fast](https://docs.google.com/spreadsheets/d/1oObKSrVwLX_7Ut4Z6g3fZW-AX1j1-k6w-cDsrkaSbHM/edit#gid=0) +for objects, strings, and buffers. + +Supports pipe()ing (including multi-pipe() and backpressure +transmission), buffering data until either a `data` event handler or +`pipe()` is added (so you don't lose the first chunk), and most other +cases where PassThrough is a good idea. + +There is a `read()` method, but it's much more efficient to consume +data from this stream via `'data'` events or by calling `pipe()` into +some other stream. Calling `read()` requires the buffer to be +flattened in some cases, which requires copying memory. + +There is also no `unpipe()` method. Once you start piping, there is +no stopping it! + +If you set `objectMode: true` in the options, then whatever is written +will be emitted. Otherwise, it'll do a minimal amount of Buffer +copying to ensure proper Streams semantics when `read(n)` is called. + +`objectMode` can also be set by doing `stream.objectMode = true`, or by +writing any non-string/non-buffer data. `objectMode` cannot be set to +false once it is set. + +This is not a `through` or `through2` stream. It doesn't transform +the data, it just passes it right through. If you want to transform +the data, extend the class, and override the `write()` method. Once +you're done transforming the data however you want, call +`super.write()` with the transform output. + +For some examples of streams that extend Minipass in various ways, check +out: + +- [minizlib](http://npm.im/minizlib) +- [fs-minipass](http://npm.im/fs-minipass) +- [tar](http://npm.im/tar) +- [minipass-collect](http://npm.im/minipass-collect) +- [minipass-flush](http://npm.im/minipass-flush) +- [minipass-pipeline](http://npm.im/minipass-pipeline) +- [tap](http://npm.im/tap) +- [tap-parser](http://npm.im/tap) +- [treport](http://npm.im/tap) + +## Differences from Node.js Streams + +There are several things that make Minipass streams different from (and in +some ways superior to) Node.js core streams. + +Please read these caveats if you are familiar with node-core streams and +intend to use Minipass streams in your programs. + +### Timing + +Minipass streams are designed to support synchronous use-cases. Thus, data +is emitted as soon as it is available, always. It is buffered until read, +but no longer. Another way to look at it is that Minipass streams are +exactly as synchronous as the logic that writes into them.
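For example, a short sketch (assuming the `minipass` package is installed, as in the usage section later in this README) showing that a `'data'` handler runs during the `write()` call itself:

```js
// Small sketch: the 'data' handler fires synchronously, inside write().
const Minipass = require('minipass')

const mp = new Minipass({ encoding: 'utf8' })
mp.on('data', chunk => console.log('data:', chunk))

console.log('before write')
mp.write('hello') // logs "data: hello" here, before write() returns
console.log('after write')
```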
+ +This can be surprising if your code relies on `PassThrough.write()` always +providing data on the next tick rather than the current one, or being able +to call `resume()` and not have the entire buffer disappear immediately. + +However, without this synchronicity guarantee, there would be no way for +Minipass to achieve the speeds it does, or support the synchronous use +cases that it does. Simply put, waiting takes time. + +This non-deferring approach makes Minipass streams much easier to reason +about, especially in the context of Promises and other flow-control +mechanisms. + +### No High/Low Water Marks + +Node.js core streams will optimistically fill up a buffer, returning `true` +on all writes until the limit is hit, even if the data has nowhere to go. +Then, they will not attempt to draw more data in until the buffer size dips +below a minimum value. + +Minipass streams are much simpler. The `write()` method will return `true` +if the data has somewhere to go (which is to say, given the timing +guarantees, that the data is already there by the time `write()` returns). + +If the data has nowhere to go, then `write()` returns false, and the data +sits in a buffer, to be drained out immediately as soon as anyone consumes +it. + +### Hazards of Buffering (or: Why Minipass Is So Fast) + +Since data written to a Minipass stream is immediately written all the way +through the pipeline, and `write()` always returns true/false based on +whether the data was fully flushed, backpressure is communicated +immediately to the upstream caller. This minimizes buffering. + +Consider this case: + +```js +const {PassThrough} = require('stream') +const p1 = new PassThrough({ highWaterMark: 1024 }) +const p2 = new PassThrough({ highWaterMark: 1024 }) +const p3 = new PassThrough({ highWaterMark: 1024 }) +const p4 = new PassThrough({ highWaterMark: 1024 }) + +p1.pipe(p2).pipe(p3).pipe(p4) +p4.on('data', () => console.log('made it through')) + +// this returns false and buffers, then writes to p2 on next tick (1) +// p2 returns false and buffers, pausing p1, then writes to p3 on next tick (2) +// p3 returns false and buffers, pausing p2, then writes to p4 on next tick (3) +// p4 returns false and buffers, pausing p3, then emits 'data' and 'drain' +// on next tick (4) +// p3 sees p4's 'drain' event, and calls resume(), emitting 'resume' and +// 'drain' on next tick (5) +// p2 sees p3's 'drain', calls resume(), emits 'resume' and 'drain' on next tick (6) +// p1 sees p2's 'drain', calls resume(), emits 'resume' and 'drain' on next +// tick (7) + +p1.write(Buffer.alloc(2048)) // returns false +``` + +Along the way, the data was buffered and deferred at each stage, and +multiple event deferrals happened, for an unblocked pipeline where it was +perfectly safe to write all the way through! + +Furthermore, setting a `highWaterMark` of `1024` might lead someone reading +the code to think an advisory maximum of 1KiB is being set for the +pipeline. However, the actual advisory buffering level is the _sum_ of +`highWaterMark` values, since each one has its own bucket. 
+ +Consider the Minipass case: + +```js +const m1 = new Minipass() +const m2 = new Minipass() +const m3 = new Minipass() +const m4 = new Minipass() + +m1.pipe(m2).pipe(m3).pipe(m4) +m4.on('data', () => console.log('made it through')) + +// m1 is flowing, so it writes the data to m2 immediately +// m2 is flowing, so it writes the data to m3 immediately +// m3 is flowing, so it writes the data to m4 immediately +// m4 is flowing, so it fires the 'data' event immediately, returns true +// m4's write returned true, so m3 is still flowing, returns true +// m3's write returned true, so m2 is still flowing, returns true +// m2's write returned true, so m1 is still flowing, returns true +// No event deferrals or buffering along the way! + +m1.write(Buffer.alloc(2048)) // returns true +``` + +It is extremely unlikely that you _don't_ want to buffer any data written, +or _ever_ buffer data that can be flushed all the way through. Neither +node-core streams nor Minipass ever fail to buffer written data, but +node-core streams do a lot of unnecessary buffering and pausing. + +As always, the faster implementation is the one that does less stuff and +waits less time to do it. + +### Immediately emit `end` for empty streams (when not paused) + +If a stream is not paused, and `end()` is called before writing any data +into it, then it will emit `end` immediately. + +If you have logic that occurs on the `end` event which you don't want to +potentially happen immediately (for example, closing file descriptors, +moving on to the next entry in an archive parse stream, etc.) then be sure +to call `stream.pause()` on creation, and then `stream.resume()` once you +are ready to respond to the `end` event. + +### Emit `end` When Asked + +One hazard of immediately emitting `'end'` is that you may not yet have had +a chance to add a listener. In order to avoid this hazard, Minipass +streams safely re-emit the `'end'` event if a new listener is added after +`'end'` has been emitted. + +Ie, if you do `stream.on('end', someFunction)`, and the stream has already +emitted `end`, then it will call the handler right away. (You can think of +this somewhat like attaching a new `.then(fn)` to a previously-resolved +Promise.) + +To prevent calling handlers multiple times who would not expect multiple +ends to occur, all listeners are removed from the `'end'` event whenever it +is emitted. + +### Impact of "immediate flow" on Tee-streams + +A "tee stream" is a stream piping to multiple destinations: + +```js +const tee = new Minipass() +tee.pipe(dest1) +tee.pipe(dest2) +tee.write('foo') // goes to both destinations +``` + +Since Minipass streams _immediately_ process any pending data through the +pipeline when a new pipe destination is added, this can have surprising +effects, especially when a stream comes in from some other function and may +or may not have data in its buffer. + +```js +// WARNING! WILL LOSE DATA! +const src = new Minipass() +src.write('foo') +src.pipe(dest1) // 'foo' chunk flows to dest1 immediately, and is gone +src.pipe(dest2) // gets nothing! +``` + +The solution is to create a dedicated tee-stream junction that pipes to +both locations, and then pipe to _that_ instead. + +```js +// Safe example: tee to both places +const src = new Minipass() +src.write('foo') +const tee = new Minipass() +tee.pipe(dest1) +tee.pipe(dest2) +src.pipe(tee) // tee gets 'foo', pipes to both locations +``` + +The same caveat applies to `on('data')` event listeners.
The first one +added will _immediately_ receive all of the data, leaving nothing for the +second: + +```js +// WARNING! WILL LOSE DATA! +const src = new Minipass() +src.write('foo') +src.on('data', handler1) // receives 'foo' right away +src.on('data', handler2) // nothing to see here! +``` + +Using a dedicated tee-stream can be used in this case as well: + +```js +// Safe example: tee to both data handlers +const src = new Minipass() +src.write('foo') +const tee = new Minipass() +tee.on('data', handler1) +tee.on('data', handler2) +src.pipe(tee) +``` + +## USAGE + +It's a stream! Use it like a stream and it'll most likely do what you want. + +```js +const Minipass = require('minipass') +const mp = new Minipass(options) // optional: { encoding, objectMode } +mp.write('foo') +mp.pipe(someOtherStream) +mp.end('bar') +``` + +### OPTIONS + +* `encoding` How would you like the data coming _out_ of the stream to be + encoded? Accepts any values that can be passed to `Buffer.toString()`. +* `objectMode` Emit data exactly as it comes in. This will be flipped on + by default if you write() something other than a string or Buffer at any + point. Setting `objectMode: true` will prevent setting any encoding + value. + +### API + +Implements the user-facing portions of Node.js's `Readable` and `Writable` +streams. + +### Methods + +* `write(chunk, [encoding], [callback])` - Put data in. (Note that, in the + base Minipass class, the same data will come out.) Returns `false` if + the stream will buffer the next write, or true if it's still in + "flowing" mode. +* `end([chunk, [encoding]], [callback])` - Signal that you have no more + data to write. This will queue an `end` event to be fired when all the + data has been consumed. +* `setEncoding(encoding)` - Set the encoding for data coming out of the + stream. This can only be done once. +* `pause()` - No more data for a while, please. This also prevents `end` + from being emitted for empty streams until the stream is resumed. +* `resume()` - Resume the stream. If there's data in the buffer, it is + all discarded. Any buffered events are immediately emitted. +* `pipe(dest)` - Send all output to the stream provided. There is no way + to unpipe. When data is emitted, it is immediately written to any and + all pipe destinations. +* `on(ev, fn)`, `emit(ev, data)` - Minipass streams are EventEmitters. + Some events are given special treatment, however. (See below under + "events".) +* `promise()` - Returns a Promise that resolves when the stream emits + `end`, or rejects if the stream emits `error`. +* `collect()` - Return a Promise that resolves on `end` with an array + containing each chunk of data that was emitted, or rejects if the + stream emits `error`. Note that this consumes the stream data. +* `concat()` - Same as `collect()`, but concatenates the data into a + single Buffer object. Will reject the returned promise if the stream is + in objectMode, or if it goes into objectMode by the end of the data. +* `read(n)` - Consume `n` bytes of data out of the buffer. If `n` is not + provided, then consume all of it. If `n` bytes are not available, then + it returns null. **Note** consuming streams in this way is less + efficient, and can lead to unnecessary Buffer copying. +* `destroy([er])` - Destroy the stream. If an error is provided, then an + `'error'` event is emitted. If the stream has a `close()` method, and + has not emitted a `'close'` event yet, then `stream.close()` will be + called.
Any Promises returned by `.promise()`, `.collect()` or + `.concat()` will be rejected. After being destroyed, writing to the + stream will emit an error. No more data will be emitted if the stream is + destroyed, even if it was previously buffered. + +### Properties + +* `bufferLength` Read-only. Total number of bytes buffered, or in the case + of objectMode, the total number of objects. +* `encoding` The encoding that has been set. (Setting this is equivalent + to calling `setEncoding(enc)` and has the same prohibition against + setting multiple times.) +* `flowing` Read-only. Boolean indicating whether a chunk written to the + stream will be immediately emitted. +* `emittedEnd` Read-only. Boolean indicating whether the end-ish events + (ie, `end`, `prefinish`, `finish`) have been emitted. Note that + listening on any end-ish event will immediately re-emit it if it has + already been emitted. +* `writable` Whether the stream is writable. Default `true`. Set to + `false` when `end()` is called. +* `readable` Whether the stream is readable. Default `true`. +* `buffer` A [yallist](http://npm.im/yallist) linked list of chunks written + to the stream that have not yet been emitted. (It's probably a bad idea + to mess with this.) +* `pipes` A [yallist](http://npm.im/yallist) linked list of streams that + this stream is piping into. (It's probably a bad idea to mess with + this.) +* `destroyed` A getter that indicates whether the stream was destroyed. +* `paused` True if the stream has been explicitly paused, otherwise false. +* `objectMode` Indicates whether the stream is in `objectMode`. Once set + to `true`, it cannot be set to `false`. + +### Events + +* `data` Emitted when there's data to read. Argument is the data to read. + This is never emitted while not flowing. If a listener is attached, that + will resume the stream. +* `end` Emitted when there's no more data to read. This will be emitted + immediately for empty streams when `end()` is called. If a listener is + attached, and `end` was already emitted, then it will be emitted again. + All listeners are removed when `end` is emitted. +* `prefinish` An end-ish event that follows the same logic as `end` and is + emitted in the same conditions where `end` is emitted. Emitted after + `'end'`. +* `finish` An end-ish event that follows the same logic as `end` and is + emitted in the same conditions where `end` is emitted. Emitted after + `'prefinish'`. +* `close` An indication that an underlying resource has been released. + Minipass does not emit this event, but will defer it until after `end` + has been emitted, since it throws off some stream libraries otherwise. +* `drain` Emitted when the internal buffer empties, and it is again + suitable to `write()` into the stream. +* `readable` Emitted when data is buffered and ready to be read by a + consumer. +* `resume` Emitted when stream changes state from buffering to flowing + mode. (Ie, when `resume` is called, `pipe` is called, or a `data` event + listener is added.) + +### Static Methods + +* `Minipass.isStream(stream)` Returns `true` if the argument is a stream, + and false otherwise. To be considered a stream, the object must be + either an instance of Minipass, or an EventEmitter that has either a + `pipe()` method, or both `write()` and `end()` methods. (Pretty much any + stream in node-land will return `true` for this.) + +## EXAMPLES + +Here are some examples of things you can do with Minipass streams.
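(Editor's aside, not part of the upstream example list: a small sketch of the `Minipass.isStream()` check described under "Static Methods" above, assuming `minipass` is installed.)

```js
const Minipass = require('minipass')
const fs = require('fs')
const EventEmitter = require('events')

console.log(Minipass.isStream(new Minipass()))                  // true: instance of Minipass
console.log(Minipass.isStream(fs.createReadStream(__filename))) // true: EventEmitter with a pipe() method
console.log(Minipass.isStream(new EventEmitter()))              // false: no pipe(), write(), or end()
console.log(Minipass.isStream({}))                              // false: not an EventEmitter at all
```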
+ +### simple "are you done yet" promise + +```js +mp.promise().then(() => { + // stream is finished +}, er => { + // stream emitted an error +}) +``` + +### collecting + +```js +mp.collect().then(all => { + // all is an array of all the data emitted + // encoding is supported in this case, so + // the result will be a collection of strings if + // an encoding is specified, or buffers/objects if not. + // + // In an async function, you may do + // const data = await stream.collect() +}) +``` + +### collecting into a single blob + +This is a bit slower because it concatenates the data into one chunk for +you, but if you're going to do it yourself anyway, it's convenient this +way: + +```js +mp.concat().then(onebigchunk => { + // onebigchunk is a string if the stream + // had an encoding set, or a buffer otherwise. +}) +``` + +### iteration + +You can iterate over streams synchronously or asynchronously in +platforms that support it. + +Synchronous iteration will end when the currently available data is +consumed, even if the `end` event has not been reached. In string and +buffer mode, the data is concatenated, so unless multiple writes are +occurring in the same tick as the `read()`, sync iteration loops will +generally only have a single iteration. + +To consume chunks in this way exactly as they have been written, with +no flattening, create the stream with the `{ objectMode: true }` +option. + +```js +const mp = new Minipass({ objectMode: true }) +mp.write('a') +mp.write('b') +for (let letter of mp) { + console.log(letter) // a, b +} +mp.write('c') +mp.write('d') +for (let letter of mp) { + console.log(letter) // c, d +} +mp.write('e') +mp.end() +for (let letter of mp) { + console.log(letter) // e +} +for (let letter of mp) { + console.log(letter) // nothing +} +``` + +Asynchronous iteration will continue until the end event is reached, +consuming all of the data.
+ +```js +const mp = new Minipass({ encoding: 'utf8' }) + +// some source of some data +let i = 5 +const inter = setInterval(() => { + if (i --> 0) + mp.write(Buffer.from('foo\n', 'utf8')) + else { + mp.end() + clearInterval(inter) + } +}, 100) + +// consume the data with asynchronous iteration +async function consume () { + for await (let chunk of mp) { + console.log(chunk) + } + return 'ok' +} + +consume().then(res => console.log(res)) +// logs `foo\n` 5 times, and then `ok` +``` + +### subclass that `console.log()`s everything written into it + +```js +class Logger extends Minipass { + write (chunk, encoding, callback) { + console.log('WRITE', chunk, encoding) + return super.write(chunk, encoding, callback) + } + end (chunk, encoding, callback) { + console.log('END', chunk, encoding) + return super.end(chunk, encoding, callback) + } +} + +someSource.pipe(new Logger()).pipe(someDest) +``` + +### same thing, but using an inline anonymous class + +```js +// js classes are fun +someSource + .pipe(new (class extends Minipass { + emit (ev, ...data) { + // let's also log events, because debugging some weird thing + console.log('EMIT', ev) + return super.emit(ev, ...data) + } + write (chunk, encoding, callback) { + console.log('WRITE', chunk, encoding) + return super.write(chunk, encoding, callback) + } + end (chunk, encoding, callback) { + console.log('END', chunk, encoding) + return super.end(chunk, encoding, callback) + } + })) + .pipe(someDest) +``` + +### subclass that defers 'end' for some reason + +```js +class SlowEnd extends Minipass { + emit (ev, ...args) { + if (ev === 'end') { + console.log('going to end, hold on a sec') + setTimeout(() => { + console.log('ok, ready to end now') + super.emit('end', ...args) + }, 100) + } else { + return super.emit(ev, ...args) + } + } +} +``` + +### transform that creates newline-delimited JSON + +```js +class NDJSONEncode extends Minipass { + write (obj, cb) { + try { + // JSON.stringify can throw, emit an error on that + return super.write(JSON.stringify(obj) + '\n', 'utf8', cb) + } catch (er) { + this.emit('error', er) + } + } + end (obj, cb) { + if (typeof obj === 'function') { + cb = obj + obj = undefined + } + if (obj !== undefined) { + this.write(obj) + } + return super.end(cb) + } +} +``` + +### transform that parses newline-delimited JSON + +```js +class NDJSONDecode extends Minipass { + constructor (options) { + // always be in object mode, as far as Minipass is concerned + super({ objectMode: true }) + this._jsonBuffer = '' + } + write (chunk, encoding, cb) { + if (typeof chunk === 'string' && + typeof encoding === 'string' && + encoding !== 'utf8') { + chunk = Buffer.from(chunk, encoding).toString() + } else if (Buffer.isBuffer(chunk)) { + chunk = chunk.toString() + } + if (typeof encoding === 'function') { + cb = encoding + } + const jsonData = (this._jsonBuffer + chunk).split('\n') + this._jsonBuffer = jsonData.pop() + for (let i = 0; i < jsonData.length; i++) { + let parsed + try { + // JSON.parse can throw; emit an error and skip that line + parsed = JSON.parse(jsonData[i]) + } catch (er) { + this.emit('error', er) + continue + } + super.write(parsed) + } + if (cb) + cb() + } +} +``` diff --git a/node_modules/fs-minipass/node_modules/minipass/index.js b/node_modules/fs-minipass/node_modules/minipass/index.js new file mode 100644 index 0000000000000..c072352d448a9 --- /dev/null +++ b/node_modules/fs-minipass/node_modules/minipass/index.js @@ -0,0 +1,537 @@ +'use strict' +const EE = require('events') +const Yallist = require('yallist') +const SD = require('string_decoder').StringDecoder + +const EOF = Symbol('EOF') +const
MAYBE_EMIT_END = Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const EMITTING_END = Symbol('emittingEnd') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const FLUSHCHUNK = Symbol('flushChunk') +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const PAUSED = Symbol('paused') +const RESUME = Symbol('resume') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') +const DESTROYED = Symbol('destroyed') + +// TODO remove when Node v8 support drops +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = doIter && Symbol.asyncIterator + || Symbol('asyncIterator not implemented') +const ITERATOR = doIter && Symbol.iterator + || Symbol('iterator not implemented') + +// Buffer in node 4.x < 4.5.0 doesn't have working Buffer.from +// or Buffer.alloc, and Buffer in node 10 deprecated the ctor. +// .M, this is fine .\^/M.. +const B = Buffer.alloc ? Buffer + : /* istanbul ignore next */ require('safe-buffer').Buffer + +// events that mean 'the stream is over' +// these are treated specially, and re-emitted +// if they are listened for after emitting. +const isEndish = ev => + ev === 'end' || + ev === 'finish' || + ev === 'prefinish' + +const isArrayBuffer = b => b instanceof ArrayBuffer || + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0 + +const isArrayBufferView = b => !B.isBuffer(b) && ArrayBuffer.isView(b) + +module.exports = class Minipass extends EE { + constructor (options) { + super() + this[FLOWING] = false + // whether we're explicitly paused + this[PAUSED] = false + this.pipes = new Yallist() + this.buffer = new Yallist() + this[OBJECTMODE] = options && options.objectMode || false + if (this[OBJECTMODE]) + this[ENCODING] = null + else + this[ENCODING] = options && options.encoding || null + if (this[ENCODING] === 'buffer') + this[ENCODING] = null + this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[EMITTING_END] = false + this[CLOSED] = false + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + this[DESTROYED] = false + } + + get bufferLength () { return this[BUFFERLENGTH] } + + get encoding () { return this[ENCODING] } + set encoding (enc) { + if (this[OBJECTMODE]) + throw new Error('cannot set encoding in objectMode') + + if (this[ENCODING] && enc !== this[ENCODING] && + (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH])) + throw new Error('cannot change encoding') + + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? 
new SD(enc) : null + if (this.buffer.length) + this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk)) + } + + this[ENCODING] = enc + } + + setEncoding (enc) { + this.encoding = enc + } + + get objectMode () { return this[OBJECTMODE] } + set objectMode (ॐ ) { this[OBJECTMODE] = this[OBJECTMODE] || !!ॐ } + + write (chunk, encoding, cb) { + if (this[EOF]) + throw new Error('write after end') + + if (this[DESTROYED]) { + this.emit('error', Object.assign( + new Error('Cannot call write after a stream was destroyed'), + { code: 'ERR_STREAM_DESTROYED' } + )) + return true + } + + if (typeof encoding === 'function') + cb = encoding, encoding = 'utf8' + + if (!encoding) + encoding = 'utf8' + + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything else switches us into object mode + if (!this[OBJECTMODE] && !B.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) + chunk = B.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + else if (isArrayBuffer(chunk)) + chunk = B.from(chunk) + else if (typeof chunk !== 'string') + // use the setter so we throw if we have encoding set + this.objectMode = true + } + + // this ensures at this point that the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!this.objectMode && !chunk.length) { + const ret = this.flowing + if (this[BUFFERLENGTH] !== 0) + this.emit('readable') + if (cb) + cb() + return ret + } + + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if (typeof chunk === 'string' && !this[OBJECTMODE] && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) { + chunk = B.from(chunk, encoding) + } + + if (B.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) + + try { + return this.flowing + ? (this.emit('data', chunk), this.flowing) + : (this[BUFFERPUSH](chunk), false) + } finally { + if (this[BUFFERLENGTH] !== 0) + this.emit('readable') + if (cb) + cb() + } + } + + read (n) { + if (this[DESTROYED]) + return null + + try { + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) + return null + + if (this[OBJECTMODE]) + n = null + + if (this.buffer.length > 1 && !this[OBJECTMODE]) { + if (this.encoding) + this.buffer = new Yallist([ + Array.from(this.buffer).join('') + ]) + else + this.buffer = new Yallist([ + B.concat(Array.from(this.buffer), this[BUFFERLENGTH]) + ]) + } + + return this[READ](n || null, this.buffer.head.value) + } finally { + this[MAYBE_EMIT_END]() + } + } + + [READ] (n, chunk) { + if (n === chunk.length || n === null) + this[BUFFERSHIFT]() + else { + this.buffer.head.value = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } + + this.emit('data', chunk) + + if (!this.buffer.length && !this[EOF]) + this.emit('drain') + + return chunk + } + + end (chunk, encoding, cb) { + if (typeof chunk === 'function') + cb = chunk, chunk = null + if (typeof encoding === 'function') + cb = encoding, encoding = 'utf8' + if (chunk) + this.write(chunk, encoding) + if (cb) + this.once('end', cb) + this[EOF] = true + this.writable = false + + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
+ if (this.flowing || !this[PAUSED]) + this[MAYBE_EMIT_END]() + return this + } + + // don't let the internal resume be overwritten + [RESUME] () { + if (this[DESTROYED]) + return + + this[PAUSED] = false + this[FLOWING] = true + this.emit('resume') + if (this.buffer.length) + this[FLUSH]() + else if (this[EOF]) + this[MAYBE_EMIT_END]() + else + this.emit('drain') + } + + resume () { + return this[RESUME]() + } + + pause () { + this[FLOWING] = false + this[PAUSED] = true + } + + get destroyed () { + return this[DESTROYED] + } + + get flowing () { + return this[FLOWING] + } + + get paused () { + return this[PAUSED] + } + + [BUFFERPUSH] (chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1 + else + this[BUFFERLENGTH] += chunk.length + return this.buffer.push(chunk) + } + + [BUFFERSHIFT] () { + if (this.buffer.length) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1 + else + this[BUFFERLENGTH] -= this.buffer.head.value.length + } + return this.buffer.shift() + } + + [FLUSH] () { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]())) + + if (!this.buffer.length && !this[EOF]) + this.emit('drain') + } + + [FLUSHCHUNK] (chunk) { + return chunk ? (this.emit('data', chunk), this.flowing) : false + } + + pipe (dest, opts) { + if (this[DESTROYED]) + return + + const ended = this[EMITTED_END] + opts = opts || {} + if (dest === process.stdout || dest === process.stderr) + opts.end = false + else + opts.end = opts.end !== false + + const p = { dest: dest, opts: opts, ondrain: _ => this[RESUME]() } + this.pipes.push(p) + + dest.on('drain', p.ondrain) + this[RESUME]() + // piping an ended stream ends immediately + if (ended && p.opts.end) + p.dest.end() + return dest + } + + addListener (ev, fn) { + return this.on(ev, fn) + } + + on (ev, fn) { + try { + return super.on(ev, fn) + } finally { + if (ev === 'data' && !this.pipes.length && !this.flowing) + this[RESUME]() + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev) + this.removeAllListeners(ev) + } + } + } + + get emittedEnd () { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END] () { + if (!this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this.buffer.length === 0 && + this[EOF]) { + this[EMITTING_END] = true + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) + this.emit('close') + this[EMITTING_END] = false + } + } + + emit (ev, data) { + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) + return + else if (ev === 'data') { + if (!data) + return + + if (this.pipes.length) + this.pipes.forEach(p => + p.dest.write(data) === false && this.pause()) + } else if (ev === 'end') { + // only actual end gets this treatment + if (this[EMITTED_END] === true) + return + + this[EMITTED_END] = true + this.readable = false + + if (this[DECODER]) { + data = this[DECODER].end() + if (data) { + this.pipes.forEach(p => p.dest.write(data)) + super.emit('data', data) + } + } + + this.pipes.forEach(p => { + p.dest.removeListener('drain', p.ondrain) + if (p.opts.end) + p.dest.end() + }) + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) + return + } + + // TODO: replace with a spread operator when Node v4 support drops + const args = new Array(arguments.length) + args[0] = ev + args[1] = data + if (arguments.length > 2) { + for (let i = 2; i < arguments.length; i++) { + args[i] = arguments[i] + } + } + + try { 
+ return super.emit.apply(this, args) + } finally { + if (!isEndish(ev)) + this[MAYBE_EMIT_END]() + else + this.removeAllListeners(ev) + } + } + + // const all = await stream.collect() + collect () { + const buf = [] + buf.dataLength = 0 + this.on('data', c => { + buf.push(c) + buf.dataLength += c.length + }) + return this.promise().then(() => buf) + } + + // const data = await stream.concat() + concat () { + return this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this.collect().then(buf => + this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this[ENCODING] ? buf.join('') : B.concat(buf, buf.dataLength)) + } + + // stream.promise().then(() => done, er => emitted error) + promise () { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))) + this.on('end', () => resolve()) + this.on('error', er => reject(er)) + }) + } + + // for await (let chunk of stream) + [ASYNCITERATOR] () { + const next = () => { + const res = this.read() + if (res !== null) + return Promise.resolve({ done: false, value: res }) + + if (this[EOF]) + return Promise.resolve({ done: true }) + + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + resolve({ done: true }) + } + const ondestroy = () => onerr(new Error('stream destroyed')) + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once(DESTROYED, ondestroy) + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } + + return { next } + } + + // for (let chunk of stream) + [ITERATOR] () { + const next = () => { + const value = this.read() + const done = value === null + return { value, done } + } + return { next } + } + + destroy (er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er) + else + this.emit(DESTROYED) + return this + } + + this[DESTROYED] = true + + // throw away all buffered data, it's never coming out + this.buffer = new Yallist() + this[BUFFERLENGTH] = 0 + + if (typeof this.close === 'function' && !this[CLOSED]) + this.close() + + if (er) + this.emit('error', er) + else // if no error to emit, still reject pending promises + this.emit(DESTROYED) + + return this + } + + static isStream (s) { + return !!s && (s instanceof Minipass || s instanceof EE && ( + typeof s.pipe === 'function' || // readable + (typeof s.write === 'function' && typeof s.end === 'function') // writable + )) + } +} diff --git a/node_modules/fs-minipass/node_modules/minipass/package.json b/node_modules/fs-minipass/node_modules/minipass/package.json new file mode 100644 index 0000000000000..416e231c9c6a2 --- /dev/null +++ b/node_modules/fs-minipass/node_modules/minipass/package.json @@ -0,0 +1,70 @@ +{ + "_from": "minipass@^2.6.0", + "_id": "minipass@2.9.0", + "_inBundle": false, + "_integrity": "sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==", + "_location": "/fs-minipass/minipass", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "minipass@^2.6.0", + "name": "minipass", + "escapedName": "minipass", + "rawSpec": "^2.6.0", + "saveSpec": null, + 
"fetchSpec": "^2.6.0" + }, + "_requiredBy": [ + "/fs-minipass" + ], + "_resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz", + "_shasum": "e713762e7d3e32fed803115cf93e04bca9fcc9a6", + "_spec": "minipass@^2.6.0", + "_where": "/Users/mperrotte/npminc/cli/node_modules/fs-minipass", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/isaacs/minipass/issues" + }, + "bundleDependencies": false, + "dependencies": { + "safe-buffer": "^5.1.2", + "yallist": "^3.0.0" + }, + "deprecated": false, + "description": "minimal implementation of a PassThrough stream", + "devDependencies": { + "end-of-stream": "^1.4.0", + "tap": "^14.6.5", + "through2": "^2.0.3" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/isaacs/minipass#readme", + "keywords": [ + "passthrough", + "stream" + ], + "license": "ISC", + "main": "index.js", + "name": "minipass", + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minipass.git" + }, + "scripts": { + "postpublish": "git push origin --follow-tags", + "postversion": "npm publish", + "preversion": "npm test", + "test": "tap" + }, + "tap": { + "check-coverage": true + }, + "version": "2.9.0" +} diff --git a/node_modules/fs-minipass/package.json b/node_modules/fs-minipass/package.json index c2f925e8895a6..870d08f6fb963 100644 --- a/node_modules/fs-minipass/package.json +++ b/node_modules/fs-minipass/package.json @@ -1,10 +1,13 @@ { "_from": "fs-minipass@^1.2.5", - "_id": "fs-minipass@1.2.5", + "_id": "fs-minipass@1.2.7", "_inBundle": false, - "_integrity": "sha512-JhBl0skXjUPCFH7x6x61gQxrKyXsxB5gcgePLZCwfyCGGsTISMoIeObbrvVeP6Xmyaudw4TT43qV2Gz+iyd2oQ==", + "_integrity": "sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA==", "_location": "/fs-minipass", - "_phantomChildren": {}, + "_phantomChildren": { + "safe-buffer": "5.1.2", + "yallist": "3.0.3" + }, "_requested": { "type": "range", "registry": true, @@ -18,10 +21,10 @@ "_requiredBy": [ "/tar" ], - "_resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.5.tgz", - "_shasum": "06c277218454ec288df77ada54a03b8702aacb9d", + "_resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.7.tgz", + "_shasum": "ccff8570841e7fe4265693da88936c55aed7f7c7", "_spec": "fs-minipass@^1.2.5", - "_where": "/Users/rebecca/code/npm/node_modules/tar", + "_where": "/Users/mperrotte/npminc/cli/node_modules/tar", "author": { "name": "Isaac Z. 
Schlueter", "email": "i@izs.me", @@ -32,13 +35,13 @@ }, "bundleDependencies": false, "dependencies": { - "minipass": "^2.2.1" + "minipass": "^2.6.0" }, "deprecated": false, "description": "fs read and write streams based on minipass", "devDependencies": { "mutate-fs": "^2.0.1", - "tap": "^10.7.2" + "tap": "^14.6.4" }, "files": [ "index.js" @@ -53,10 +56,13 @@ "url": "git+https://github.com/npm/fs-minipass.git" }, "scripts": { - "postpublish": "git push origin --all; git push origin --tags", + "postpublish": "git push origin --follow-tags", "postversion": "npm publish", "preversion": "npm test", - "test": "tap test/*.js --100 -J" + "test": "tap" + }, + "tap": { + "check-coverage": true }, - "version": "1.2.5" + "version": "1.2.7" } diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/.travis.yml b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/.travis.yml new file mode 100644 index 0000000000000..40992555bf5cc --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/.travis.yml @@ -0,0 +1,55 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 + env: TASK=test + - node_js: 9 + env: TASK=test +script: "npm run $TASK" +env: + global: + - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= + - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 0000000000000..f478d58dca85b --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. 
+ +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/GOVERNANCE.md b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 0000000000000..16ffb93f24bec --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. 
+ +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. 
diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/LICENSE b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000000000..2873b3b2e5950 --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+""" diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/README.md b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/README.md new file mode 100644 index 0000000000000..23fe3f3e3009a --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/README.md @@ -0,0 +1,58 @@ +# readable-stream + +***Node-core v8.11.1 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) + +```bash +npm install --save readable-stream +``` + +***Node-core streams for userland*** + +This package is a mirror of the Streams2 and Streams3 implementations in +Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.11.1/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. 
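(Editor's aside, not part of the upstream README: a minimal sketch of the usage guidance above, i.e. requiring `readable-stream` in place of Node-core's `stream` module so the streams implementation stays pinned regardless of the Node version in use. The uppercasing transform is only an illustration.)

```js
// Instead of: const { Transform } = require('stream')
const { Transform } = require('readable-stream')

// A trivial uppercasing transform built on the userland streams implementation.
const upper = new Transform({
  transform (chunk, encoding, callback) {
    callback(null, chunk.toString().toUpperCase())
  }
})

process.stdin.pipe(upper).pipe(process.stdout)
```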
+ + +## Team Members + +* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> + - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> + - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D +* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md new file mode 100644 index 0000000000000..83275f192e407 --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md @@ -0,0 +1,60 @@ +# streams WG Meeting 2015-01-30 + +## Links + +* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg +* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 +* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ + +## Agenda + +Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. + +* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) +* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) +* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) +* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) + +## Minutes + +### adopt a charter + +* group: +1's all around + +### What versioning scheme should be adopted? +* group: +1’s 3.0.0 +* domenic+group: pulling in patches from other sources where appropriate +* mikeal: version independently, suggesting versions for io.js +* mikeal+domenic: work with TC to notify in advance of changes +simpler stream creation + +### streamline creation of streams +* sam: streamline creation of streams +* domenic: nice simple solution posted + but, we lose the opportunity to change the model + may not be backwards incompatible (double check keys) + + **action item:** domenic will check + +### remove implicit flowing of streams on(‘data’) +* add isFlowing / isPaused +* mikeal: worrying that we’re documenting polyfill methods – confuses users +* domenic: more reflective API is probably good, with warning labels for users +* new section for mad scientists (reflective stream access) +* calvin: name the “third state” +* mikeal: maybe borrow the name from whatwg? +* domenic: we’re missing the “third state” +* consensus: kind of difficult to name the third state +* mikeal: figure out differences in states / compat +* mathias: always flow on data – eliminates third state + * explore what it breaks + +**action items:** +* ask isaac for ability to list packages by what public io.js APIs they use (esp. 
Stream) +* ask rod/build for infrastructure +* **chris**: explore the “flow on data” approach +* add isPaused/isFlowing +* add new docs section +* move isPaused to that section + + diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/duplex-browser.js b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/duplex-browser.js new file mode 100644 index 0000000000000..f8b2db83dbe73 --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/duplex-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_duplex.js'); diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/duplex.js b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/duplex.js new file mode 100644 index 0000000000000..46924cbfdf538 --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require('./readable').Duplex diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000000000..a1ca813e5acbd --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,131 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); + + pna.nextTick(cb, err); +}; \ No newline at end of file diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000000000..a9c835884828d --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,47 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000000000..bf34ac65e1108 --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1019 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Readable; + +/**/ +var isArray = require('isarray'); +/**/ + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var debugUtil = require('util'); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = require('./internal/streams/BufferList'); +var destroyImpl = require('./internal/streams/destroy'); +var StringDecoder; + +util.inherits(Readable, Stream); + +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. 
+ this.highWaterMark = Math.floor(this.highWaterMark); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); + +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } + } + + return needMoreData(state); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. 
+ var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. 
+// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. 
+ function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, unpipeInfo); + }return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this, unpipeInfo); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
+function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000000000..5d1f8b876d98c --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,214 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. 
For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); + } + + ts.writechunk = null; + ts.writecb = null; + + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + + cb(er); + + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. 
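A minimal sketch of the override contract just described, assuming require('readable-stream') resolves to this implementation; the UpperCase name is illustrative only. Each incoming chunk is handed to the readable side with push(), and cb() marks the chunk as consumed so the next one can be written.

var Transform = require('readable-stream').Transform;
var util = require('util');

// Illustrative subclass: upper-cases every chunk that passes through.
function UpperCase(options) {
  if (!(this instanceof UpperCase)) return new UpperCase(options);
  Transform.call(this, options);
}
util.inherits(UpperCase, Transform);

UpperCase.prototype._transform = function (chunk, encoding, cb) {
  this.push(chunk.toString().toUpperCase()); // pass transformed output along
  cb();                                      // this chunk is fully consumed
};

process.stdin.pipe(new UpperCase()).pipe(process.stdout);
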
+Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; + + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); + + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000000000..b3f4e85a2f6e3 --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,687 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
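A minimal sketch of that contract, assuming require('readable-stream') resolves to this copy: supply a write() implementation (the constructor accepts it as an option, as shown further down in this file) and call cb() once the chunk is flushed; buffering and 'drain' emission are handled by the base class.

var Writable = require('readable-stream').Writable;

var sink = new Writable({
  write: function (chunk, encoding, cb) {
    // Pretend to flush the chunk to some underlying resource,
    // then signal completion so the next chunk can be written.
    console.log('wrote %d bytes', chunk.length);
    cb();
  }
});

sink.write('hello');
sink.end('world');
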
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +/**/ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +var destroyImpl = require('./internal/streams/destroy'); + +util.inherits(Writable, Stream); + +function nop() {} + +function WritableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. 
+ if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + 
asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = corkReq; + } else { + state.corkedRequestsFree = corkReq; + } +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); + +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); +}; \ No newline at end of file diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/internal/streams/BufferList.js new file mode 100644 index 0000000000000..aefc68bd90b9c --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/internal/streams/BufferList.js @@ -0,0 +1,79 @@ +'use strict'; + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var Buffer = require('safe-buffer').Buffer; +var util = require('util'); + +function copyBuffer(src, target, offset) { + src.copy(target, offset); +} + +module.exports = function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + BufferList.prototype.push = function push(v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + }; + + BufferList.prototype.unshift = function unshift(v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + }; + + BufferList.prototype.shift = 
function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + }; + + BufferList.prototype.clear = function clear() { + this.head = this.tail = null; + this.length = 0; + }; + + BufferList.prototype.join = function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; + }; + + BufferList.prototype.concat = function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + if (this.length === 1) return this.head.data; + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + }; + + return BufferList; +}(); + +if (util && util.inspect && util.inspect.custom) { + module.exports.prototype[util.inspect.custom] = function () { + var obj = util.inspect({ length: this.length }); + return this.constructor.name + ' ' + obj; + }; +} \ No newline at end of file diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 0000000000000..5a0a0d88cec6f --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,74 @@ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err && (!this._writableState || !this._writableState.errorEmitted)) { + pna.nextTick(emitErrorNT, this, err); + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + pna.nextTick(emitErrorNT, _this, err); + if (_this._writableState) { + _this._writableState.errorEmitted = true; + } + } else if (cb) { + cb(err); + } + }); + + return this; +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy +}; \ No newline at end of file diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/internal/streams/stream-browser.js new 
file mode 100644 index 0000000000000..9332a3fdae706 --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 0000000000000..ce2ad5b6ee57f --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/package.json b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/package.json new file mode 100644 index 0000000000000..6ef1cd32a9de4 --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/package.json @@ -0,0 +1,81 @@ +{ + "_from": "readable-stream@1 || 2", + "_id": "readable-stream@2.3.6", + "_inBundle": false, + "_integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "_location": "/fs-write-stream-atomic/readable-stream", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "readable-stream@1 || 2", + "name": "readable-stream", + "escapedName": "readable-stream", + "rawSpec": "1 || 2", + "saveSpec": null, + "fetchSpec": "1 || 2" + }, + "_requiredBy": [ + "/fs-write-stream-atomic" + ], + "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "_shasum": "b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf", + "_spec": "readable-stream@1 || 2", + "_where": "/Users/aeschright/code/cli/node_modules/fs-write-stream-atomic", + "browser": { + "util": false, + "./readable.js": "./readable-browser.js", + "./writable.js": "./writable-browser.js", + "./duplex.js": "./duplex-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "bugs": { + "url": "https://github.com/nodejs/readable-stream/issues" + }, + "bundleDependencies": false, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "deprecated": false, + "description": "Streams3, a user-land copy of the stream library from Node.js", + "devDependencies": { + "assert": "^1.4.0", + "babel-polyfill": "^6.9.1", + "buffer": "^4.9.0", + "lolex": "^2.3.2", + "nyc": "^6.4.0", + "tap": "^0.7.0", + "tape": "^4.8.0" + }, + "homepage": "https://github.com/nodejs/readable-stream#readme", + "keywords": [ + "readable", + "stream", + "pipe" + ], + "license": "MIT", + "main": "readable.js", + "name": "readable-stream", + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream.git" + }, + "scripts": { + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js" + }, + "version": "2.3.6" +} diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/passthrough.js b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/passthrough.js new file mode 100644 
index 0000000000000..ffd791d7ff275 --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = require('./readable').PassThrough diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/readable-browser.js b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/readable-browser.js new file mode 100644 index 0000000000000..e50372592ee6c --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,7 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/readable.js b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/readable.js new file mode 100644 index 0000000000000..ec89ec5330649 --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/readable.js @@ -0,0 +1,19 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; + exports = module.exports = Stream.Readable; + exports.Readable = Stream.Readable; + exports.Writable = Stream.Writable; + exports.Duplex = Stream.Duplex; + exports.Transform = Stream.Transform; + exports.PassThrough = Stream.PassThrough; + exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); +} diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/transform.js b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/transform.js new file mode 100644 index 0000000000000..b1baba26da03d --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require('./readable').Transform diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/writable-browser.js b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/writable-browser.js new file mode 100644 index 0000000000000..ebdde6a85dcb1 --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/writable-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_writable.js'); diff --git a/node_modules/fs-write-stream-atomic/node_modules/readable-stream/writable.js b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/writable.js new file mode 100644 index 0000000000000..3211a6f80d1ab --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/readable-stream/writable.js @@ -0,0 +1,8 @@ +var Stream = require("stream") +var Writable = require("./lib/_stream_writable.js") + +if (process.env.READABLE_STREAM === 'disable') { + module.exports = Stream && Stream.Writable || Writable +} else { + module.exports = Writable +} diff --git a/node_modules/fs-write-stream-atomic/node_modules/string_decoder/.travis.yml 
b/node_modules/fs-write-stream-atomic/node_modules/string_decoder/.travis.yml new file mode 100644 index 0000000000000..3347a72546505 --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/string_decoder/.travis.yml @@ -0,0 +1,50 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 + env: TASK=test + - node_js: 9 + env: TASK=test diff --git a/node_modules/fs-write-stream-atomic/node_modules/string_decoder/LICENSE b/node_modules/fs-write-stream-atomic/node_modules/string_decoder/LICENSE new file mode 100644 index 0000000000000..778edb20730ef --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + diff --git a/node_modules/fs-write-stream-atomic/node_modules/string_decoder/README.md b/node_modules/fs-write-stream-atomic/node_modules/string_decoder/README.md new file mode 100644 index 0000000000000..5fd58315ed588 --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. + +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/node_modules/fs-write-stream-atomic/node_modules/string_decoder/lib/string_decoder.js b/node_modules/fs-write-stream-atomic/node_modules/string_decoder/lib/string_decoder.js new file mode 100644 index 0000000000000..2e89e63f7933e --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/string_decoder/lib/string_decoder.js @@ -0,0 +1,296 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +/**/ + +var Buffer = require('safe-buffer').Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. 
+function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. 
+function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} \ No newline at end of file diff --git a/node_modules/fs-write-stream-atomic/node_modules/string_decoder/package.json b/node_modules/fs-write-stream-atomic/node_modules/string_decoder/package.json new file mode 100644 index 0000000000000..65d08e2cac18f --- /dev/null +++ b/node_modules/fs-write-stream-atomic/node_modules/string_decoder/package.json @@ -0,0 +1,59 @@ +{ + "_from": "string_decoder@~1.1.1", + "_id": "string_decoder@1.1.1", + "_inBundle": false, + "_integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "_location": "/fs-write-stream-atomic/string_decoder", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "string_decoder@~1.1.1", + "name": "string_decoder", + "escapedName": "string_decoder", + "rawSpec": "~1.1.1", + "saveSpec": null, + "fetchSpec": "~1.1.1" + }, + "_requiredBy": [ + "/fs-write-stream-atomic/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "_shasum": "9cf1611ba62685d7030ae9e4ba34149c3af03fc8", + "_spec": "string_decoder@~1.1.1", + "_where": "/Users/aeschright/code/cli/node_modules/fs-write-stream-atomic/node_modules/readable-stream", + "bugs": { + "url": "https://github.com/nodejs/string_decoder/issues" + }, + "bundleDependencies": false, + "dependencies": { + "safe-buffer": "~5.1.0" + }, + "deprecated": false, + "description": "The string_decoder module from Node core", + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT", + "main": "lib/string_decoder.js", + "name": "string_decoder", + "repository": { + "type": "git", + "url": 
"git://github.com/nodejs/string_decoder.git" + }, + "scripts": { + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "test": "tap test/parallel/*.js && node test/verify-dependencies" + }, + "version": "1.1.1" +} diff --git a/node_modules/fstream/.npmignore b/node_modules/fstream/.npmignore deleted file mode 100644 index 494272a81afb6..0000000000000 --- a/node_modules/fstream/.npmignore +++ /dev/null @@ -1,5 +0,0 @@ -.*.swp -node_modules/ -examples/deep-copy/ -examples/path/ -examples/filter-copy/ diff --git a/node_modules/fstream/.travis.yml b/node_modules/fstream/.travis.yml deleted file mode 100644 index 9f5972ab5aa1c..0000000000000 --- a/node_modules/fstream/.travis.yml +++ /dev/null @@ -1,9 +0,0 @@ -language: node_js -node_js: - - "6" - - "4" - - "0.10" - - "0.12" -before_install: - - "npm config set spin false" - - "npm install -g npm/npm" diff --git a/node_modules/fstream/README.md b/node_modules/fstream/README.md deleted file mode 100644 index 9d8cb77e5c30b..0000000000000 --- a/node_modules/fstream/README.md +++ /dev/null @@ -1,76 +0,0 @@ -Like FS streams, but with stat on them, and supporting directories and -symbolic links, as well as normal files. Also, you can use this to set -the stats on a file, even if you don't change its contents, or to create -a symlink, etc. - -So, for example, you can "write" a directory, and it'll call `mkdir`. You -can specify a uid and gid, and it'll call `chown`. You can specify a -`mtime` and `atime`, and it'll call `utimes`. You can call it a symlink -and provide a `linkpath` and it'll call `symlink`. - -Note that it won't automatically resolve symbolic links. So, if you -call `fstream.Reader('/some/symlink')` then you'll get an object -that stats and then ends immediately (since it has no data). To follow -symbolic links, do this: `fstream.Reader({path:'/some/symlink', follow: -true })`. - -There are various checks to make sure that the bytes emitted are the -same as the intended size, if the size is set. - -## Examples - -```javascript -fstream - .Writer({ path: "path/to/file" - , mode: 0755 - , size: 6 - }) - .write("hello\n") - .end() -``` - -This will create the directories if they're missing, and then write -`hello\n` into the file, chmod it to 0755, and assert that 6 bytes have -been written when it's done. - -```javascript -fstream - .Writer({ path: "path/to/file" - , mode: 0755 - , size: 6 - , flags: "a" - }) - .write("hello\n") - .end() -``` - -You can pass flags in, if you want to append to a file. - -```javascript -fstream - .Writer({ path: "path/to/symlink" - , linkpath: "./file" - , SymbolicLink: true - , mode: "0755" // octal strings supported - }) - .end() -``` - -If isSymbolicLink is a function, it'll be called, and if it returns -true, then it'll treat it as a symlink. If it's not a function, then -any truish value will make a symlink, or you can set `type: -'SymbolicLink'`, which does the same thing. - -Note that the linkpath is relative to the symbolic link location, not -the parent dir or cwd. - -```javascript -fstream - .Reader("path/to/dir") - .pipe(fstream.Writer("path/to/other/dir")) -``` - -This will do like `cp -Rp path/to/dir path/to/other/dir`. If the other -dir exists and isn't a directory, then it'll emit an error. It'll also -set the uid, gid, mode, etc. to be identical. In this way, it's more -like `rsync -a` than simply a copy. 
diff --git a/node_modules/fstream/examples/filter-pipe.js b/node_modules/fstream/examples/filter-pipe.js deleted file mode 100644 index 83dadef8a6f39..0000000000000 --- a/node_modules/fstream/examples/filter-pipe.js +++ /dev/null @@ -1,134 +0,0 @@ -var fstream = require('../fstream.js') -var path = require('path') - -var r = fstream.Reader({ - path: path.dirname(__dirname), - filter: function () { - return !this.basename.match(/^\./) && - !this.basename.match(/^node_modules$/) && - !this.basename.match(/^deep-copy$/) && - !this.basename.match(/^filter-copy$/) - } -}) - -// this writer will only write directories -var w = fstream.Writer({ - path: path.resolve(__dirname, 'filter-copy'), - type: 'Directory', - filter: function () { - return this.type === 'Directory' - } -}) - -var indent = '' - -r.on('entry', appears) -r.on('ready', function () { - console.error('ready to begin!', r.path) -}) - -function appears (entry) { - console.error(indent + 'a %s appears!', entry.type, entry.basename, typeof entry.basename) - if (foggy) { - console.error('FOGGY!') - var p = entry - do { - console.error(p.depth, p.path, p._paused) - p = p.parent - } while (p) - - throw new Error('\u001b[mshould not have entries while foggy') - } - indent += '\t' - entry.on('data', missile(entry)) - entry.on('end', runaway(entry)) - entry.on('entry', appears) -} - -var foggy -function missile (entry) { - function liftFog (who) { - if (!foggy) return - if (who) { - console.error('%s breaks the spell!', who && who.path) - } else { - console.error('the spell expires!') - } - console.error('\u001b[mthe fog lifts!\n') - clearTimeout(foggy) - foggy = null - if (entry._paused) entry.resume() - } - - if (entry.type === 'Directory') { - var ended = false - entry.once('end', function () { ended = true }) - return function (c) { - // throw in some pathological pause()/resume() behavior - // just for extra fun. - process.nextTick(function () { - if (!foggy && !ended) { // && Math.random() < 0.3) { - console.error(indent + '%s casts a spell', entry.basename) - console.error('\na slowing fog comes over the battlefield...\n\u001b[32m') - entry.pause() - entry.once('resume', liftFog) - foggy = setTimeout(liftFog, 1000) - } - }) - } - } - - return function (c) { - var e = Math.random() < 0.5 - console.error(indent + '%s %s for %d damage!', - entry.basename, - e ? 'is struck' : 'fires a chunk', - c.length) - } -} - -function runaway (entry) { - return function () { - var e = Math.random() < 0.5 - console.error(indent + '%s %s', - entry.basename, - e ? 'turns to flee' : 'is vanquished!') - indent = indent.slice(0, -1) - } -} - -w.on('entry', attacks) -// w.on('ready', function () { attacks(w) }) -function attacks (entry) { - console.error(indent + '%s %s!', entry.basename, - entry.type === 'Directory' ? 'calls for backup' : 'attacks') - entry.on('entry', attacks) -} - -var ended = false -var i = 1 -r.on('end', function () { - if (foggy) clearTimeout(foggy) - console.error("\u001b[mIT'S OVER!!") - console.error('A WINNAR IS YOU!') - - console.log('ok ' + (i++) + ' A WINNAR IS YOU') - ended = true - // now go through and verify that everything in there is a dir. - var p = path.resolve(__dirname, 'filter-copy') - var checker = fstream.Reader({ path: p }) - checker.checker = true - checker.on('child', function (e) { - var ok = e.type === 'Directory' - console.log((ok ? '' : 'not ') + 'ok ' + (i++) + - ' should be a dir: ' + - e.path.substr(checker.path.length + 1)) - }) -}) - -process.on('exit', function () { - console.log((ended ? 
'' : 'not ') + 'ok ' + (i) + ' ended') - console.log('1..' + i) -}) - -r.pipe(w) diff --git a/node_modules/fstream/examples/pipe.js b/node_modules/fstream/examples/pipe.js deleted file mode 100644 index 3de42ef32bfd7..0000000000000 --- a/node_modules/fstream/examples/pipe.js +++ /dev/null @@ -1,118 +0,0 @@ -var fstream = require('../fstream.js') -var path = require('path') - -var r = fstream.Reader({ - path: path.dirname(__dirname), - filter: function () { - return !this.basename.match(/^\./) && - !this.basename.match(/^node_modules$/) && - !this.basename.match(/^deep-copy$/) - } -}) - -var w = fstream.Writer({ - path: path.resolve(__dirname, 'deep-copy'), - type: 'Directory' -}) - -var indent = '' - -r.on('entry', appears) -r.on('ready', function () { - console.error('ready to begin!', r.path) -}) - -function appears (entry) { - console.error(indent + 'a %s appears!', entry.type, entry.basename, typeof entry.basename, entry) - if (foggy) { - console.error('FOGGY!') - var p = entry - do { - console.error(p.depth, p.path, p._paused) - p = p.parent - } while (p) - - throw new Error('\u001b[mshould not have entries while foggy') - } - indent += '\t' - entry.on('data', missile(entry)) - entry.on('end', runaway(entry)) - entry.on('entry', appears) -} - -var foggy -function missile (entry) { - function liftFog (who) { - if (!foggy) return - if (who) { - console.error('%s breaks the spell!', who && who.path) - } else { - console.error('the spell expires!') - } - console.error('\u001b[mthe fog lifts!\n') - clearTimeout(foggy) - foggy = null - if (entry._paused) entry.resume() - } - - if (entry.type === 'Directory') { - var ended = false - entry.once('end', function () { ended = true }) - return function (c) { - // throw in some pathological pause()/resume() behavior - // just for extra fun. - process.nextTick(function () { - if (!foggy && !ended) { // && Math.random() < 0.3) { - console.error(indent + '%s casts a spell', entry.basename) - console.error('\na slowing fog comes over the battlefield...\n\u001b[32m') - entry.pause() - entry.once('resume', liftFog) - foggy = setTimeout(liftFog, 10) - } - }) - } - } - - return function (c) { - var e = Math.random() < 0.5 - console.error(indent + '%s %s for %d damage!', - entry.basename, - e ? 'is struck' : 'fires a chunk', - c.length) - } -} - -function runaway (entry) { - return function () { - var e = Math.random() < 0.5 - console.error(indent + '%s %s', - entry.basename, - e ? 'turns to flee' : 'is vanquished!') - indent = indent.slice(0, -1) - } -} - -w.on('entry', attacks) -// w.on('ready', function () { attacks(w) }) -function attacks (entry) { - console.error(indent + '%s %s!', entry.basename, - entry.type === 'Directory' ? 'calls for backup' : 'attacks') - entry.on('entry', attacks) -} - -var ended = false -r.on('end', function () { - if (foggy) clearTimeout(foggy) - console.error("\u001b[mIT'S OVER!!") - console.error('A WINNAR IS YOU!') - - console.log('ok 1 A WINNAR IS YOU') - ended = true -}) - -process.on('exit', function () { - console.log((ended ? 
'' : 'not ') + 'ok 2 ended') - console.log('1..2') -}) - -r.pipe(w) diff --git a/node_modules/fstream/examples/reader.js b/node_modules/fstream/examples/reader.js deleted file mode 100644 index 19affbe7e6e23..0000000000000 --- a/node_modules/fstream/examples/reader.js +++ /dev/null @@ -1,68 +0,0 @@ -var fstream = require('../fstream.js') -var tap = require('tap') -var fs = require('fs') -var path = require('path') -var dir = path.dirname(__dirname) - -tap.test('reader test', function (t) { - var children = -1 - var gotReady = false - var ended = false - - var r = fstream.Reader({ - path: dir, - filter: function () { - // return this.parent === r - return this.parent === r || this === r - } - }) - - r.on('ready', function () { - gotReady = true - children = fs.readdirSync(dir).length - console.error('Setting expected children to ' + children) - t.equal(r.type, 'Directory', 'should be a directory') - }) - - r.on('entry', function (entry) { - children-- - if (!gotReady) { - t.fail('children before ready!') - } - t.equal(entry.dirname, r.path, 'basename is parent dir') - }) - - r.on('error', function (er) { - t.fail(er) - t.end() - process.exit(1) - }) - - r.on('end', function () { - t.equal(children, 0, 'should have seen all children') - ended = true - }) - - var closed = false - r.on('close', function () { - t.ok(ended, 'saw end before close') - t.notOk(closed, 'close should only happen once') - closed = true - t.end() - }) -}) - -tap.test('reader error test', function (t) { - // assumes non-root on a *nix system - var r = fstream.Reader({ path: '/etc/shadow' }) - - r.once('error', function (er) { - t.ok(true) - t.end() - }) - - r.on('end', function () { - t.fail('reader ended without error') - t.end() - }) -}) diff --git a/node_modules/fstream/examples/symlink-write.js b/node_modules/fstream/examples/symlink-write.js deleted file mode 100644 index 19e81eea9fe80..0000000000000 --- a/node_modules/fstream/examples/symlink-write.js +++ /dev/null @@ -1,27 +0,0 @@ -var fstream = require('../fstream.js') -var notOpen = false -process.chdir(__dirname) - -fstream - .Writer({ - path: 'path/to/symlink', - linkpath: './file', - isSymbolicLink: true, - mode: '0755' // octal strings supported - }) - .on('close', function () { - notOpen = true - var fs = require('fs') - var s = fs.lstatSync('path/to/symlink') - var isSym = s.isSymbolicLink() - console.log((isSym ? '' : 'not ') + 'ok 1 should be symlink') - var t = fs.readlinkSync('path/to/symlink') - var isTarget = t === './file' - console.log((isTarget ? '' : 'not ') + 'ok 2 should link to ./file') - }) - .end() - -process.on('exit', function () { - console.log((notOpen ? 
'' : 'not ') + 'ok 3 should be closed') - console.log('1..3') -}) diff --git a/node_modules/fstream/fstream.js b/node_modules/fstream/fstream.js deleted file mode 100644 index c0eb3bea78803..0000000000000 --- a/node_modules/fstream/fstream.js +++ /dev/null @@ -1,35 +0,0 @@ -exports.Abstract = require('./lib/abstract.js') -exports.Reader = require('./lib/reader.js') -exports.Writer = require('./lib/writer.js') - -exports.File = { - Reader: require('./lib/file-reader.js'), - Writer: require('./lib/file-writer.js') -} - -exports.Dir = { - Reader: require('./lib/dir-reader.js'), - Writer: require('./lib/dir-writer.js') -} - -exports.Link = { - Reader: require('./lib/link-reader.js'), - Writer: require('./lib/link-writer.js') -} - -exports.Proxy = { - Reader: require('./lib/proxy-reader.js'), - Writer: require('./lib/proxy-writer.js') -} - -exports.Reader.Dir = exports.DirReader = exports.Dir.Reader -exports.Reader.File = exports.FileReader = exports.File.Reader -exports.Reader.Link = exports.LinkReader = exports.Link.Reader -exports.Reader.Proxy = exports.ProxyReader = exports.Proxy.Reader - -exports.Writer.Dir = exports.DirWriter = exports.Dir.Writer -exports.Writer.File = exports.FileWriter = exports.File.Writer -exports.Writer.Link = exports.LinkWriter = exports.Link.Writer -exports.Writer.Proxy = exports.ProxyWriter = exports.Proxy.Writer - -exports.collect = require('./lib/collect.js') diff --git a/node_modules/fstream/lib/abstract.js b/node_modules/fstream/lib/abstract.js deleted file mode 100644 index 97c120e1d5277..0000000000000 --- a/node_modules/fstream/lib/abstract.js +++ /dev/null @@ -1,85 +0,0 @@ -// the parent class for all fstreams. - -module.exports = Abstract - -var Stream = require('stream').Stream -var inherits = require('inherits') - -function Abstract () { - Stream.call(this) -} - -inherits(Abstract, Stream) - -Abstract.prototype.on = function (ev, fn) { - if (ev === 'ready' && this.ready) { - process.nextTick(fn.bind(this)) - } else { - Stream.prototype.on.call(this, ev, fn) - } - return this -} - -Abstract.prototype.abort = function () { - this._aborted = true - this.emit('abort') -} - -Abstract.prototype.destroy = function () {} - -Abstract.prototype.warn = function (msg, code) { - var self = this - var er = decorate(msg, code, self) - if (!self.listeners('warn')) { - console.error('%s %s\n' + - 'path = %s\n' + - 'syscall = %s\n' + - 'fstream_type = %s\n' + - 'fstream_path = %s\n' + - 'fstream_unc_path = %s\n' + - 'fstream_class = %s\n' + - 'fstream_stack =\n%s\n', - code || 'UNKNOWN', - er.stack, - er.path, - er.syscall, - er.fstream_type, - er.fstream_path, - er.fstream_unc_path, - er.fstream_class, - er.fstream_stack.join('\n')) - } else { - self.emit('warn', er) - } -} - -Abstract.prototype.info = function (msg, code) { - this.emit('info', msg, code) -} - -Abstract.prototype.error = function (msg, code, th) { - var er = decorate(msg, code, this) - if (th) throw er - else this.emit('error', er) -} - -function decorate (er, code, self) { - if (!(er instanceof Error)) er = new Error(er) - er.code = er.code || code - er.path = er.path || self.path - er.fstream_type = er.fstream_type || self.type - er.fstream_path = er.fstream_path || self.path - if (self._path !== self.path) { - er.fstream_unc_path = er.fstream_unc_path || self._path - } - if (self.linkpath) { - er.fstream_linkpath = er.fstream_linkpath || self.linkpath - } - er.fstream_class = er.fstream_class || self.constructor.name - er.fstream_stack = er.fstream_stack || - new 
Error().stack.split(/\n/).slice(3).map(function (s) { - return s.replace(/^ {4}at /, '') - }) - - return er -} diff --git a/node_modules/fstream/lib/collect.js b/node_modules/fstream/lib/collect.js deleted file mode 100644 index e5d4f35833476..0000000000000 --- a/node_modules/fstream/lib/collect.js +++ /dev/null @@ -1,70 +0,0 @@ -module.exports = collect - -function collect (stream) { - if (stream._collected) return - - if (stream._paused) return stream.on('resume', collect.bind(null, stream)) - - stream._collected = true - stream.pause() - - stream.on('data', save) - stream.on('end', save) - var buf = [] - function save (b) { - if (typeof b === 'string') b = new Buffer(b) - if (Buffer.isBuffer(b) && !b.length) return - buf.push(b) - } - - stream.on('entry', saveEntry) - var entryBuffer = [] - function saveEntry (e) { - collect(e) - entryBuffer.push(e) - } - - stream.on('proxy', proxyPause) - function proxyPause (p) { - p.pause() - } - - // replace the pipe method with a new version that will - // unlock the buffered stuff. if you just call .pipe() - // without a destination, then it'll re-play the events. - stream.pipe = (function (orig) { - return function (dest) { - // console.error(' === open the pipes', dest && dest.path) - - // let the entries flow through one at a time. - // Once they're all done, then we can resume completely. - var e = 0 - ;(function unblockEntry () { - var entry = entryBuffer[e++] - // console.error(" ==== unblock entry", entry && entry.path) - if (!entry) return resume() - entry.on('end', unblockEntry) - if (dest) dest.add(entry) - else stream.emit('entry', entry) - })() - - function resume () { - stream.removeListener('entry', saveEntry) - stream.removeListener('data', save) - stream.removeListener('end', save) - - stream.pipe = orig - if (dest) stream.pipe(dest) - - buf.forEach(function (b) { - if (b) stream.emit('data', b) - else stream.emit('end') - }) - - stream.resume() - } - - return dest - } - })(stream.pipe) -} diff --git a/node_modules/fstream/lib/dir-reader.js b/node_modules/fstream/lib/dir-reader.js deleted file mode 100644 index 820cdc85a8e9c..0000000000000 --- a/node_modules/fstream/lib/dir-reader.js +++ /dev/null @@ -1,252 +0,0 @@ -// A thing that emits "entry" events with Reader objects -// Pausing it causes it to stop emitting entry events, and also -// pauses the current entry if there is one. - -module.exports = DirReader - -var fs = require('graceful-fs') -var inherits = require('inherits') -var path = require('path') -var Reader = require('./reader.js') -var assert = require('assert').ok - -inherits(DirReader, Reader) - -function DirReader (props) { - var self = this - if (!(self instanceof DirReader)) { - throw new Error('DirReader must be called as constructor.') - } - - // should already be established as a Directory type - if (props.type !== 'Directory' || !props.Directory) { - throw new Error('Non-directory type ' + props.type) - } - - self.entries = null - self._index = -1 - self._paused = false - self._length = -1 - - if (props.sort) { - this.sort = props.sort - } - - Reader.call(this, props) -} - -DirReader.prototype._getEntries = function () { - var self = this - - // race condition. might pause() before calling _getEntries, - // and then resume, and try to get them a second time. 
- if (self._gotEntries) return - self._gotEntries = true - - fs.readdir(self._path, function (er, entries) { - if (er) return self.error(er) - - self.entries = entries - - self.emit('entries', entries) - if (self._paused) self.once('resume', processEntries) - else processEntries() - - function processEntries () { - self._length = self.entries.length - if (typeof self.sort === 'function') { - self.entries = self.entries.sort(self.sort.bind(self)) - } - self._read() - } - }) -} - -// start walking the dir, and emit an "entry" event for each one. -DirReader.prototype._read = function () { - var self = this - - if (!self.entries) return self._getEntries() - - if (self._paused || self._currentEntry || self._aborted) { - // console.error('DR paused=%j, current=%j, aborted=%j', self._paused, !!self._currentEntry, self._aborted) - return - } - - self._index++ - if (self._index >= self.entries.length) { - if (!self._ended) { - self._ended = true - self.emit('end') - self.emit('close') - } - return - } - - // ok, handle this one, then. - - // save creating a proxy, by stat'ing the thing now. - var p = path.resolve(self._path, self.entries[self._index]) - assert(p !== self._path) - assert(self.entries[self._index]) - - // set this to prevent trying to _read() again in the stat time. - self._currentEntry = p - fs[ self.props.follow ? 'stat' : 'lstat' ](p, function (er, stat) { - if (er) return self.error(er) - - var who = self._proxy || self - - stat.path = p - stat.basename = path.basename(p) - stat.dirname = path.dirname(p) - var childProps = self.getChildProps.call(who, stat) - childProps.path = p - childProps.basename = path.basename(p) - childProps.dirname = path.dirname(p) - - var entry = Reader(childProps, stat) - - // console.error("DR Entry", p, stat.size) - - self._currentEntry = entry - - // "entry" events are for direct entries in a specific dir. - // "child" events are for any and all children at all levels. - // This nomenclature is not completely final. - - entry.on('pause', function (who) { - if (!self._paused && !entry._disowned) { - self.pause(who) - } - }) - - entry.on('resume', function (who) { - if (self._paused && !entry._disowned) { - self.resume(who) - } - }) - - entry.on('stat', function (props) { - self.emit('_entryStat', entry, props) - if (entry._aborted) return - if (entry._paused) { - entry.once('resume', function () { - self.emit('entryStat', entry, props) - }) - } else self.emit('entryStat', entry, props) - }) - - entry.on('ready', function EMITCHILD () { - // console.error("DR emit child", entry._path) - if (self._paused) { - // console.error(" DR emit child - try again later") - // pause the child, and emit the "entry" event once we drain. - // console.error("DR pausing child entry") - entry.pause(self) - return self.once('resume', EMITCHILD) - } - - // skip over sockets. they can't be piped around properly, - // so there's really no sense even acknowledging them. - // if someone really wants to see them, they can listen to - // the "socket" events. - if (entry.type === 'Socket') { - self.emit('socket', entry) - } else { - self.emitEntry(entry) - } - }) - - var ended = false - entry.on('close', onend) - entry.on('disown', onend) - function onend () { - if (ended) return - ended = true - self.emit('childEnd', entry) - self.emit('entryEnd', entry) - self._currentEntry = null - if (!self._paused) { - self._read() - } - } - - // XXX Remove this. Works in node as of 0.6.2 or so. - // Long filenames should not break stuff. 
- entry.on('error', function (er) { - if (entry._swallowErrors) { - self.warn(er) - entry.emit('end') - entry.emit('close') - } else { - self.emit('error', er) - } - }) - - // proxy up some events. - ;[ - 'child', - 'childEnd', - 'warn' - ].forEach(function (ev) { - entry.on(ev, self.emit.bind(self, ev)) - }) - }) -} - -DirReader.prototype.disown = function (entry) { - entry.emit('beforeDisown') - entry._disowned = true - entry.parent = entry.root = null - if (entry === this._currentEntry) { - this._currentEntry = null - } - entry.emit('disown') -} - -DirReader.prototype.getChildProps = function () { - return { - depth: this.depth + 1, - root: this.root || this, - parent: this, - follow: this.follow, - filter: this.filter, - sort: this.props.sort, - hardlinks: this.props.hardlinks - } -} - -DirReader.prototype.pause = function (who) { - var self = this - if (self._paused) return - who = who || self - self._paused = true - if (self._currentEntry && self._currentEntry.pause) { - self._currentEntry.pause(who) - } - self.emit('pause', who) -} - -DirReader.prototype.resume = function (who) { - var self = this - if (!self._paused) return - who = who || self - - self._paused = false - // console.error('DR Emit Resume', self._path) - self.emit('resume', who) - if (self._paused) { - // console.error('DR Re-paused', self._path) - return - } - - if (self._currentEntry) { - if (self._currentEntry.resume) self._currentEntry.resume(who) - } else self._read() -} - -DirReader.prototype.emitEntry = function (entry) { - this.emit('entry', entry) - this.emit('child', entry) -} diff --git a/node_modules/fstream/lib/dir-writer.js b/node_modules/fstream/lib/dir-writer.js deleted file mode 100644 index ec50dca900dcf..0000000000000 --- a/node_modules/fstream/lib/dir-writer.js +++ /dev/null @@ -1,174 +0,0 @@ -// It is expected that, when .add() returns false, the consumer -// of the DirWriter will pause until a "drain" event occurs. Note -// that this is *almost always going to be the case*, unless the -// thing being written is some sort of unsupported type, and thus -// skipped over. - -module.exports = DirWriter - -var Writer = require('./writer.js') -var inherits = require('inherits') -var mkdir = require('mkdirp') -var path = require('path') -var collect = require('./collect.js') - -inherits(DirWriter, Writer) - -function DirWriter (props) { - var self = this - if (!(self instanceof DirWriter)) { - self.error('DirWriter must be called as constructor.', null, true) - } - - // should already be established as a Directory type - if (props.type !== 'Directory' || !props.Directory) { - self.error('Non-directory type ' + props.type + ' ' + - JSON.stringify(props), null, true) - } - - Writer.call(this, props) -} - -DirWriter.prototype._create = function () { - var self = this - mkdir(self._path, Writer.dirmode, function (er) { - if (er) return self.error(er) - // ready to start getting entries! - self.ready = true - self.emit('ready') - self._process() - }) -} - -// a DirWriter has an add(entry) method, but its .write() doesn't -// do anything. Why a no-op rather than a throw? Because this -// leaves open the door for writing directory metadata for -// gnu/solaris style dumpdirs. 
-DirWriter.prototype.write = function () { - return true -} - -DirWriter.prototype.end = function () { - this._ended = true - this._process() -} - -DirWriter.prototype.add = function (entry) { - var self = this - - // console.error('\tadd', entry._path, '->', self._path) - collect(entry) - if (!self.ready || self._currentEntry) { - self._buffer.push(entry) - return false - } - - // create a new writer, and pipe the incoming entry into it. - if (self._ended) { - return self.error('add after end') - } - - self._buffer.push(entry) - self._process() - - return this._buffer.length === 0 -} - -DirWriter.prototype._process = function () { - var self = this - - // console.error('DW Process p=%j', self._processing, self.basename) - - if (self._processing) return - - var entry = self._buffer.shift() - if (!entry) { - // console.error("DW Drain") - self.emit('drain') - if (self._ended) self._finish() - return - } - - self._processing = true - // console.error("DW Entry", entry._path) - - self.emit('entry', entry) - - // ok, add this entry - // - // don't allow recursive copying - var p = entry - var pp - do { - pp = p._path || p.path - if (pp === self.root._path || pp === self._path || - (pp && pp.indexOf(self._path) === 0)) { - // console.error('DW Exit (recursive)', entry.basename, self._path) - self._processing = false - if (entry._collected) entry.pipe() - return self._process() - } - p = p.parent - } while (p) - - // console.error("DW not recursive") - - // chop off the entry's root dir, replace with ours - var props = { - parent: self, - root: self.root || self, - type: entry.type, - depth: self.depth + 1 - } - - pp = entry._path || entry.path || entry.props.path - if (entry.parent) { - pp = pp.substr(entry.parent._path.length + 1) - } - // get rid of any ../../ shenanigans - props.path = path.join(self.path, path.join('/', pp)) - - // if i have a filter, the child should inherit it. - props.filter = self.filter - - // all the rest of the stuff, copy over from the source. - Object.keys(entry.props).forEach(function (k) { - if (!props.hasOwnProperty(k)) { - props[k] = entry.props[k] - } - }) - - // not sure at this point what kind of writer this is. - var child = self._currentChild = new Writer(props) - child.on('ready', function () { - // console.error("DW Child Ready", child.type, child._path) - // console.error(" resuming", entry._path) - entry.pipe(child) - entry.resume() - }) - - // XXX Make this work in node. - // Long filenames should not break stuff. - child.on('error', function (er) { - if (child._swallowErrors) { - self.warn(er) - child.emit('end') - child.emit('close') - } else { - self.emit('error', er) - } - }) - - // we fire _end internally *after* end, so that we don't move on - // until any "end" listeners have had their chance to do stuff. 
- child.on('close', onend) - var ended = false - function onend () { - if (ended) return - ended = true - // console.error("* DW Child end", child.basename) - self._currentChild = null - self._processing = false - self._process() - } -} diff --git a/node_modules/fstream/lib/file-reader.js b/node_modules/fstream/lib/file-reader.js deleted file mode 100644 index baa01f4b3db7e..0000000000000 --- a/node_modules/fstream/lib/file-reader.js +++ /dev/null @@ -1,150 +0,0 @@ -// Basically just a wrapper around an fs.ReadStream - -module.exports = FileReader - -var fs = require('graceful-fs') -var inherits = require('inherits') -var Reader = require('./reader.js') -var EOF = {EOF: true} -var CLOSE = {CLOSE: true} - -inherits(FileReader, Reader) - -function FileReader (props) { - // console.error(" FR create", props.path, props.size, new Error().stack) - var self = this - if (!(self instanceof FileReader)) { - throw new Error('FileReader must be called as constructor.') - } - - // should already be established as a File type - // XXX Todo: preserve hardlinks by tracking dev+inode+nlink, - // with a HardLinkReader class. - if (!((props.type === 'Link' && props.Link) || - (props.type === 'File' && props.File))) { - throw new Error('Non-file type ' + props.type) - } - - self._buffer = [] - self._bytesEmitted = 0 - Reader.call(self, props) -} - -FileReader.prototype._getStream = function () { - var self = this - var stream = self._stream = fs.createReadStream(self._path, self.props) - - if (self.props.blksize) { - stream.bufferSize = self.props.blksize - } - - stream.on('open', self.emit.bind(self, 'open')) - - stream.on('data', function (c) { - // console.error('\t\t%d %s', c.length, self.basename) - self._bytesEmitted += c.length - // no point saving empty chunks - if (!c.length) { - return - } else if (self._paused || self._buffer.length) { - self._buffer.push(c) - self._read() - } else self.emit('data', c) - }) - - stream.on('end', function () { - if (self._paused || self._buffer.length) { - // console.error('FR Buffering End', self._path) - self._buffer.push(EOF) - self._read() - } else { - self.emit('end') - } - - if (self._bytesEmitted !== self.props.size) { - self.error("Didn't get expected byte count\n" + - 'expect: ' + self.props.size + '\n' + - 'actual: ' + self._bytesEmitted) - } - }) - - stream.on('close', function () { - if (self._paused || self._buffer.length) { - // console.error('FR Buffering Close', self._path) - self._buffer.push(CLOSE) - self._read() - } else { - // console.error('FR close 1', self._path) - self.emit('close') - } - }) - - stream.on('error', function (e) { - self.emit('error', e) - }) - - self._read() -} - -FileReader.prototype._read = function () { - var self = this - // console.error('FR _read', self._path) - if (self._paused) { - // console.error('FR _read paused', self._path) - return - } - - if (!self._stream) { - // console.error('FR _getStream calling', self._path) - return self._getStream() - } - - // clear out the buffer, if there is one. 
- if (self._buffer.length) { - // console.error('FR _read has buffer', self._buffer.length, self._path) - var buf = self._buffer - for (var i = 0, l = buf.length; i < l; i++) { - var c = buf[i] - if (c === EOF) { - // console.error('FR Read emitting buffered end', self._path) - self.emit('end') - } else if (c === CLOSE) { - // console.error('FR Read emitting buffered close', self._path) - self.emit('close') - } else { - // console.error('FR Read emitting buffered data', self._path) - self.emit('data', c) - } - - if (self._paused) { - // console.error('FR Read Re-pausing at '+i, self._path) - self._buffer = buf.slice(i) - return - } - } - self._buffer.length = 0 - } -// console.error("FR _read done") -// that's about all there is to it. -} - -FileReader.prototype.pause = function (who) { - var self = this - // console.error('FR Pause', self._path) - if (self._paused) return - who = who || self - self._paused = true - if (self._stream) self._stream.pause() - self.emit('pause', who) -} - -FileReader.prototype.resume = function (who) { - var self = this - // console.error('FR Resume', self._path) - if (!self._paused) return - who = who || self - self.emit('resume', who) - self._paused = false - if (self._stream) self._stream.resume() - self._read() -} diff --git a/node_modules/fstream/lib/file-writer.js b/node_modules/fstream/lib/file-writer.js deleted file mode 100644 index 4c803d8d68d2f..0000000000000 --- a/node_modules/fstream/lib/file-writer.js +++ /dev/null @@ -1,107 +0,0 @@ -module.exports = FileWriter - -var fs = require('graceful-fs') -var Writer = require('./writer.js') -var inherits = require('inherits') -var EOF = {} - -inherits(FileWriter, Writer) - -function FileWriter (props) { - var self = this - if (!(self instanceof FileWriter)) { - throw new Error('FileWriter must be called as constructor.') - } - - // should already be established as a File type - if (props.type !== 'File' || !props.File) { - throw new Error('Non-file type ' + props.type) - } - - self._buffer = [] - self._bytesWritten = 0 - - Writer.call(this, props) -} - -FileWriter.prototype._create = function () { - var self = this - if (self._stream) return - - var so = {} - if (self.props.flags) so.flags = self.props.flags - so.mode = Writer.filemode - if (self._old && self._old.blksize) so.bufferSize = self._old.blksize - - self._stream = fs.createWriteStream(self._path, so) - - self._stream.on('open', function () { - // console.error("FW open", self._buffer, self._path) - self.ready = true - self._buffer.forEach(function (c) { - if (c === EOF) self._stream.end() - else self._stream.write(c) - }) - self.emit('ready') - // give this a kick just in case it needs it. - self.emit('drain') - }) - - self._stream.on('error', function (er) { self.emit('error', er) }) - - self._stream.on('drain', function () { self.emit('drain') }) - - self._stream.on('close', function () { - // console.error('\n\nFW Stream Close', self._path, self.size) - self._finish() - }) -} - -FileWriter.prototype.write = function (c) { - var self = this - - self._bytesWritten += c.length - - if (!self.ready) { - if (!Buffer.isBuffer(c) && typeof c !== 'string') { - throw new Error('invalid write data') - } - self._buffer.push(c) - return false - } - - var ret = self._stream.write(c) - // console.error('\t-- fw wrote, _stream says', ret, self._stream._queue.length) - - // allow 2 buffered writes, because otherwise there's just too - // much stop and go bs. 
- if (ret === false && self._stream._queue) { - return self._stream._queue.length <= 2 - } else { - return ret - } -} - -FileWriter.prototype.end = function (c) { - var self = this - - if (c) self.write(c) - - if (!self.ready) { - self._buffer.push(EOF) - return false - } - - return self._stream.end() -} - -FileWriter.prototype._finish = function () { - var self = this - if (typeof self.size === 'number' && self._bytesWritten !== self.size) { - self.error( - 'Did not get expected byte count.\n' + - 'expect: ' + self.size + '\n' + - 'actual: ' + self._bytesWritten) - } - Writer.prototype._finish.call(self) -} diff --git a/node_modules/fstream/lib/get-type.js b/node_modules/fstream/lib/get-type.js deleted file mode 100644 index 19f6a657db847..0000000000000 --- a/node_modules/fstream/lib/get-type.js +++ /dev/null @@ -1,33 +0,0 @@ -module.exports = getType - -function getType (st) { - var types = [ - 'Directory', - 'File', - 'SymbolicLink', - 'Link', // special for hardlinks from tarballs - 'BlockDevice', - 'CharacterDevice', - 'FIFO', - 'Socket' - ] - var type - - if (st.type && types.indexOf(st.type) !== -1) { - st[st.type] = true - return st.type - } - - for (var i = 0, l = types.length; i < l; i++) { - type = types[i] - var is = st[type] || st['is' + type] - if (typeof is === 'function') is = is.call(st) - if (is) { - st[type] = true - st.type = type - return type - } - } - - return null -} diff --git a/node_modules/fstream/lib/link-reader.js b/node_modules/fstream/lib/link-reader.js deleted file mode 100644 index fb4cc67a98dc5..0000000000000 --- a/node_modules/fstream/lib/link-reader.js +++ /dev/null @@ -1,53 +0,0 @@ -// Basically just a wrapper around an fs.readlink -// -// XXX: Enhance this to support the Link type, by keeping -// a lookup table of {:}, so that hardlinks -// can be preserved in tarballs. - -module.exports = LinkReader - -var fs = require('graceful-fs') -var inherits = require('inherits') -var Reader = require('./reader.js') - -inherits(LinkReader, Reader) - -function LinkReader (props) { - var self = this - if (!(self instanceof LinkReader)) { - throw new Error('LinkReader must be called as constructor.') - } - - if (!((props.type === 'Link' && props.Link) || - (props.type === 'SymbolicLink' && props.SymbolicLink))) { - throw new Error('Non-link type ' + props.type) - } - - Reader.call(self, props) -} - -// When piping a LinkReader into a LinkWriter, we have to -// already have the linkpath property set, so that has to -// happen *before* the "ready" event, which means we need to -// override the _stat method. 
-LinkReader.prototype._stat = function (currentStat) { - var self = this - fs.readlink(self._path, function (er, linkpath) { - if (er) return self.error(er) - self.linkpath = self.props.linkpath = linkpath - self.emit('linkpath', linkpath) - Reader.prototype._stat.call(self, currentStat) - }) -} - -LinkReader.prototype._read = function () { - var self = this - if (self._paused) return - // basically just a no-op, since we got all the info we need - // from the _stat method - if (!self._ended) { - self.emit('end') - self.emit('close') - self._ended = true - } -} diff --git a/node_modules/fstream/lib/link-writer.js b/node_modules/fstream/lib/link-writer.js deleted file mode 100644 index af54284008faa..0000000000000 --- a/node_modules/fstream/lib/link-writer.js +++ /dev/null @@ -1,95 +0,0 @@ -module.exports = LinkWriter - -var fs = require('graceful-fs') -var Writer = require('./writer.js') -var inherits = require('inherits') -var path = require('path') -var rimraf = require('rimraf') - -inherits(LinkWriter, Writer) - -function LinkWriter (props) { - var self = this - if (!(self instanceof LinkWriter)) { - throw new Error('LinkWriter must be called as constructor.') - } - - // should already be established as a Link type - if (!((props.type === 'Link' && props.Link) || - (props.type === 'SymbolicLink' && props.SymbolicLink))) { - throw new Error('Non-link type ' + props.type) - } - - if (props.linkpath === '') props.linkpath = '.' - if (!props.linkpath) { - self.error('Need linkpath property to create ' + props.type) - } - - Writer.call(this, props) -} - -LinkWriter.prototype._create = function () { - // console.error(" LW _create") - var self = this - var hard = self.type === 'Link' || process.platform === 'win32' - var link = hard ? 'link' : 'symlink' - var lp = hard ? path.resolve(self.dirname, self.linkpath) : self.linkpath - - // can only change the link path by clobbering - // For hard links, let's just assume that's always the case, since - // there's no good way to read them if we don't already know. - if (hard) return clobber(self, lp, link) - - fs.readlink(self._path, function (er, p) { - // only skip creation if it's exactly the same link - if (p && p === lp) return finish(self) - clobber(self, lp, link) - }) -} - -function clobber (self, lp, link) { - rimraf(self._path, function (er) { - if (er) return self.error(er) - create(self, lp, link) - }) -} - -function create (self, lp, link) { - fs[link](lp, self._path, function (er) { - // if this is a hard link, and we're in the process of writing out a - // directory, it's very possible that the thing we're linking to - // doesn't exist yet (especially if it was intended as a symlink), - // so swallow ENOENT errors here and just soldier in. - // Additionally, an EPERM or EACCES can happen on win32 if it's trying - // to make a link to a directory. Again, just skip it. - // A better solution would be to have fs.symlink be supported on - // windows in some nice fashion. 
- if (er) { - if ((er.code === 'ENOENT' || - er.code === 'EACCES' || - er.code === 'EPERM') && process.platform === 'win32') { - self.ready = true - self.emit('ready') - self.emit('end') - self.emit('close') - self.end = self._finish = function () {} - } else return self.error(er) - } - finish(self) - }) -} - -function finish (self) { - self.ready = true - self.emit('ready') - if (self._ended && !self._finished) self._finish() -} - -LinkWriter.prototype.end = function () { - // console.error("LW finish in end") - this._ended = true - if (this.ready) { - this._finished = true - this._finish() - } -} diff --git a/node_modules/fstream/lib/proxy-reader.js b/node_modules/fstream/lib/proxy-reader.js deleted file mode 100644 index 4f431c9d9e27d..0000000000000 --- a/node_modules/fstream/lib/proxy-reader.js +++ /dev/null @@ -1,95 +0,0 @@ -// A reader for when we don't yet know what kind of thing -// the thing is. - -module.exports = ProxyReader - -var Reader = require('./reader.js') -var getType = require('./get-type.js') -var inherits = require('inherits') -var fs = require('graceful-fs') - -inherits(ProxyReader, Reader) - -function ProxyReader (props) { - var self = this - if (!(self instanceof ProxyReader)) { - throw new Error('ProxyReader must be called as constructor.') - } - - self.props = props - self._buffer = [] - self.ready = false - - Reader.call(self, props) -} - -ProxyReader.prototype._stat = function () { - var self = this - var props = self.props - // stat the thing to see what the proxy should be. - var stat = props.follow ? 'stat' : 'lstat' - - fs[stat](props.path, function (er, current) { - var type - if (er || !current) { - type = 'File' - } else { - type = getType(current) - } - - props[type] = true - props.type = self.type = type - - self._old = current - self._addProxy(Reader(props, current)) - }) -} - -ProxyReader.prototype._addProxy = function (proxy) { - var self = this - if (self._proxyTarget) { - return self.error('proxy already set') - } - - self._proxyTarget = proxy - proxy._proxy = self - - ;[ - 'error', - 'data', - 'end', - 'close', - 'linkpath', - 'entry', - 'entryEnd', - 'child', - 'childEnd', - 'warn', - 'stat' - ].forEach(function (ev) { - // console.error('~~ proxy event', ev, self.path) - proxy.on(ev, self.emit.bind(self, ev)) - }) - - self.emit('proxy', proxy) - - proxy.on('ready', function () { - // console.error("~~ proxy is ready!", self.path) - self.ready = true - self.emit('ready') - }) - - var calls = self._buffer - self._buffer.length = 0 - calls.forEach(function (c) { - proxy[c[0]].apply(proxy, c[1]) - }) -} - -ProxyReader.prototype.pause = function () { - return this._proxyTarget ? this._proxyTarget.pause() : false -} - -ProxyReader.prototype.resume = function () { - return this._proxyTarget ? this._proxyTarget.resume() : false -} diff --git a/node_modules/fstream/lib/proxy-writer.js b/node_modules/fstream/lib/proxy-writer.js deleted file mode 100644 index a6544621bfbe7..0000000000000 --- a/node_modules/fstream/lib/proxy-writer.js +++ /dev/null @@ -1,111 +0,0 @@ -// A writer for when we don't know what kind of thing -// the thing is. That is, it's not explicitly set, -// so we're going to make it whatever the thing already -// is, or "File" -// -// Until then, collect all events. 
- -module.exports = ProxyWriter - -var Writer = require('./writer.js') -var getType = require('./get-type.js') -var inherits = require('inherits') -var collect = require('./collect.js') -var fs = require('fs') - -inherits(ProxyWriter, Writer) - -function ProxyWriter (props) { - var self = this - if (!(self instanceof ProxyWriter)) { - throw new Error('ProxyWriter must be called as constructor.') - } - - self.props = props - self._needDrain = false - - Writer.call(self, props) -} - -ProxyWriter.prototype._stat = function () { - var self = this - var props = self.props - // stat the thing to see what the proxy should be. - var stat = props.follow ? 'stat' : 'lstat' - - fs[stat](props.path, function (er, current) { - var type - if (er || !current) { - type = 'File' - } else { - type = getType(current) - } - - props[type] = true - props.type = self.type = type - - self._old = current - self._addProxy(Writer(props, current)) - }) -} - -ProxyWriter.prototype._addProxy = function (proxy) { - // console.error("~~ set proxy", this.path) - var self = this - if (self._proxy) { - return self.error('proxy already set') - } - - self._proxy = proxy - ;[ - 'ready', - 'error', - 'close', - 'pipe', - 'drain', - 'warn' - ].forEach(function (ev) { - proxy.on(ev, self.emit.bind(self, ev)) - }) - - self.emit('proxy', proxy) - - var calls = self._buffer - calls.forEach(function (c) { - // console.error("~~ ~~ proxy buffered call", c[0], c[1]) - proxy[c[0]].apply(proxy, c[1]) - }) - self._buffer.length = 0 - if (self._needsDrain) self.emit('drain') -} - -ProxyWriter.prototype.add = function (entry) { - // console.error("~~ proxy add") - collect(entry) - - if (!this._proxy) { - this._buffer.push(['add', [entry]]) - this._needDrain = true - return false - } - return this._proxy.add(entry) -} - -ProxyWriter.prototype.write = function (c) { - // console.error('~~ proxy write') - if (!this._proxy) { - this._buffer.push(['write', [c]]) - this._needDrain = true - return false - } - return this._proxy.write(c) -} - -ProxyWriter.prototype.end = function (c) { - // console.error('~~ proxy end') - if (!this._proxy) { - this._buffer.push(['end', [c]]) - return false - } - return this._proxy.end(c) -} diff --git a/node_modules/fstream/lib/reader.js b/node_modules/fstream/lib/reader.js deleted file mode 100644 index be4f570eeb281..0000000000000 --- a/node_modules/fstream/lib/reader.js +++ /dev/null @@ -1,255 +0,0 @@ -module.exports = Reader - -var fs = require('graceful-fs') -var Stream = require('stream').Stream -var inherits = require('inherits') -var path = require('path') -var getType = require('./get-type.js') -var hardLinks = Reader.hardLinks = {} -var Abstract = require('./abstract.js') - -// Must do this *before* loading the child classes -inherits(Reader, Abstract) - -var LinkReader = require('./link-reader.js') - -function Reader (props, currentStat) { - var self = this - if (!(self instanceof Reader)) return new Reader(props, currentStat) - - if (typeof props === 'string') { - props = { path: props } - } - - // polymorphism. - // call fstream.Reader(dir) to get a DirReader object, etc. - // Note that, unlike in the Writer case, ProxyReader is going - // to be the *normal* state of affairs, since we rarely know - // the type of a file prior to reading it. 
- - var type - var ClassType - - if (props.type && typeof props.type === 'function') { - type = props.type - ClassType = type - } else { - type = getType(props) - ClassType = Reader - } - - if (currentStat && !type) { - type = getType(currentStat) - props[type] = true - props.type = type - } - - switch (type) { - case 'Directory': - ClassType = require('./dir-reader.js') - break - - case 'Link': - // XXX hard links are just files. - // However, it would be good to keep track of files' dev+inode - // and nlink values, and create a HardLinkReader that emits - // a linkpath value of the original copy, so that the tar - // writer can preserve them. - // ClassType = HardLinkReader - // break - - case 'File': - ClassType = require('./file-reader.js') - break - - case 'SymbolicLink': - ClassType = LinkReader - break - - case 'Socket': - ClassType = require('./socket-reader.js') - break - - case null: - ClassType = require('./proxy-reader.js') - break - } - - if (!(self instanceof ClassType)) { - return new ClassType(props) - } - - Abstract.call(self) - - if (!props.path) { - self.error('Must provide a path', null, true) - } - - self.readable = true - self.writable = false - - self.type = type - self.props = props - self.depth = props.depth = props.depth || 0 - self.parent = props.parent || null - self.root = props.root || (props.parent && props.parent.root) || self - - self._path = self.path = path.resolve(props.path) - if (process.platform === 'win32') { - self.path = self._path = self.path.replace(/\?/g, '_') - if (self._path.length >= 260) { - // how DOES one create files on the moon? - // if the path has spaces in it, then UNC will fail. - self._swallowErrors = true - // if (self._path.indexOf(" ") === -1) { - self._path = '\\\\?\\' + self.path.replace(/\//g, '\\') - // } - } - } - self.basename = props.basename = path.basename(self.path) - self.dirname = props.dirname = path.dirname(self.path) - - // these have served their purpose, and are now just noisy clutter - props.parent = props.root = null - - // console.error("\n\n\n%s setting size to", props.path, props.size) - self.size = props.size - self.filter = typeof props.filter === 'function' ? props.filter : null - if (props.sort === 'alpha') props.sort = alphasort - - // start the ball rolling. - // this will stat the thing, and then call self._read() - // to start reading whatever it is. - // console.error("calling stat", props.path, currentStat) - self._stat(currentStat) -} - -function alphasort (a, b) { - return a === b ? 0 - : a.toLowerCase() > b.toLowerCase() ? 1 - : a.toLowerCase() < b.toLowerCase() ? -1 - : a > b ? 1 - : -1 -} - -Reader.prototype._stat = function (currentStat) { - var self = this - var props = self.props - var stat = props.follow ? 'stat' : 'lstat' - // console.error("Reader._stat", self._path, currentStat) - if (currentStat) process.nextTick(statCb.bind(null, null, currentStat)) - else fs[stat](self._path, statCb) - - function statCb (er, props_) { - // console.error("Reader._stat, statCb", self._path, props_, props_.nlink) - if (er) return self.error(er) - - Object.keys(props_).forEach(function (k) { - props[k] = props_[k] - }) - - // if it's not the expected size, then abort here. - if (undefined !== self.size && props.size !== self.size) { - return self.error('incorrect size') - } - self.size = props.size - - var type = getType(props) - var handleHardlinks = props.hardlinks !== false - - // special little thing for handling hardlinks. 
- if (handleHardlinks && type !== 'Directory' && props.nlink && props.nlink > 1) { - var k = props.dev + ':' + props.ino - // console.error("Reader has nlink", self._path, k) - if (hardLinks[k] === self._path || !hardLinks[k]) { - hardLinks[k] = self._path - } else { - // switch into hardlink mode. - type = self.type = self.props.type = 'Link' - self.Link = self.props.Link = true - self.linkpath = self.props.linkpath = hardLinks[k] - // console.error("Hardlink detected, switching mode", self._path, self.linkpath) - // Setting __proto__ would arguably be the "correct" - // approach here, but that just seems too wrong. - self._stat = self._read = LinkReader.prototype._read - } - } - - if (self.type && self.type !== type) { - self.error('Unexpected type: ' + type) - } - - // if the filter doesn't pass, then just skip over this one. - // still have to emit end so that dir-walking can move on. - if (self.filter) { - var who = self._proxy || self - // special handling for ProxyReaders - if (!self.filter.call(who, who, props)) { - if (!self._disowned) { - self.abort() - self.emit('end') - self.emit('close') - } - return - } - } - - // last chance to abort or disown before the flow starts! - var events = ['_stat', 'stat', 'ready'] - var e = 0 - ;(function go () { - if (self._aborted) { - self.emit('end') - self.emit('close') - return - } - - if (self._paused && self.type !== 'Directory') { - self.once('resume', go) - return - } - - var ev = events[e++] - if (!ev) { - return self._read() - } - self.emit(ev, props) - go() - })() - } -} - -Reader.prototype.pipe = function (dest) { - var self = this - if (typeof dest.add === 'function') { - // piping to a multi-compatible, and we've got directory entries. - self.on('entry', function (entry) { - var ret = dest.add(entry) - if (ret === false) { - self.pause() - } - }) - } - - // console.error("R Pipe apply Stream Pipe") - return Stream.prototype.pipe.apply(this, arguments) -} - -Reader.prototype.pause = function (who) { - this._paused = true - who = who || this - this.emit('pause', who) - if (this._stream) this._stream.pause(who) -} - -Reader.prototype.resume = function (who) { - this._paused = false - who = who || this - this.emit('resume', who) - if (this._stream) this._stream.resume(who) - this._read() -} - -Reader.prototype._read = function () { - this.error('Cannot read unknown type: ' + this.type) -} diff --git a/node_modules/fstream/lib/socket-reader.js b/node_modules/fstream/lib/socket-reader.js deleted file mode 100644 index e0456ba890ede..0000000000000 --- a/node_modules/fstream/lib/socket-reader.js +++ /dev/null @@ -1,36 +0,0 @@ -// Just get the stats, and then don't do anything. -// You can't really "read" from a socket. You "connect" to it. -// Mostly, this is here so that reading a dir with a socket in it -// doesn't blow up. 
- -module.exports = SocketReader - -var inherits = require('inherits') -var Reader = require('./reader.js') - -inherits(SocketReader, Reader) - -function SocketReader (props) { - var self = this - if (!(self instanceof SocketReader)) { - throw new Error('SocketReader must be called as constructor.') - } - - if (!(props.type === 'Socket' && props.Socket)) { - throw new Error('Non-socket type ' + props.type) - } - - Reader.call(self, props) -} - -SocketReader.prototype._read = function () { - var self = this - if (self._paused) return - // basically just a no-op, since we got all the info we have - // from the _stat method - if (!self._ended) { - self.emit('end') - self.emit('close') - self._ended = true - } -} diff --git a/node_modules/fstream/lib/writer.js b/node_modules/fstream/lib/writer.js deleted file mode 100644 index 140e449e06dd5..0000000000000 --- a/node_modules/fstream/lib/writer.js +++ /dev/null @@ -1,390 +0,0 @@ -module.exports = Writer - -var fs = require('graceful-fs') -var inherits = require('inherits') -var rimraf = require('rimraf') -var mkdir = require('mkdirp') -var path = require('path') -var umask = process.platform === 'win32' ? 0 : process.umask() -var getType = require('./get-type.js') -var Abstract = require('./abstract.js') - -// Must do this *before* loading the child classes -inherits(Writer, Abstract) - -Writer.dirmode = parseInt('0777', 8) & (~umask) -Writer.filemode = parseInt('0666', 8) & (~umask) - -var DirWriter = require('./dir-writer.js') -var LinkWriter = require('./link-writer.js') -var FileWriter = require('./file-writer.js') -var ProxyWriter = require('./proxy-writer.js') - -// props is the desired state. current is optionally the current stat, -// provided here so that subclasses can avoid statting the target -// more than necessary. -function Writer (props, current) { - var self = this - - if (typeof props === 'string') { - props = { path: props } - } - - // polymorphism. - // call fstream.Writer(dir) to get a DirWriter object, etc. - var type = getType(props) - var ClassType = Writer - - switch (type) { - case 'Directory': - ClassType = DirWriter - break - case 'File': - ClassType = FileWriter - break - case 'Link': - case 'SymbolicLink': - ClassType = LinkWriter - break - case null: - default: - // Don't know yet what type to create, so we wrap in a proxy. - ClassType = ProxyWriter - break - } - - if (!(self instanceof ClassType)) return new ClassType(props) - - // now get down to business. - - Abstract.call(self) - - if (!props.path) self.error('Must provide a path', null, true) - - // props is what we want to set. - // set some convenience properties as well. - self.type = props.type - self.props = props - self.depth = props.depth || 0 - self.clobber = props.clobber === false ? 
props.clobber : true - self.parent = props.parent || null - self.root = props.root || (props.parent && props.parent.root) || self - - self._path = self.path = path.resolve(props.path) - if (process.platform === 'win32') { - self.path = self._path = self.path.replace(/\?/g, '_') - if (self._path.length >= 260) { - self._swallowErrors = true - self._path = '\\\\?\\' + self.path.replace(/\//g, '\\') - } - } - self.basename = path.basename(props.path) - self.dirname = path.dirname(props.path) - self.linkpath = props.linkpath || null - - props.parent = props.root = null - - // console.error("\n\n\n%s setting size to", props.path, props.size) - self.size = props.size - - if (typeof props.mode === 'string') { - props.mode = parseInt(props.mode, 8) - } - - self.readable = false - self.writable = true - - // buffer until ready, or while handling another entry - self._buffer = [] - self.ready = false - - self.filter = typeof props.filter === 'function' ? props.filter : null - - // start the ball rolling. - // this checks what's there already, and then calls - // self._create() to call the impl-specific creation stuff. - self._stat(current) -} - -// Calling this means that it's something we can't create. -// Just assert that it's already there, otherwise raise a warning. -Writer.prototype._create = function () { - var self = this - fs[self.props.follow ? 'stat' : 'lstat'](self._path, function (er) { - if (er) { - return self.warn('Cannot create ' + self._path + '\n' + - 'Unsupported type: ' + self.type, 'ENOTSUP') - } - self._finish() - }) -} - -Writer.prototype._stat = function (current) { - var self = this - var props = self.props - var stat = props.follow ? 'stat' : 'lstat' - var who = self._proxy || self - - if (current) statCb(null, current) - else fs[stat](self._path, statCb) - - function statCb (er, current) { - if (self.filter && !self.filter.call(who, who, current)) { - self._aborted = true - self.emit('end') - self.emit('close') - return - } - - // if it's not there, great. We'll just create it. - // if it is there, then we'll need to change whatever differs - if (er || !current) { - return create(self) - } - - self._old = current - var currentType = getType(current) - - // if it's a type change, then we need to clobber or error. - // if it's not a type change, then let the impl take care of it. - if (currentType !== self.type) { - return rimraf(self._path, function (er) { - if (er) return self.error(er) - self._old = null - create(self) - }) - } - - // otherwise, just handle in the app-specific way - // this creates a fs.WriteStream, or mkdir's, or whatever - create(self) - } -} - -function create (self) { - // console.error("W create", self._path, Writer.dirmode) - - // XXX Need to clobber non-dirs that are in the way, - // unless { clobber: false } in the props. - mkdir(path.dirname(self._path), Writer.dirmode, function (er, made) { - // console.error("W created", path.dirname(self._path), er) - if (er) return self.error(er) - - // later on, we have to set the mode and owner for these - self._madeDir = made - return self._create() - }) -} - -function endChmod (self, want, current, path, cb) { - var wantMode = want.mode - var chmod = want.follow || self.type !== 'SymbolicLink' - ? 
'chmod' : 'lchmod' - - if (!fs[chmod]) return cb() - if (typeof wantMode !== 'number') return cb() - - var curMode = current.mode & parseInt('0777', 8) - wantMode = wantMode & parseInt('0777', 8) - if (wantMode === curMode) return cb() - - fs[chmod](path, wantMode, cb) -} - -function endChown (self, want, current, path, cb) { - // Don't even try it unless root. Too easy to EPERM. - if (process.platform === 'win32') return cb() - if (!process.getuid || process.getuid() !== 0) return cb() - if (typeof want.uid !== 'number' && - typeof want.gid !== 'number') return cb() - - if (current.uid === want.uid && - current.gid === want.gid) return cb() - - var chown = (self.props.follow || self.type !== 'SymbolicLink') - ? 'chown' : 'lchown' - if (!fs[chown]) return cb() - - if (typeof want.uid !== 'number') want.uid = current.uid - if (typeof want.gid !== 'number') want.gid = current.gid - - fs[chown](path, want.uid, want.gid, cb) -} - -function endUtimes (self, want, current, path, cb) { - if (!fs.utimes || process.platform === 'win32') return cb() - - var utimes = (want.follow || self.type !== 'SymbolicLink') - ? 'utimes' : 'lutimes' - - if (utimes === 'lutimes' && !fs[utimes]) { - utimes = 'utimes' - } - - if (!fs[utimes]) return cb() - - var curA = current.atime - var curM = current.mtime - var meA = want.atime - var meM = want.mtime - - if (meA === undefined) meA = curA - if (meM === undefined) meM = curM - - if (!isDate(meA)) meA = new Date(meA) - if (!isDate(meM)) meA = new Date(meM) - - if (meA.getTime() === curA.getTime() && - meM.getTime() === curM.getTime()) return cb() - - fs[utimes](path, meA, meM, cb) -} - -// XXX This function is beastly. Break it up! -Writer.prototype._finish = function () { - var self = this - - if (self._finishing) return - self._finishing = true - - // console.error(" W Finish", self._path, self.size) - - // set up all the things. - // At this point, we're already done writing whatever we've gotta write, - // adding files to the dir, etc. - var todo = 0 - var errState = null - var done = false - - if (self._old) { - // the times will almost *certainly* have changed. - // adds the utimes syscall, but remove another stat. - self._old.atime = new Date(0) - self._old.mtime = new Date(0) - // console.error(" W Finish Stale Stat", self._path, self.size) - setProps(self._old) - } else { - var stat = self.props.follow ? 'stat' : 'lstat' - // console.error(" W Finish Stating", self._path, self.size) - fs[stat](self._path, function (er, current) { - // console.error(" W Finish Stated", self._path, self.size, current) - if (er) { - // if we're in the process of writing out a - // directory, it's very possible that the thing we're linking to - // doesn't exist yet (especially if it was intended as a symlink), - // so swallow ENOENT errors here and just soldier on. 
- if (er.code === 'ENOENT' && - (self.type === 'Link' || self.type === 'SymbolicLink') && - process.platform === 'win32') { - self.ready = true - self.emit('ready') - self.emit('end') - self.emit('close') - self.end = self._finish = function () {} - return - } else return self.error(er) - } - setProps(self._old = current) - }) - } - - return - - function setProps (current) { - todo += 3 - endChmod(self, self.props, current, self._path, next('chmod')) - endChown(self, self.props, current, self._path, next('chown')) - endUtimes(self, self.props, current, self._path, next('utimes')) - } - - function next (what) { - return function (er) { - // console.error(" W Finish", what, todo) - if (errState) return - if (er) { - er.fstream_finish_call = what - return self.error(errState = er) - } - if (--todo > 0) return - if (done) return - done = true - - // we may still need to set the mode/etc. on some parent dirs - // that were created previously. delay end/close until then. - if (!self._madeDir) return end() - else endMadeDir(self, self._path, end) - - function end (er) { - if (er) { - er.fstream_finish_call = 'setupMadeDir' - return self.error(er) - } - // all the props have been set, so we're completely done. - self.emit('end') - self.emit('close') - } - } - } -} - -function endMadeDir (self, p, cb) { - var made = self._madeDir - // everything *between* made and path.dirname(self._path) - // needs to be set up. Note that this may just be one dir. - var d = path.dirname(p) - - endMadeDir_(self, d, function (er) { - if (er) return cb(er) - if (d === made) { - return cb() - } - endMadeDir(self, d, cb) - }) -} - -function endMadeDir_ (self, p, cb) { - var dirProps = {} - Object.keys(self.props).forEach(function (k) { - dirProps[k] = self.props[k] - - // only make non-readable dirs if explicitly requested. 
- if (k === 'mode' && self.type !== 'Directory') { - dirProps[k] = dirProps[k] | parseInt('0111', 8) - } - }) - - var todo = 3 - var errState = null - fs.stat(p, function (er, current) { - if (er) return cb(errState = er) - endChmod(self, dirProps, current, p, next) - endChown(self, dirProps, current, p, next) - endUtimes(self, dirProps, current, p, next) - }) - - function next (er) { - if (errState) return - if (er) return cb(errState = er) - if (--todo === 0) return cb() - } -} - -Writer.prototype.pipe = function () { - this.error("Can't pipe from writable stream") -} - -Writer.prototype.add = function () { - this.error("Can't add to non-Directory type") -} - -Writer.prototype.write = function () { - return true -} - -function objectToString (d) { - return Object.prototype.toString.call(d) -} - -function isDate (d) { - return typeof d === 'object' && objectToString(d) === '[object Date]' -} diff --git a/node_modules/fstream/package.json b/node_modules/fstream/package.json deleted file mode 100644 index 25418b6709e64..0000000000000 --- a/node_modules/fstream/package.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "_from": "fstream@^1.0.0", - "_id": "fstream@1.0.11", - "_inBundle": false, - "_integrity": "sha1-XB+x8RdHcRTwYyoOtLcbPLD9MXE=", - "_location": "/fstream", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "fstream@^1.0.0", - "name": "fstream", - "escapedName": "fstream", - "rawSpec": "^1.0.0", - "saveSpec": null, - "fetchSpec": "^1.0.0" - }, - "_requiredBy": [ - "/node-gyp", - "/node-gyp/tar" - ], - "_resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.11.tgz", - "_shasum": "5c1fb1f117477114f0632a0eb4b71b3cb0fd3171", - "_spec": "fstream@^1.0.0", - "_where": "/Users/rebecca/code/npm/node_modules/node-gyp", - "author": { - "name": "Isaac Z. 
Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "bugs": { - "url": "https://github.com/npm/fstream/issues" - }, - "bundleDependencies": false, - "dependencies": { - "graceful-fs": "^4.1.2", - "inherits": "~2.0.0", - "mkdirp": ">=0.5 0", - "rimraf": "2" - }, - "deprecated": false, - "description": "Advanced file system stream things", - "devDependencies": { - "standard": "^4.0.0", - "tap": "^1.2.0" - }, - "engines": { - "node": ">=0.6" - }, - "homepage": "https://github.com/npm/fstream#readme", - "license": "ISC", - "main": "fstream.js", - "name": "fstream", - "repository": { - "type": "git", - "url": "git+https://github.com/npm/fstream.git" - }, - "scripts": { - "test": "standard && tap examples/*.js" - }, - "version": "1.0.11" -} diff --git a/node_modules/function-bind/.editorconfig b/node_modules/function-bind/.editorconfig new file mode 100644 index 0000000000000..ac29adef0361c --- /dev/null +++ b/node_modules/function-bind/.editorconfig @@ -0,0 +1,20 @@ +root = true + +[*] +indent_style = tab +indent_size = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true +max_line_length = 120 + +[CHANGELOG.md] +indent_style = space +indent_size = 2 + +[*.json] +max_line_length = off + +[Makefile] +max_line_length = off diff --git a/node_modules/function-bind/.jscs.json b/node_modules/function-bind/.jscs.json new file mode 100644 index 0000000000000..8c4479480be70 --- /dev/null +++ b/node_modules/function-bind/.jscs.json @@ -0,0 +1,176 @@ +{ + "es3": true, + + "additionalRules": [], + + "requireSemicolons": true, + + "disallowMultipleSpaces": true, + + "disallowIdentifierNames": [], + + "requireCurlyBraces": { + "allExcept": [], + "keywords": ["if", "else", "for", "while", "do", "try", "catch"] + }, + + "requireSpaceAfterKeywords": ["if", "else", "for", "while", "do", "switch", "return", "try", "catch", "function"], + + "disallowSpaceAfterKeywords": [], + + "disallowSpaceBeforeComma": true, + "disallowSpaceAfterComma": false, + "disallowSpaceBeforeSemicolon": true, + + "disallowNodeTypes": [ + "DebuggerStatement", + "ForInStatement", + "LabeledStatement", + "SwitchCase", + "SwitchStatement", + "WithStatement" + ], + + "requireObjectKeysOnNewLine": { "allExcept": ["sameLine"] }, + + "requireSpacesInAnonymousFunctionExpression": { "beforeOpeningRoundBrace": true, "beforeOpeningCurlyBrace": true }, + "requireSpacesInNamedFunctionExpression": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInNamedFunctionExpression": { "beforeOpeningRoundBrace": true }, + "requireSpacesInFunctionDeclaration": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInFunctionDeclaration": { "beforeOpeningRoundBrace": true }, + + "requireSpaceBetweenArguments": true, + + "disallowSpacesInsideParentheses": true, + + "disallowSpacesInsideArrayBrackets": true, + + "disallowQuotedKeysInObjects": { "allExcept": ["reserved"] }, + + "disallowSpaceAfterObjectKeys": true, + + "requireCommaBeforeLineBreak": true, + + "disallowSpaceAfterPrefixUnaryOperators": ["++", "--", "+", "-", "~", "!"], + "requireSpaceAfterPrefixUnaryOperators": [], + + "disallowSpaceBeforePostfixUnaryOperators": ["++", "--"], + "requireSpaceBeforePostfixUnaryOperators": [], + + "disallowSpaceBeforeBinaryOperators": [], + "requireSpaceBeforeBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + + "requireSpaceAfterBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + "disallowSpaceAfterBinaryOperators": [], + + "disallowImplicitTypeConversion": 
["binary", "string"], + + "disallowKeywords": ["with", "eval"], + + "requireKeywordsOnNewLine": [], + "disallowKeywordsOnNewLine": ["else"], + + "requireLineFeedAtFileEnd": true, + + "disallowTrailingWhitespace": true, + + "disallowTrailingComma": true, + + "excludeFiles": ["node_modules/**", "vendor/**"], + + "disallowMultipleLineStrings": true, + + "requireDotNotation": { "allExcept": ["keywords"] }, + + "requireParenthesesAroundIIFE": true, + + "validateLineBreaks": "LF", + + "validateQuoteMarks": { + "escape": true, + "mark": "'" + }, + + "disallowOperatorBeforeLineBreak": [], + + "requireSpaceBeforeKeywords": [ + "do", + "for", + "if", + "else", + "switch", + "case", + "try", + "catch", + "finally", + "while", + "with", + "return" + ], + + "validateAlignedFunctionParameters": { + "lineBreakAfterOpeningBraces": true, + "lineBreakBeforeClosingBraces": true + }, + + "requirePaddingNewLinesBeforeExport": true, + + "validateNewlineAfterArrayElements": { + "maximum": 8 + }, + + "requirePaddingNewLinesAfterUseStrict": true, + + "disallowArrowFunctions": true, + + "disallowMultiLineTernary": true, + + "validateOrderInObjectKeys": "asc-insensitive", + + "disallowIdenticalDestructuringNames": true, + + "disallowNestedTernaries": { "maxLevel": 1 }, + + "requireSpaceAfterComma": { "allExcept": ["trailing"] }, + "requireAlignedMultilineParams": false, + + "requireSpacesInGenerator": { + "afterStar": true + }, + + "disallowSpacesInGenerator": { + "beforeStar": true + }, + + "disallowVar": false, + + "requireArrayDestructuring": false, + + "requireEnhancedObjectLiterals": false, + + "requireObjectDestructuring": false, + + "requireEarlyReturn": false, + + "requireCapitalizedConstructorsNew": { + "allExcept": ["Function", "String", "Object", "Symbol", "Number", "Date", "RegExp", "Error", "Boolean", "Array"] + }, + + "requireImportAlphabetized": false, + + "requireSpaceBeforeObjectValues": true, + "requireSpaceBeforeDestructuredValues": true, + + "disallowSpacesInsideTemplateStringPlaceholders": true, + + "disallowArrayDestructuringReturn": false, + + "requireNewlineBeforeSingleStatementsInIf": false, + + "disallowUnusedVariables": true, + + "requireSpacesInsideImportedObjectBraces": true, + + "requireUseStrict": true +} + diff --git a/node_modules/function-bind/.npmignore b/node_modules/function-bind/.npmignore new file mode 100644 index 0000000000000..dbb555fd1f9f5 --- /dev/null +++ b/node_modules/function-bind/.npmignore @@ -0,0 +1,22 @@ +# gitignore +.DS_Store +.monitor +.*.swp +.nodemonignore +releases +*.log +*.err +fleet.json +public/browserify +bin/*.json +.bin +build +compile +.lock-wscript +coverage +node_modules + +# Only apps should have lockfiles +npm-shrinkwrap.json +package-lock.json +yarn.lock diff --git a/node_modules/function-bind/.travis.yml b/node_modules/function-bind/.travis.yml new file mode 100644 index 0000000000000..85f70d2464f39 --- /dev/null +++ b/node_modules/function-bind/.travis.yml @@ -0,0 +1,168 @@ +language: node_js +os: + - linux +node_js: + - "8.4" + - "7.10" + - "6.11" + - "5.12" + - "4.8" + - "iojs-v3.3" + - "iojs-v2.5" + - "iojs-v1.8" + - "0.12" + - "0.10" + - "0.8" +before_install: + - 'if [ "${TRAVIS_NODE_VERSION}" = "0.6" ]; then npm install -g npm@1.3 ; elif [ "${TRAVIS_NODE_VERSION}" != "0.9" ]; then case "$(npm --version)" in 1.*) npm install -g npm@1.4.28 ;; 2.*) npm install -g npm@2 ;; esac ; fi' + - 'if [ "${TRAVIS_NODE_VERSION}" != "0.6" ] && [ "${TRAVIS_NODE_VERSION}" != "0.9" ]; then if [ "${TRAVIS_NODE_VERSION%${TRAVIS_NODE_VERSION#[0-9]}}" = "0" ] 
|| [ "${TRAVIS_NODE_VERSION:0:4}" = "iojs" ]; then npm install -g npm@4.5 ; else npm install -g npm; fi; fi' +install: + - 'if [ "${TRAVIS_NODE_VERSION}" = "0.6" ]; then nvm install 0.8 && npm install -g npm@1.3 && npm install -g npm@1.4.28 && npm install -g npm@2 && npm install && nvm use "${TRAVIS_NODE_VERSION}"; else npm install; fi;' +script: + - 'if [ -n "${PRETEST-}" ]; then npm run pretest ; fi' + - 'if [ -n "${POSTTEST-}" ]; then npm run posttest ; fi' + - 'if [ -n "${COVERAGE-}" ]; then npm run coverage ; fi' + - 'if [ -n "${TEST-}" ]; then npm run tests-only ; fi' +sudo: false +env: + - TEST=true +matrix: + fast_finish: true + include: + - node_js: "node" + env: PRETEST=true + - node_js: "4" + env: COVERAGE=true + - node_js: "8.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.2" + env: 
TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.4" + env: TEST=true ALLOW_FAILURE=true + allow_failures: + - os: osx + - env: TEST=true ALLOW_FAILURE=true diff --git a/node_modules/function-bind/LICENSE b/node_modules/function-bind/LICENSE new file mode 100644 index 0000000000000..62d6d237ff179 --- /dev/null +++ b/node_modules/function-bind/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2013 Raynos. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + diff --git a/node_modules/function-bind/README.md b/node_modules/function-bind/README.md new file mode 100644 index 0000000000000..81862a02cb940 --- /dev/null +++ b/node_modules/function-bind/README.md @@ -0,0 +1,48 @@ +# function-bind + + + + + +Implementation of function.prototype.bind + +## Example + +I mainly do this for unit tests I run on phantomjs. 
+PhantomJS does not have Function.prototype.bind :( + +```js +Function.prototype.bind = require("function-bind") +``` + +## Installation + +`npm install function-bind` + +## Contributors + + - Raynos + +## MIT Licenced + + [travis-svg]: https://travis-ci.org/Raynos/function-bind.svg + [travis-url]: https://travis-ci.org/Raynos/function-bind + [npm-badge-svg]: https://badge.fury.io/js/function-bind.svg + [npm-url]: https://npmjs.org/package/function-bind + [5]: https://coveralls.io/repos/Raynos/function-bind/badge.png + [6]: https://coveralls.io/r/Raynos/function-bind + [7]: https://gemnasium.com/Raynos/function-bind.png + [8]: https://gemnasium.com/Raynos/function-bind + [deps-svg]: https://david-dm.org/Raynos/function-bind.svg + [deps-url]: https://david-dm.org/Raynos/function-bind + [dev-deps-svg]: https://david-dm.org/Raynos/function-bind/dev-status.svg + [dev-deps-url]: https://david-dm.org/Raynos/function-bind#info=devDependencies + [11]: https://ci.testling.com/Raynos/function-bind.png + [12]: https://ci.testling.com/Raynos/function-bind diff --git a/node_modules/function-bind/implementation.js b/node_modules/function-bind/implementation.js new file mode 100644 index 0000000000000..cc4daec1b080a --- /dev/null +++ b/node_modules/function-bind/implementation.js @@ -0,0 +1,52 @@ +'use strict'; + +/* eslint no-invalid-this: 1 */ + +var ERROR_MESSAGE = 'Function.prototype.bind called on incompatible '; +var slice = Array.prototype.slice; +var toStr = Object.prototype.toString; +var funcType = '[object Function]'; + +module.exports = function bind(that) { + var target = this; + if (typeof target !== 'function' || toStr.call(target) !== funcType) { + throw new TypeError(ERROR_MESSAGE + target); + } + var args = slice.call(arguments, 1); + + var bound; + var binder = function () { + if (this instanceof bound) { + var result = target.apply( + this, + args.concat(slice.call(arguments)) + ); + if (Object(result) === result) { + return result; + } + return this; + } else { + return target.apply( + that, + args.concat(slice.call(arguments)) + ); + } + }; + + var boundLength = Math.max(0, target.length - args.length); + var boundArgs = []; + for (var i = 0; i < boundLength; i++) { + boundArgs.push('$' + i); + } + + bound = Function('binder', 'return function (' + boundArgs.join(',') + '){ return binder.apply(this,arguments); }')(binder); + + if (target.prototype) { + var Empty = function Empty() {}; + Empty.prototype = target.prototype; + bound.prototype = new Empty(); + Empty.prototype = null; + } + + return bound; +}; diff --git a/node_modules/function-bind/index.js b/node_modules/function-bind/index.js new file mode 100644 index 0000000000000..3bb6b9609889f --- /dev/null +++ b/node_modules/function-bind/index.js @@ -0,0 +1,5 @@ +'use strict'; + +var implementation = require('./implementation'); + +module.exports = Function.prototype.bind || implementation; diff --git a/node_modules/function-bind/package.json b/node_modules/function-bind/package.json new file mode 100644 index 0000000000000..426db8c4d8705 --- /dev/null +++ b/node_modules/function-bind/package.json @@ -0,0 +1,67 @@ +{ + "name": "function-bind", + "version": "1.1.1", + "description": "Implementation of Function.prototype.bind", + "keywords": [ + "function", + "bind", + "shim", + "es5" + ], + "author": "Raynos ", + "repository": "git://github.com/Raynos/function-bind.git", + "main": "index", + "homepage": "https://github.com/Raynos/function-bind", + "contributors": [ + { + "name": "Raynos" + }, + { + "name": "Jordan Harband", + 
"url": "https://github.com/ljharb" + } + ], + "bugs": { + "url": "https://github.com/Raynos/function-bind/issues", + "email": "raynos2@gmail.com" + }, + "dependencies": {}, + "devDependencies": { + "@ljharb/eslint-config": "^12.2.1", + "covert": "^1.1.0", + "eslint": "^4.5.0", + "jscs": "^3.0.7", + "tape": "^4.8.0" + }, + "license": "MIT", + "scripts": { + "pretest": "npm run lint", + "test": "npm run tests-only", + "posttest": "npm run coverage -- --quiet", + "tests-only": "node test", + "coverage": "covert test/*.js", + "lint": "npm run jscs && npm run eslint", + "jscs": "jscs *.js */*.js", + "eslint": "eslint *.js */*.js" + }, + "testling": { + "files": "test/index.js", + "browsers": [ + "ie/8..latest", + "firefox/16..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } + +,"_resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz" +,"_integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" +,"_from": "function-bind@1.1.1" +} \ No newline at end of file diff --git a/node_modules/function-bind/test/index.js b/node_modules/function-bind/test/index.js new file mode 100644 index 0000000000000..2edecce2f0fa5 --- /dev/null +++ b/node_modules/function-bind/test/index.js @@ -0,0 +1,252 @@ +// jscs:disable requireUseStrict + +var test = require('tape'); + +var functionBind = require('../implementation'); +var getCurrentContext = function () { return this; }; + +test('functionBind is a function', function (t) { + t.equal(typeof functionBind, 'function'); + t.end(); +}); + +test('non-functions', function (t) { + var nonFunctions = [true, false, [], {}, 42, 'foo', NaN, /a/g]; + t.plan(nonFunctions.length); + for (var i = 0; i < nonFunctions.length; ++i) { + try { functionBind.call(nonFunctions[i]); } catch (ex) { + t.ok(ex instanceof TypeError, 'throws when given ' + String(nonFunctions[i])); + } + } + t.end(); +}); + +test('without a context', function (t) { + t.test('binds properly', function (st) { + var args, context; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + context = this; + }) + }; + namespace.func(1, 2, 3); + st.deepEqual(args, [1, 2, 3]); + st.equal(context, getCurrentContext.call()); + st.end(); + }); + + t.test('binds properly, and still supplies bound arguments', function (st) { + var args, context; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + context = this; + }, undefined, 1, 2, 3) + }; + namespace.func(4, 5, 6); + st.deepEqual(args, [1, 2, 3, 4, 5, 6]); + st.equal(context, getCurrentContext.call()); + st.end(); + }); + + t.test('returns properly', function (st) { + var args; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + return this; + }, null) + }; + var context = namespace.func(1, 2, 3); + st.equal(context, getCurrentContext.call(), 'returned context is namespaced context'); + st.deepEqual(args, [1, 2, 3], 'passed arguments are correct'); + st.end(); + }); + + t.test('returns properly with bound arguments', function (st) { + var args; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + return this; + }, null, 1, 2, 3) + }; + var context = namespace.func(4, 5, 6); + st.equal(context, 
getCurrentContext.call(), 'returned context is namespaced context'); + st.deepEqual(args, [1, 2, 3, 4, 5, 6], 'passed arguments are correct'); + st.end(); + }); + + t.test('called as a constructor', function (st) { + var thunkify = function (value) { + return function () { return value; }; + }; + st.test('returns object value', function (sst) { + var expectedReturnValue = [1, 2, 3]; + var Constructor = functionBind.call(thunkify(expectedReturnValue), null); + var result = new Constructor(); + sst.equal(result, expectedReturnValue); + sst.end(); + }); + + st.test('does not return primitive value', function (sst) { + var Constructor = functionBind.call(thunkify(42), null); + var result = new Constructor(); + sst.notEqual(result, 42); + sst.end(); + }); + + st.test('object from bound constructor is instance of original and bound constructor', function (sst) { + var A = function (x) { + this.name = x || 'A'; + }; + var B = functionBind.call(A, null, 'B'); + + var result = new B(); + sst.ok(result instanceof B, 'result is instance of bound constructor'); + sst.ok(result instanceof A, 'result is instance of original constructor'); + sst.end(); + }); + + st.end(); + }); + + t.end(); +}); + +test('with a context', function (t) { + t.test('with no bound arguments', function (st) { + var args, context; + var boundContext = {}; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + context = this; + }, boundContext) + }; + namespace.func(1, 2, 3); + st.equal(context, boundContext, 'binds a context properly'); + st.deepEqual(args, [1, 2, 3], 'supplies passed arguments'); + st.end(); + }); + + t.test('with bound arguments', function (st) { + var args, context; + var boundContext = {}; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + context = this; + }, boundContext, 1, 2, 3) + }; + namespace.func(4, 5, 6); + st.equal(context, boundContext, 'binds a context properly'); + st.deepEqual(args, [1, 2, 3, 4, 5, 6], 'supplies bound and passed arguments'); + st.end(); + }); + + t.test('returns properly', function (st) { + var boundContext = {}; + var args; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + return this; + }, boundContext) + }; + var context = namespace.func(1, 2, 3); + st.equal(context, boundContext, 'returned context is bound context'); + st.notEqual(context, getCurrentContext.call(), 'returned context is not lexical context'); + st.deepEqual(args, [1, 2, 3], 'passed arguments are correct'); + st.end(); + }); + + t.test('returns properly with bound arguments', function (st) { + var boundContext = {}; + var args; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + return this; + }, boundContext, 1, 2, 3) + }; + var context = namespace.func(4, 5, 6); + st.equal(context, boundContext, 'returned context is bound context'); + st.notEqual(context, getCurrentContext.call(), 'returned context is not lexical context'); + st.deepEqual(args, [1, 2, 3, 4, 5, 6], 'passed arguments are correct'); + st.end(); + }); + + t.test('passes the correct arguments when called as a constructor', function (st) { + var expected = { name: 'Correct' }; + var namespace = { + Func: functionBind.call(function (arg) { + return arg; + }, { name: 'Incorrect' }) + }; + var returned = new namespace.Func(expected); + st.equal(returned, expected, 'returns the right arg when called as a constructor'); + 
st.end(); + }); + + t.test('has the new instance\'s context when called as a constructor', function (st) { + var actualContext; + var expectedContext = { foo: 'bar' }; + var namespace = { + Func: functionBind.call(function () { + actualContext = this; + }, expectedContext) + }; + var result = new namespace.Func(); + st.equal(result instanceof namespace.Func, true); + st.notEqual(actualContext, expectedContext); + st.end(); + }); + + t.end(); +}); + +test('bound function length', function (t) { + t.test('sets a correct length without thisArg', function (st) { + var subject = functionBind.call(function (a, b, c) { return a + b + c; }); + st.equal(subject.length, 3); + st.equal(subject(1, 2, 3), 6); + st.end(); + }); + + t.test('sets a correct length with thisArg', function (st) { + var subject = functionBind.call(function (a, b, c) { return a + b + c; }, {}); + st.equal(subject.length, 3); + st.equal(subject(1, 2, 3), 6); + st.end(); + }); + + t.test('sets a correct length without thisArg and first argument', function (st) { + var subject = functionBind.call(function (a, b, c) { return a + b + c; }, undefined, 1); + st.equal(subject.length, 2); + st.equal(subject(2, 3), 6); + st.end(); + }); + + t.test('sets a correct length with thisArg and first argument', function (st) { + var subject = functionBind.call(function (a, b, c) { return a + b + c; }, {}, 1); + st.equal(subject.length, 2); + st.equal(subject(2, 3), 6); + st.end(); + }); + + t.test('sets a correct length without thisArg and too many arguments', function (st) { + var subject = functionBind.call(function (a, b, c) { return a + b + c; }, undefined, 1, 2, 3, 4); + st.equal(subject.length, 0); + st.equal(subject(), 6); + st.end(); + }); + + t.test('sets a correct length with thisArg and too many arguments', function (st) { + var subject = functionBind.call(function (a, b, c) { return a + b + c; }, {}, 1, 2, 3, 4); + st.equal(subject.length, 0); + st.equal(subject(), 6); + st.end(); + }); +}); diff --git a/node_modules/gauge/node_modules/aproba/LICENSE b/node_modules/gauge/node_modules/aproba/LICENSE new file mode 100644 index 0000000000000..f4be44d881b2d --- /dev/null +++ b/node_modules/gauge/node_modules/aproba/LICENSE @@ -0,0 +1,14 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ diff --git a/node_modules/gauge/node_modules/aproba/README.md b/node_modules/gauge/node_modules/aproba/README.md new file mode 100644 index 0000000000000..0bfc594c56a37 --- /dev/null +++ b/node_modules/gauge/node_modules/aproba/README.md @@ -0,0 +1,94 @@ +aproba +====== + +A ridiculously light-weight function argument validator + +``` +var validate = require("aproba") + +function myfunc(a, b, c) { + // `a` must be a string, `b` a number, `c` a function + validate('SNF', arguments) // [a,b,c] is also valid +} + +myfunc('test', 23, function () {}) // ok +myfunc(123, 23, function () {}) // type error +myfunc('test', 23) // missing arg error +myfunc('test', 23, function () {}, true) // too many args error + +``` + +Valid types are: + +| type | description +| :--: | :---------- +| * | matches any type +| A | `Array.isArray` OR an `arguments` object +| S | typeof == string +| N | typeof == number +| F | typeof == function +| O | typeof == object and not type A and not type E +| B | typeof == boolean +| E | `instanceof Error` OR `null` **(special: see below)** +| Z | == `null` + +Validation failures throw one of three exception types, distinguished by a +`code` property of `EMISSINGARG`, `EINVALIDTYPE` or `ETOOMANYARGS`. + +If you pass in an invalid type then it will throw with a code of +`EUNKNOWNTYPE`. + +If an **error** argument is found and is not null then the remaining +arguments are optional. That is, if you say `ESO` then that's like using a +non-magical `E` in: `E|ESO|ZSO`. + +### But I have optional arguments?! + +You can provide more than one signature by separating them with pipes `|`. +If any signature matches the arguments then they'll be considered valid. + +So for example, say you wanted to write a signature for +`fs.createWriteStream`. The docs for it describe it thusly: + +``` +fs.createWriteStream(path[, options]) +``` + +This would be a signature of `SO|S`. That is, a string and and object, or +just a string. + +Now, if you read the full `fs` docs, you'll see that actually path can ALSO +be a buffer. And options can be a string, that is: +``` +path | +options | +``` + +To reproduce this you have to fully enumerate all of the possible +combinations and that implies a signature of `SO|SS|OO|OS|S|O`. The +awkwardness is a feature: It reminds you of the complexity you're adding to +your API when you do this sort of thing. + + +### Browser support + +This has no dependencies and should work in browsers, though you'll have +noisier stack traces. + +### Why this exists + +I wanted a very simple argument validator. It needed to do two things: + +1. Be more concise and easier to use than assertions + +2. Not encourage an infinite bikeshed of DSLs + +This is why types are specified by a single character and there's no such +thing as an optional argument. + +This is not intended to validate user data. This is specifically about +asserting the interface of your functions. + +If you need greater validation, I encourage you to write them by hand or +look elsewhere. 
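The aproba README above explains the single-character type codes and the pipe-separated alternative signatures. As a quick orientation note (not part of the vendored package), here is a minimal sketch of the `SO|S` pattern; `writeConfig` and its arguments are hypothetical, and only the signature string and the error codes come from the README and `index.js` included in this diff.

```js
// Hypothetical consumer of the aproba signature DSL described above.
var validate = require('aproba')

function writeConfig (path, options) {
  // 'SO|S': a string plus an options object, or just a string
  validate('SO|S', arguments)
  // ... real work would go here
}

writeConfig('out.json')                // ok: matches the 'S' alternative
writeConfig('out.json', { spaces: 2 }) // ok: matches the 'SO' alternative
// writeConfig({})                     // throws with code 'EINVALIDTYPE'
// writeConfig()                       // throws with code 'EWRONGARGCOUNT'
```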
+ diff --git a/node_modules/gauge/node_modules/aproba/index.js b/node_modules/gauge/node_modules/aproba/index.js new file mode 100644 index 0000000000000..6f3f797c09a75 --- /dev/null +++ b/node_modules/gauge/node_modules/aproba/index.js @@ -0,0 +1,105 @@ +'use strict' + +function isArguments (thingy) { + return thingy != null && typeof thingy === 'object' && thingy.hasOwnProperty('callee') +} + +var types = { + '*': {label: 'any', check: function () { return true }}, + A: {label: 'array', check: function (thingy) { return Array.isArray(thingy) || isArguments(thingy) }}, + S: {label: 'string', check: function (thingy) { return typeof thingy === 'string' }}, + N: {label: 'number', check: function (thingy) { return typeof thingy === 'number' }}, + F: {label: 'function', check: function (thingy) { return typeof thingy === 'function' }}, + O: {label: 'object', check: function (thingy) { return typeof thingy === 'object' && thingy != null && !types.A.check(thingy) && !types.E.check(thingy) }}, + B: {label: 'boolean', check: function (thingy) { return typeof thingy === 'boolean' }}, + E: {label: 'error', check: function (thingy) { return thingy instanceof Error }}, + Z: {label: 'null', check: function (thingy) { return thingy == null }} +} + +function addSchema (schema, arity) { + var group = arity[schema.length] = arity[schema.length] || [] + if (group.indexOf(schema) === -1) group.push(schema) +} + +var validate = module.exports = function (rawSchemas, args) { + if (arguments.length !== 2) throw wrongNumberOfArgs(['SA'], arguments.length) + if (!rawSchemas) throw missingRequiredArg(0, 'rawSchemas') + if (!args) throw missingRequiredArg(1, 'args') + if (!types.S.check(rawSchemas)) throw invalidType(0, ['string'], rawSchemas) + if (!types.A.check(args)) throw invalidType(1, ['array'], args) + var schemas = rawSchemas.split('|') + var arity = {} + + schemas.forEach(function (schema) { + for (var ii = 0; ii < schema.length; ++ii) { + var type = schema[ii] + if (!types[type]) throw unknownType(ii, type) + } + if (/E.*E/.test(schema)) throw moreThanOneError(schema) + addSchema(schema, arity) + if (/E/.test(schema)) { + addSchema(schema.replace(/E.*$/, 'E'), arity) + addSchema(schema.replace(/E/, 'Z'), arity) + if (schema.length === 1) addSchema('', arity) + } + }) + var matching = arity[args.length] + if (!matching) { + throw wrongNumberOfArgs(Object.keys(arity), args.length) + } + for (var ii = 0; ii < args.length; ++ii) { + var newMatching = matching.filter(function (schema) { + var type = schema[ii] + var typeCheck = types[type].check + return typeCheck(args[ii]) + }) + if (!newMatching.length) { + var labels = matching.map(function (schema) { + return types[schema[ii]].label + }).filter(function (schema) { return schema != null }) + throw invalidType(ii, labels, args[ii]) + } + matching = newMatching + } +} + +function missingRequiredArg (num) { + return newException('EMISSINGARG', 'Missing required argument #' + (num + 1)) +} + +function unknownType (num, type) { + return newException('EUNKNOWNTYPE', 'Unknown type ' + type + ' in argument #' + (num + 1)) +} + +function invalidType (num, expectedTypes, value) { + var valueType + Object.keys(types).forEach(function (typeCode) { + if (types[typeCode].check(value)) valueType = types[typeCode].label + }) + return newException('EINVALIDTYPE', 'Argument #' + (num + 1) + ': Expected ' + + englishList(expectedTypes) + ' but got ' + valueType) +} + +function englishList (list) { + return list.join(', ').replace(/, ([^,]+)$/, ' or $1') +} + +function 
wrongNumberOfArgs (expected, got) { + var english = englishList(expected) + var args = expected.every(function (ex) { return ex.length === 1 }) + ? 'argument' + : 'arguments' + return newException('EWRONGARGCOUNT', 'Expected ' + english + ' ' + args + ' but got ' + got) +} + +function moreThanOneError (schema) { + return newException('ETOOMANYERRORTYPES', + 'Only one error type per argument signature is allowed, more than one found in "' + schema + '"') +} + +function newException (code, msg) { + var e = new Error(msg) + e.code = code + if (Error.captureStackTrace) Error.captureStackTrace(e, validate) + return e +} diff --git a/node_modules/gauge/node_modules/aproba/package.json b/node_modules/gauge/node_modules/aproba/package.json new file mode 100644 index 0000000000000..f654576f8eda8 --- /dev/null +++ b/node_modules/gauge/node_modules/aproba/package.json @@ -0,0 +1,62 @@ +{ + "_from": "aproba@^1.0.3", + "_id": "aproba@1.2.0", + "_inBundle": false, + "_integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==", + "_location": "/gauge/aproba", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "aproba@^1.0.3", + "name": "aproba", + "escapedName": "aproba", + "rawSpec": "^1.0.3", + "saveSpec": null, + "fetchSpec": "^1.0.3" + }, + "_requiredBy": [ + "/gauge" + ], + "_resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "_shasum": "6802e6264efd18c790a1b0d517f0f2627bf2c94a", + "_spec": "aproba@^1.0.3", + "_where": "/Users/aeschright/code/cli/node_modules/gauge", + "author": { + "name": "Rebecca Turner", + "email": "me@re-becca.org" + }, + "bugs": { + "url": "https://github.com/iarna/aproba/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "A ridiculously light-weight argument validator (now browser friendly)", + "devDependencies": { + "standard": "^10.0.3", + "tap": "^10.0.2" + }, + "directories": { + "test": "test" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/iarna/aproba", + "keywords": [ + "argument", + "validate" + ], + "license": "ISC", + "main": "index.js", + "name": "aproba", + "repository": { + "type": "git", + "url": "git+https://github.com/iarna/aproba.git" + }, + "scripts": { + "test": "standard && tap -j3 test/*.js" + }, + "version": "1.2.0" +} diff --git a/node_modules/genfun/CHANGELOG.md b/node_modules/genfun/CHANGELOG.md index c72d719ea385a..461e22fc59626 100644 --- a/node_modules/genfun/CHANGELOG.md +++ b/node_modules/genfun/CHANGELOG.md @@ -2,6 +2,23 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ +# [5.0.0](https://github.com/zkat/genfun/compare/v4.0.1...v5.0.0) (2017-12-12) + + +### Bug Fixes + +* **license:** relicense to MIT ([857e720](https://github.com/zkat/genfun/commit/857e720)) +* **platforms:** drop support for node 4 and 7 ([2cdbe32](https://github.com/zkat/genfun/commit/2cdbe32)) + + +### BREAKING CHANGES + +* **platforms:** node 4 and node 7 are no longer officially supported +* **license:** license changed from CC0-1.0 to MIT + + + ## [4.0.1](https://github.com/zkat/genfun/compare/v4.0.0...v4.0.1) (2017-04-16) diff --git a/node_modules/genfun/LICENSE b/node_modules/genfun/LICENSE new file mode 100644 index 0000000000000..ab41caa64b86c --- /dev/null +++ b/node_modules/genfun/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) +Copyright (c) 2017 Kat Marchán + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE +OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/genfun/lib/method.js b/node_modules/genfun/lib/method.js index 5a9d9f788ed88..eddb7d325370e 100644 --- a/node_modules/genfun/lib/method.js +++ b/node_modules/genfun/lib/method.js @@ -46,13 +46,9 @@ function Method (genfun, selector, func) { } else { method.minimalSelector++ if (!Object.hasOwnProperty.call(object, Role.roleKeyName)) { - if (Object.defineProperty) { - // Object.defineProperty is JS 1.8.0+ - Object.defineProperty( - object, Role.roleKeyName, {value: [], enumerable: false}) - } else { - object[Role.roleKeyName] = [] - } + // Object.defineProperty is JS 1.8.0+ + Object.defineProperty( + object, Role.roleKeyName, {value: [], enumerable: false}) } // XXX HACK - no method replacement now, so we just shove // it in a place where it'll always show up first. 
This diff --git a/node_modules/genfun/package.json b/node_modules/genfun/package.json index 60295ade5dd31..4a557eb45a3f9 100644 --- a/node_modules/genfun/package.json +++ b/node_modules/genfun/package.json @@ -1,27 +1,27 @@ { - "_from": "genfun@^4.0.1", - "_id": "genfun@4.0.1", + "_from": "genfun@^5.0.0", + "_id": "genfun@5.0.0", "_inBundle": false, - "_integrity": "sha1-7RAEHy5KfxsKOEZtF6XD4n3x38E=", + "_integrity": "sha512-KGDOARWVga7+rnB3z9Sd2Letx515owfk0hSxHGuqjANb1M+x2bGZGqHLiozPsYMdM2OubeMni/Hpwmjq6qIUhA==", "_location": "/genfun", "_phantomChildren": {}, "_requested": { "type": "range", "registry": true, - "raw": "genfun@^4.0.1", + "raw": "genfun@^5.0.0", "name": "genfun", "escapedName": "genfun", - "rawSpec": "^4.0.1", + "rawSpec": "^5.0.0", "saveSpec": null, - "fetchSpec": "^4.0.1" + "fetchSpec": "^5.0.0" }, "_requiredBy": [ "/protoduck" ], - "_resolved": "https://registry.npmjs.org/genfun/-/genfun-4.0.1.tgz", - "_shasum": "ed10041f2e4a7f1b0a38466d17a5c3e27df1dfc1", - "_spec": "genfun@^4.0.1", - "_where": "/Users/rebecca/code/npm/node_modules/protoduck", + "_resolved": "https://registry.npmjs.org/genfun/-/genfun-5.0.0.tgz", + "_shasum": "9dd9710a06900a5c4a5bf57aca5da4e52fe76537", + "_spec": "genfun@^5.0.0", + "_where": "/Users/zkat/Documents/code/work/npm/node_modules/protoduck", "author": { "name": "Kat Marchán", "email": "kzm@sykosomatic.org" @@ -59,7 +59,7 @@ "polymorphic", "protocols" ], - "license": "CC0-1.0", + "license": "MIT", "main": "lib/genfun.js", "name": "genfun", "repository": { @@ -75,5 +75,5 @@ "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" }, - "version": "4.0.1" + "version": "5.0.0" } diff --git a/node_modules/gentle-fs/CHANGELOG.md b/node_modules/gentle-fs/CHANGELOG.md index e9bb23d98b891..50dfcd74c99f1 100644 --- a/node_modules/gentle-fs/CHANGELOG.md +++ b/node_modules/gentle-fs/CHANGELOG.md @@ -2,6 +2,51 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ +# [2.3.0](https://github.com/npm/gentle-fs/compare/v2.2.1...v2.3.0) (2019-12-11) + + +### Features + +* add option to gently create bin links/shims ([a929196](https://github.com/npm/gentle-fs/commit/a929196)) + + + + +## [2.2.1](https://github.com/npm/gentle-fs/compare/v2.2.0...v2.2.1) (2019-08-15) + + +### Bug Fixes + +* **link:** properly detect that we should chown the link ([1c69beb](https://github.com/npm/gentle-fs/commit/1c69beb)) + + + + +# [2.2.0](https://github.com/npm/gentle-fs/compare/v2.1.0...v2.2.0) (2019-08-14) + + +### Bug Fixes + +* don't chown if we didn't make any dirs ([c4df8a8](https://github.com/npm/gentle-fs/commit/c4df8a8)) + + +### Features + +* export mkdir method ([4891c09](https://github.com/npm/gentle-fs/commit/4891c09)) + + + + +# [2.1.0](https://github.com/npm/gentle-fs/compare/v2.0.1...v2.1.0) (2019-08-14) + + +### Features + +* infer ownership of created dirs and links ([0dd2879](https://github.com/npm/gentle-fs/commit/0dd2879)) + + + ## [2.0.1](https://github.com/npm/gentle-fs/compare/v2.0.0...v2.0.1) (2017-11-28) diff --git a/node_modules/gentle-fs/index.js b/node_modules/gentle-fs/index.js index 2828fdb2bd318..4aeda1c846bf2 100644 --- a/node_modules/gentle-fs/index.js +++ b/node_modules/gentle-fs/index.js @@ -2,9 +2,13 @@ const rm = require('./lib/rm.js') const link = require('./lib/link.js') +const mkdir = require('./lib/mkdir.js') +const binLink = require('./lib/bin-link.js') exports = module.exports = { rm: rm, link: link.link, - linkIfExists: link.linkIfExists + linkIfExists: link.linkIfExists, + mkdir: mkdir, + binLink: binLink } diff --git a/node_modules/gentle-fs/lib/bin-link.js b/node_modules/gentle-fs/lib/bin-link.js new file mode 100644 index 0000000000000..104c5b6c935c8 --- /dev/null +++ b/node_modules/gentle-fs/lib/bin-link.js @@ -0,0 +1,96 @@ +'use strict' +// calls linkIfExists on unix, or cmdShimIfExists on Windows +// reads the cmd shim to ensure it's where we need it to be in the case of +// top level global packages + +const readCmdShim = require('read-cmd-shim') +const cmdShim = require('cmd-shim') +const {linkIfExists} = require('./link.js') + +const binLink = (from, to, opts, cb) => { + // just for testing + const platform = opts._FAKE_PLATFORM_ || process.platform + if (platform !== 'win32') { + return linkIfExists(from, to, opts, cb) + } + + if (!opts.clobberLinkGently || + opts.force === true || + !opts.gently || + typeof opts.gently !== 'string') { + // easy, just go ahead and delete anything in the way + return cmdShim.ifExists(from, to, cb) + } + + // read all three shim targets + // if any exist, and are not a shim to our gently folder, then + // exit with a simulated EEXIST error. + + const shimFiles = [ + to, + to + '.cmd', + to + '.ps1' + ] + + // call this once we've checked all three, if we're good + const done = () => cmdShim.ifExists(from, to, cb) + const then = times(3, done, cb) + shimFiles.forEach(to => isClobberable(from, to, opts, then)) +} + +const times = (n, ok, cb) => { + let errState = null + return er => { + if (!errState) { + if (er) { + cb(errState = er) + } else if (--n === 0) { + ok() + } + } + } +} + +const isClobberable = (from, to, opts, cb) => { + readCmdShim(to, (er, target) => { + // either going to get an error, or the target of where this + // cmd shim points. 
+ // shim, not in opts.gently: simulate EEXIST + // not a shim: simulate EEXIST + // ENOENT: fine, move forward + // shim in opts.gently: fine + if (er) { + switch (er.code) { + case 'ENOENT': + // totally fine, nothing there to clobber + return cb() + case 'ENOTASHIM': + // something is there, and it's not one of ours + return cb(simulateEEXIST(from, to)) + default: + // would probably fail this way later anyway + // can't read the file, likely can't write it either + return cb(er) + } + } + // no error, check the target + if (target.indexOf(opts.gently) !== 0) { + return cb(simulateEEXIST(from, to)) + } + // ok! it's one of ours. + return cb() + }) +} + +const simulateEEXIST = (from, to) => { + // simulate the EEXIST we'd get from fs.symlink to the file + const err = new Error('EEXIST: file already exists, cmd shim \'' + + from + '\' -> \'' + to + '\'') + + err.code = 'EEXIST' + err.path = from + err.dest = to + return err +} + +module.exports = binLink diff --git a/node_modules/gentle-fs/lib/chown.js b/node_modules/gentle-fs/lib/chown.js new file mode 100644 index 0000000000000..5921e56345769 --- /dev/null +++ b/node_modules/gentle-fs/lib/chown.js @@ -0,0 +1,24 @@ +'use strict' + +// A module for chowning things we just created, to preserve +// ownership of new links and directories. + +const chownr = require('chownr') + +const selfOwner = { + uid: process.getuid && process.getuid(), + gid: process.getgid && process.getgid() +} + +module.exports = (path, uid, gid, cb) => { + if (selfOwner.uid !== 0 || + uid === undefined || gid === undefined || + (selfOwner.uid === uid && selfOwner.gid === gid)) { + // don't need to, or can't chown anyway, so just leave it. + // this also handles platforms where process.getuid is undefined + return cb() + } + chownr(path, uid, gid, cb) +} + +module.exports.selfOwner = selfOwner diff --git a/node_modules/gentle-fs/lib/link.js b/node_modules/gentle-fs/lib/link.js index 246d801479a11..7cdfef4ca95d3 100644 --- a/node_modules/gentle-fs/lib/link.js +++ b/node_modules/gentle-fs/lib/link.js @@ -3,8 +3,10 @@ const path = require('path') const fs = require('graceful-fs') const chain = require('slide').chain -const mkdir = require('mkdirp') +const mkdir = require('./mkdir.js') const rm = require('./rm.js') +const inferOwner = require('infer-owner') +const chown = require('./chown.js') exports = module.exports = { link: link, @@ -12,15 +14,22 @@ exports = module.exports = { } function linkIfExists (from, to, opts, cb) { + opts.currentIsLink = false + opts.currentExists = false fs.stat(from, function (er) { if (er) return cb() fs.readlink(to, function (er, fromOnDisk) { + if (!er || er.code !== 'ENOENT') { + opts.currentExists = true + } // if the link already exists and matches what we would do, // we don't need to do anything if (!er) { + opts.currentIsLink = true var toDir = path.dirname(to) var absoluteFrom = path.resolve(toDir, from) var absoluteFromOnDisk = path.resolve(toDir, fromOnDisk) + opts.currentTarget = absoluteFromOnDisk if (absoluteFrom === absoluteFromOnDisk) return cb() } link(from, to, opts, cb) @@ -53,14 +62,65 @@ function link (from, to, opts, cb) { var relativeTarget = path.relative(opts.base, absTarget) var target = opts.absolute ? 
absTarget : relativeTarget - chain( - [ - [ensureFromIsNotSource, absTarget, to], - [fs, 'stat', absTarget], - [rm, to, opts], - [mkdir, path.dirname(to)], - [fs, 'symlink', target, to, 'junction'] - ], - cb - ) + const tasks = [ + [ensureFromIsNotSource, absTarget, to], + [fs, 'stat', absTarget], + [clobberLinkGently, from, to, opts], + [mkdir, path.dirname(to)], + [fs, 'symlink', target, to, 'junction'] + ] + + if (chown.selfOwner.uid !== 0) { + chain(tasks, cb) + } else { + inferOwner(to).then(owner => { + tasks.push([chown, to, owner.uid, owner.gid]) + chain(tasks, cb) + }) + } +} + +exports._clobberLinkGently = clobberLinkGently +function clobberLinkGently (from, to, opts, cb) { + if (opts.currentExists === false) { + // nothing to clobber! + opts.log.silly('gently link', 'link does not already exist', { + link: to, + target: from + }) + return cb() + } + + if (!opts.clobberLinkGently || + opts.force === true || + !opts.gently || + typeof opts.gently !== 'string') { + opts.log.silly('gently link', 'deleting existing link forcefully', { + link: to, + target: from, + force: opts.force, + gently: opts.gently, + clobberLinkGently: opts.clobberLinkGently + }) + return rm(to, opts, cb) + } + + if (!opts.currentIsLink) { + opts.log.verbose('gently link', 'cannot remove, not a link', to) + // don't delete. it'll fail with EEXIST when it tries to symlink. + return cb() + } + + if (opts.currentTarget.indexOf(opts.gently) === 0) { + opts.log.silly('gently link', 'delete existing link', to) + return rm(to, opts, cb) + } else { + opts.log.verbose('gently link', 'refusing to delete existing link', { + link: to, + currentTarget: opts.currentTarget, + newTarget: from, + gently: opts.gently + }) + return cb() + } } diff --git a/node_modules/gentle-fs/lib/mkdir.js b/node_modules/gentle-fs/lib/mkdir.js new file mode 100644 index 0000000000000..5b419959716bd --- /dev/null +++ b/node_modules/gentle-fs/lib/mkdir.js @@ -0,0 +1,22 @@ +'use strict' + +const mkdirp = require('mkdirp') +const inferOwner = require('infer-owner') +const chown = require('./chown.js') + +module.exports = (path, cb) => { + // don't bother chowning if we can't anyway + if (process.platform === 'win32' || chown.selfOwner.uid !== 0) { + return mkdirp(path, cb) + } + + inferOwner(path).then(owner => { + mkdirp(path, (er, made) => { + if (er || !made) { + cb(er, made) + } else { + chown(made || path, owner.uid, owner.gid, cb) + } + }) + }, cb) +} diff --git a/node_modules/gentle-fs/node_modules/aproba/LICENSE b/node_modules/gentle-fs/node_modules/aproba/LICENSE new file mode 100644 index 0000000000000..f4be44d881b2d --- /dev/null +++ b/node_modules/gentle-fs/node_modules/aproba/LICENSE @@ -0,0 +1,14 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
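As an aside for reviewers (not part of the patch itself), here is a small sketch of how the gentle-fs surface added above might be called, based only on the signatures visible in this diff: `mkdir(path, cb)` and `binLink(from, to, opts, cb)`, with the option names (`gently`, `clobberLinkGently`, `force`, `base`, `log`) taken from `link.js` and `bin-link.js`. All paths are made up, and any logger exposing `silly`/`verbose` (npmlog here) should do.

```js
// Rough usage sketch of the new gentle-fs exports; paths are illustrative.
'use strict'
const os = require('os')
const path = require('path')
const gentleFs = require('gentle-fs')
const log = require('npmlog')

const base = path.join(os.tmpdir(), 'gentle-fs-demo')
const globalRoot = path.join(base, 'lib', 'node_modules')

const opts = {
  gently: globalRoot,      // only links pointing under this root may be clobbered
  clobberLinkGently: true, // opt in to the behaviour added in 2.3.0
  force: false,
  base: base,              // used by link.js to compute relative link targets
  log: log
}

// mkdir(path, cb) creates the directory and, when running as root,
// chowns it to the inferred owner (see lib/mkdir.js above).
gentleFs.mkdir(path.join(globalRoot, 'example'), (er) => {
  if (er) return log.error('demo', er.message)

  // binLink(from, to, opts, cb) symlinks on unix and writes cmd shims on
  // Windows, refusing to clobber existing links it does not own.
  gentleFs.binLink(
    path.join(globalRoot, 'example', 'bin', 'example.js'),
    path.join(base, 'bin', 'example'),
    opts,
    (er) => {
      if (er) return log.error('demo', er.message)
      log.info('demo', 'bin link handled gently')
    }
  )
})
```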
+ diff --git a/node_modules/gentle-fs/node_modules/aproba/README.md b/node_modules/gentle-fs/node_modules/aproba/README.md new file mode 100644 index 0000000000000..0bfc594c56a37 --- /dev/null +++ b/node_modules/gentle-fs/node_modules/aproba/README.md @@ -0,0 +1,94 @@ +aproba +====== + +A ridiculously light-weight function argument validator + +``` +var validate = require("aproba") + +function myfunc(a, b, c) { + // `a` must be a string, `b` a number, `c` a function + validate('SNF', arguments) // [a,b,c] is also valid +} + +myfunc('test', 23, function () {}) // ok +myfunc(123, 23, function () {}) // type error +myfunc('test', 23) // missing arg error +myfunc('test', 23, function () {}, true) // too many args error + +``` + +Valid types are: + +| type | description +| :--: | :---------- +| * | matches any type +| A | `Array.isArray` OR an `arguments` object +| S | typeof == string +| N | typeof == number +| F | typeof == function +| O | typeof == object and not type A and not type E +| B | typeof == boolean +| E | `instanceof Error` OR `null` **(special: see below)** +| Z | == `null` + +Validation failures throw one of three exception types, distinguished by a +`code` property of `EMISSINGARG`, `EINVALIDTYPE` or `ETOOMANYARGS`. + +If you pass in an invalid type then it will throw with a code of +`EUNKNOWNTYPE`. + +If an **error** argument is found and is not null then the remaining +arguments are optional. That is, if you say `ESO` then that's like using a +non-magical `E` in: `E|ESO|ZSO`. + +### But I have optional arguments?! + +You can provide more than one signature by separating them with pipes `|`. +If any signature matches the arguments then they'll be considered valid. + +So for example, say you wanted to write a signature for +`fs.createWriteStream`. The docs for it describe it thusly: + +``` +fs.createWriteStream(path[, options]) +``` + +This would be a signature of `SO|S`. That is, a string and and object, or +just a string. + +Now, if you read the full `fs` docs, you'll see that actually path can ALSO +be a buffer. And options can be a string, that is: +``` +path | +options | +``` + +To reproduce this you have to fully enumerate all of the possible +combinations and that implies a signature of `SO|SS|OO|OS|S|O`. The +awkwardness is a feature: It reminds you of the complexity you're adding to +your API when you do this sort of thing. + + +### Browser support + +This has no dependencies and should work in browsers, though you'll have +noisier stack traces. + +### Why this exists + +I wanted a very simple argument validator. It needed to do two things: + +1. Be more concise and easier to use than assertions + +2. Not encourage an infinite bikeshed of DSLs + +This is why types are specified by a single character and there's no such +thing as an optional argument. + +This is not intended to validate user data. This is specifically about +asserting the interface of your functions. + +If you need greater validation, I encourage you to write them by hand or +look elsewhere. 
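The second copy of the aproba README (vendored again under gentle-fs) also describes the special `E` type. Since that rule is easy to misread, here is a tiny illustration, hedged as above: `done` and its arguments are hypothetical, and the behaviour noted in the comments follows the `index.js` shipped in this diff.

```js
// Sketch of the error-first 'E' rule described above.
var validate = require('aproba')

function done (er, result, count) {
  // 'ESN' behaves like 'E|ESN|ZSN': with a real Error the remaining
  // arguments are optional; with a null error they are required.
  validate('ESN', arguments)
}

done(new Error('boom')) // ok: non-null error, rest omitted
done(null, 'hits', 3)   // ok: null error plus the full signature
// done(null)           // throws with code 'EINVALIDTYPE' (null alone is not an Error)
```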
+ diff --git a/node_modules/gentle-fs/node_modules/aproba/index.js b/node_modules/gentle-fs/node_modules/aproba/index.js new file mode 100644 index 0000000000000..6f3f797c09a75 --- /dev/null +++ b/node_modules/gentle-fs/node_modules/aproba/index.js @@ -0,0 +1,105 @@ +'use strict' + +function isArguments (thingy) { + return thingy != null && typeof thingy === 'object' && thingy.hasOwnProperty('callee') +} + +var types = { + '*': {label: 'any', check: function () { return true }}, + A: {label: 'array', check: function (thingy) { return Array.isArray(thingy) || isArguments(thingy) }}, + S: {label: 'string', check: function (thingy) { return typeof thingy === 'string' }}, + N: {label: 'number', check: function (thingy) { return typeof thingy === 'number' }}, + F: {label: 'function', check: function (thingy) { return typeof thingy === 'function' }}, + O: {label: 'object', check: function (thingy) { return typeof thingy === 'object' && thingy != null && !types.A.check(thingy) && !types.E.check(thingy) }}, + B: {label: 'boolean', check: function (thingy) { return typeof thingy === 'boolean' }}, + E: {label: 'error', check: function (thingy) { return thingy instanceof Error }}, + Z: {label: 'null', check: function (thingy) { return thingy == null }} +} + +function addSchema (schema, arity) { + var group = arity[schema.length] = arity[schema.length] || [] + if (group.indexOf(schema) === -1) group.push(schema) +} + +var validate = module.exports = function (rawSchemas, args) { + if (arguments.length !== 2) throw wrongNumberOfArgs(['SA'], arguments.length) + if (!rawSchemas) throw missingRequiredArg(0, 'rawSchemas') + if (!args) throw missingRequiredArg(1, 'args') + if (!types.S.check(rawSchemas)) throw invalidType(0, ['string'], rawSchemas) + if (!types.A.check(args)) throw invalidType(1, ['array'], args) + var schemas = rawSchemas.split('|') + var arity = {} + + schemas.forEach(function (schema) { + for (var ii = 0; ii < schema.length; ++ii) { + var type = schema[ii] + if (!types[type]) throw unknownType(ii, type) + } + if (/E.*E/.test(schema)) throw moreThanOneError(schema) + addSchema(schema, arity) + if (/E/.test(schema)) { + addSchema(schema.replace(/E.*$/, 'E'), arity) + addSchema(schema.replace(/E/, 'Z'), arity) + if (schema.length === 1) addSchema('', arity) + } + }) + var matching = arity[args.length] + if (!matching) { + throw wrongNumberOfArgs(Object.keys(arity), args.length) + } + for (var ii = 0; ii < args.length; ++ii) { + var newMatching = matching.filter(function (schema) { + var type = schema[ii] + var typeCheck = types[type].check + return typeCheck(args[ii]) + }) + if (!newMatching.length) { + var labels = matching.map(function (schema) { + return types[schema[ii]].label + }).filter(function (schema) { return schema != null }) + throw invalidType(ii, labels, args[ii]) + } + matching = newMatching + } +} + +function missingRequiredArg (num) { + return newException('EMISSINGARG', 'Missing required argument #' + (num + 1)) +} + +function unknownType (num, type) { + return newException('EUNKNOWNTYPE', 'Unknown type ' + type + ' in argument #' + (num + 1)) +} + +function invalidType (num, expectedTypes, value) { + var valueType + Object.keys(types).forEach(function (typeCode) { + if (types[typeCode].check(value)) valueType = types[typeCode].label + }) + return newException('EINVALIDTYPE', 'Argument #' + (num + 1) + ': Expected ' + + englishList(expectedTypes) + ' but got ' + valueType) +} + +function englishList (list) { + return list.join(', ').replace(/, ([^,]+)$/, ' or $1') +} + 
+function wrongNumberOfArgs (expected, got) { + var english = englishList(expected) + var args = expected.every(function (ex) { return ex.length === 1 }) + ? 'argument' + : 'arguments' + return newException('EWRONGARGCOUNT', 'Expected ' + english + ' ' + args + ' but got ' + got) +} + +function moreThanOneError (schema) { + return newException('ETOOMANYERRORTYPES', + 'Only one error type per argument signature is allowed, more than one found in "' + schema + '"') +} + +function newException (code, msg) { + var e = new Error(msg) + e.code = code + if (Error.captureStackTrace) Error.captureStackTrace(e, validate) + return e +} diff --git a/node_modules/gentle-fs/node_modules/aproba/package.json b/node_modules/gentle-fs/node_modules/aproba/package.json new file mode 100644 index 0000000000000..34b51a0df22ae --- /dev/null +++ b/node_modules/gentle-fs/node_modules/aproba/package.json @@ -0,0 +1,62 @@ +{ + "_from": "aproba@^1.1.2", + "_id": "aproba@1.2.0", + "_inBundle": false, + "_integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==", + "_location": "/gentle-fs/aproba", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "aproba@^1.1.2", + "name": "aproba", + "escapedName": "aproba", + "rawSpec": "^1.1.2", + "saveSpec": null, + "fetchSpec": "^1.1.2" + }, + "_requiredBy": [ + "/gentle-fs" + ], + "_resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "_shasum": "6802e6264efd18c790a1b0d517f0f2627bf2c94a", + "_spec": "aproba@^1.1.2", + "_where": "/Users/aeschright/code/cli/node_modules/gentle-fs", + "author": { + "name": "Rebecca Turner", + "email": "me@re-becca.org" + }, + "bugs": { + "url": "https://github.com/iarna/aproba/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "A ridiculously light-weight argument validator (now browser friendly)", + "devDependencies": { + "standard": "^10.0.3", + "tap": "^10.0.2" + }, + "directories": { + "test": "test" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/iarna/aproba", + "keywords": [ + "argument", + "validate" + ], + "license": "ISC", + "main": "index.js", + "name": "aproba", + "repository": { + "type": "git", + "url": "git+https://github.com/iarna/aproba.git" + }, + "scripts": { + "test": "standard && tap -j3 test/*.js" + }, + "version": "1.2.0" +} diff --git a/node_modules/gentle-fs/package.json b/node_modules/gentle-fs/package.json index 55bc6bd40eca1..d162899757b0f 100644 --- a/node_modules/gentle-fs/package.json +++ b/node_modules/gentle-fs/package.json @@ -1,49 +1,50 @@ { - "_args": [ - [ - "gentle-fs@2.0.1", - "/Users/rebecca/code/npm" - ] - ], - "_from": "gentle-fs@2.0.1", - "_id": "gentle-fs@2.0.1", + "_from": "gentle-fs@2.3.0", + "_id": "gentle-fs@2.3.0", "_inBundle": false, - "_integrity": "sha512-cEng5+3fuARewXktTEGbwsktcldA+YsnUEaXZwcK/3pjSE1X9ObnTs+/8rYf8s+RnIcQm2D5x3rwpN7Zom8Bew==", + "_integrity": "sha512-3k2CgAmPxuz7S6nKK+AqFE2AdM1QuwqKLPKzIET3VRwK++3q96MsNFobScDjlCrq97ZJ8y5R725MOlm6ffUCjg==", "_location": "/gentle-fs", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "gentle-fs@2.0.1", + "raw": "gentle-fs@2.3.0", "name": "gentle-fs", "escapedName": "gentle-fs", - "rawSpec": "2.0.1", + "rawSpec": "2.3.0", "saveSpec": null, - "fetchSpec": "2.0.1" + "fetchSpec": "2.3.0" }, "_requiredBy": [ + "#USER", "/", "/bin-links" ], - "_resolved": "https://registry.npmjs.org/gentle-fs/-/gentle-fs-2.0.1.tgz", - "_spec": "2.0.1", 
- "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/gentle-fs/-/gentle-fs-2.3.0.tgz", + "_shasum": "13538db5029400f98684be4894e8a7d8f0d1ea7f", + "_spec": "gentle-fs@2.3.0", + "_where": "/Users/isaacs/dev/npm/cli", "author": { "name": "Mike Sherov" }, "bugs": { "url": "https://github.com/npm/gentle-fs/issues" }, + "bundleDependencies": false, "dependencies": { "aproba": "^1.1.2", + "chownr": "^1.1.2", + "cmd-shim": "^3.0.3", "fs-vacuum": "^1.2.10", "graceful-fs": "^4.1.11", "iferr": "^0.1.5", + "infer-owner": "^1.0.4", "mkdirp": "^0.5.1", "path-is-inside": "^1.0.2", "read-cmd-shim": "^1.0.1", "slide": "^1.1.6" }, + "deprecated": false, "description": "Gentle Filesystem operations", "devDependencies": { "dezalgo": "^1.0.3", @@ -74,12 +75,12 @@ }, "scripts": { "postrelease": "npm publish && git push --follow-tags", + "posttest": "standard", "prerelease": "npm t", - "pretest": "standard", "release": "standard-version -s", - "test": "tap -J --nyc-arg=--all --coverage test/*.js test/**/*.js", + "test": "tap -J --nyc-arg=--all --coverage test", "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" }, - "version": "2.0.1" + "version": "2.3.0" } diff --git a/node_modules/get-caller-file/LICENSE.md b/node_modules/get-caller-file/LICENSE.md new file mode 100644 index 0000000000000..bf3e1c071bae4 --- /dev/null +++ b/node_modules/get-caller-file/LICENSE.md @@ -0,0 +1,6 @@ +ISC License (ISC) +Copyright 2018 Stefan Penner + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/get-caller-file/README.md b/node_modules/get-caller-file/README.md index c32df54c39135..194492736bc4f 100644 --- a/node_modules/get-caller-file/README.md +++ b/node_modules/get-caller-file/README.md @@ -1,4 +1,4 @@ # get-caller-file -[![Build Status](https://travis-ci.org/ember-cli/ember-cli.svg?branch=master)](https://travis-ci.org/ember-cli/ember-cli) +[![Build Status](https://travis-ci.org/stefanpenner/get-caller-file.svg?branch=master)](https://travis-ci.org/stefanpenner/get-caller-file) [![Build status](https://ci.appveyor.com/api/projects/status/ol2q94g1932cy14a/branch/master?svg=true)](https://ci.appveyor.com/project/embercli/get-caller-file/branch/master) diff --git a/node_modules/get-caller-file/package.json b/node_modules/get-caller-file/package.json index d5c90d52fca1e..e853deec1b056 100644 --- a/node_modules/get-caller-file/package.json +++ b/node_modules/get-caller-file/package.json @@ -1,8 +1,8 @@ { "_from": "get-caller-file@^1.0.1", - "_id": "get-caller-file@1.0.2", + "_id": "get-caller-file@1.0.3", "_inBundle": false, - "_integrity": "sha1-9wLmMSfn4jHBYKgMFVSstw1QR+U=", + "_integrity": "sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==", "_location": "/get-caller-file", "_phantomChildren": {}, "_requested": { @@ -18,10 +18,10 @@ "_requiredBy": [ "/yargs" ], - "_resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.2.tgz", - "_shasum": "f702e63127e7e231c160a80c1554acb70d5047e5", + "_resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", + "_shasum": "f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a", "_spec": "get-caller-file@^1.0.1", - "_where": "/Users/rebecca/code/npm/node_modules/yargs", + "_where": "/Users/mperrotte/npminc/cli/node_modules/yargs", "author": { "name": "Stefan Penner" }, @@ -30,11 +30,11 @@ }, "bundleDependencies": false, "deprecated": false, - "description": "[![Build Status](https://travis-ci.org/ember-cli/ember-cli.svg?branch=master)](https://travis-ci.org/ember-cli/ember-cli) [![Build status](https://ci.appveyor.com/api/projects/status/ol2q94g1932cy14a/branch/master?svg=true)](https://ci.appveyor.com/project/embercli/get-caller-file/branch/master)", + "description": "[![Build Status](https://travis-ci.org/stefanpenner/get-caller-file.svg?branch=master)](https://travis-ci.org/stefanpenner/get-caller-file) [![Build status](https://ci.appveyor.com/api/projects/status/ol2q94g1932cy14a/branch/master?svg=true)](https://ci.appveyor.com/project/embercli/get-caller-file/branch/master)", "devDependencies": { - "chai": "^3.4.1", + "chai": "^4.1.2", "ensure-posix-path": "^1.0.1", - "mocha": "^2.3.4" + "mocha": "^5.2.0" }, "directories": { "test": "tests" @@ -54,5 +54,5 @@ "test": "mocha test", "test:debug": "mocha test" }, - "version": "1.0.2" + "version": "1.0.3" } diff --git a/node_modules/get-stream/buffer-stream.js b/node_modules/get-stream/buffer-stream.js index ae45d3d9e7417..4121c8e56f9a3 100644 --- a/node_modules/get-stream/buffer-stream.js +++ b/node_modules/get-stream/buffer-stream.js @@ -1,11 +1,11 @@ 'use strict'; -const PassThrough = require('stream').PassThrough; +const {PassThrough} = require('stream'); -module.exports = opts => { - opts = Object.assign({}, opts); +module.exports = options => { + options = Object.assign({}, options); - const array = opts.array; - let encoding = opts.encoding; + const {array} = options; + let {encoding} = options; const buffer = encoding === 'buffer'; let objectMode = false; diff --git 
a/node_modules/get-stream/index.js b/node_modules/get-stream/index.js index 2dc5ee96af2d9..7e5584a63df07 100644 --- a/node_modules/get-stream/index.js +++ b/node_modules/get-stream/index.js @@ -1,51 +1,50 @@ 'use strict'; +const pump = require('pump'); const bufferStream = require('./buffer-stream'); -function getStream(inputStream, opts) { +class MaxBufferError extends Error { + constructor() { + super('maxBuffer exceeded'); + this.name = 'MaxBufferError'; + } +} + +function getStream(inputStream, options) { if (!inputStream) { return Promise.reject(new Error('Expected a stream')); } - opts = Object.assign({maxBuffer: Infinity}, opts); + options = Object.assign({maxBuffer: Infinity}, options); - const maxBuffer = opts.maxBuffer; - let stream; - let clean; + const {maxBuffer} = options; - const p = new Promise((resolve, reject) => { - const error = err => { - if (err) { // null check - err.bufferedData = stream.getBufferedValue(); + let stream; + return new Promise((resolve, reject) => { + const rejectPromise = error => { + if (error) { // A null check + error.bufferedData = stream.getBufferedValue(); } - - reject(err); + reject(error); }; - stream = bufferStream(opts); - inputStream.once('error', error); - inputStream.pipe(stream); + stream = pump(inputStream, bufferStream(options), error => { + if (error) { + rejectPromise(error); + return; + } + + resolve(); + }); stream.on('data', () => { if (stream.getBufferedLength() > maxBuffer) { - reject(new Error('maxBuffer exceeded')); + rejectPromise(new MaxBufferError()); } }); - stream.once('error', error); - stream.on('end', resolve); - - clean = () => { - // some streams doesn't implement the `stream.Readable` interface correctly - if (inputStream.unpipe) { - inputStream.unpipe(stream); - } - }; - }); - - p.then(clean, clean); - - return p.then(() => stream.getBufferedValue()); + }).then(() => stream.getBufferedValue()); } module.exports = getStream; -module.exports.buffer = (stream, opts) => getStream(stream, Object.assign({}, opts, {encoding: 'buffer'})); -module.exports.array = (stream, opts) => getStream(stream, Object.assign({}, opts, {array: true})); +module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'})); +module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true})); +module.exports.MaxBufferError = MaxBufferError; diff --git a/node_modules/get-stream/license b/node_modules/get-stream/license index 654d0bfe94343..e7af2f77107d7 100644 --- a/node_modules/get-stream/license +++ b/node_modules/get-stream/license @@ -1,21 +1,9 @@ -The MIT License (MIT) +MIT License Copyright (c) Sindre Sorhus (sindresorhus.com) -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject 
to the following conditions: -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/get-stream/package.json b/node_modules/get-stream/package.json index 3042176e14932..a34aa1daf0bfd 100644 --- a/node_modules/get-stream/package.json +++ b/node_modules/get-stream/package.json @@ -1,29 +1,35 @@ { - "_from": "get-stream@^3.0.0", - "_id": "get-stream@3.0.0", + "_from": "get-stream@", + "_id": "get-stream@4.1.0", "_inBundle": false, - "_integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=", + "_integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", "_location": "/get-stream", "_phantomChildren": {}, "_requested": { - "type": "range", + "type": "tag", "registry": true, - "raw": "get-stream@^3.0.0", + "raw": "get-stream@", "name": "get-stream", "escapedName": "get-stream", - "rawSpec": "^3.0.0", + "rawSpec": "", "saveSpec": null, - "fetchSpec": "^3.0.0" + "fetchSpec": "latest" }, "_requiredBy": [ - "/execa", - "/got", + "#DEV:/", + "#USER", + "/libnpm/libnpmhook", + "/libnpmaccess", + "/libnpmorg", + "/libnpmpublish", + "/libnpmsearch", + "/libnpmteam", "/pacote" ], - "_resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", - "_shasum": "8e943d1358dc37555054ecbe2edb05aa174ede14", - "_spec": "get-stream@^3.0.0", - "_where": "/Users/rebecca/code/npm/node_modules/pacote", + "_resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", + "_shasum": "c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5", + "_spec": "get-stream@", + "_where": "/Users/zkat/Documents/code/work/npm", "author": { "name": "Sindre Sorhus", "email": "sindresorhus@gmail.com", @@ -33,6 +39,9 @@ "url": "https://github.com/sindresorhus/get-stream/issues" }, "bundleDependencies": false, + "dependencies": { + "pump": "^3.0.0" + }, "deprecated": false, "description": "Get a stream as a string, buffer, or array", "devDependencies": { @@ -41,7 +50,7 @@ "xo": "*" }, "engines": { - "node": ">=4" + "node": ">=6" }, "files": [ "index.js", @@ -54,7 +63,6 @@ "promise", "concat", "string", - "str", "text", "buffer", "read", @@ -63,8 +71,7 @@ "readable", "readablestream", "array", - "object", - "obj" + "object" ], "license": "MIT", "name": "get-stream", @@ -75,8 +82,5 @@ "scripts": { "test": "xo && ava" }, - "version": "3.0.0", - "xo": { - "esnext": true - } + "version": "4.1.0" } diff --git a/node_modules/get-stream/readme.md 
b/node_modules/get-stream/readme.md index 73b188fb420f2..b87a4d37ce5b3 100644 --- a/node_modules/get-stream/readme.md +++ b/node_modules/get-stream/readme.md @@ -6,7 +6,7 @@ ## Install ``` -$ npm install --save get-stream +$ npm install get-stream ``` @@ -15,10 +15,11 @@ $ npm install --save get-stream ```js const fs = require('fs'); const getStream = require('get-stream'); -const stream = fs.createReadStream('unicorn.txt'); -getStream(stream).then(str => { - console.log(str); +(async () => { + const stream = fs.createReadStream('unicorn.txt'); + + console.log(await getStream(stream)); /* ,,))))))));, __)))))))))))))), @@ -40,7 +41,7 @@ getStream(stream).then(str => { \~\ ~~ */ -}); +})(); ``` @@ -54,6 +55,8 @@ Get the `stream` as a string. #### options +Type: `Object` + ##### encoding Type: `string`
    @@ -66,7 +69,7 @@ Default: `utf8` Type: `number`
    Default: `Infinity` -Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected. +Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected with a `getStream.MaxBufferError` error. ### getStream.buffer(stream, [options]) @@ -92,11 +95,14 @@ It honors both the `maxBuffer` and `encoding` options. The behavior changes slig If the input stream emits an `error` event, the promise will be rejected with the error. The buffered data will be attached to the `bufferedData` property of the error. ```js -getStream(streamThatErrorsAtTheEnd('unicorn')) - .catch(err => { - console.log(err.bufferedData); +(async () => { + try { + await getStream(streamThatErrorsAtTheEnd('unicorn')); + } catch (error) { + console.log(error.bufferedData); //=> 'unicorn' - }); + } +})() ``` diff --git a/node_modules/glob/LICENSE b/node_modules/glob/LICENSE index 19129e315fe59..42ca266df1d52 100644 --- a/node_modules/glob/LICENSE +++ b/node_modules/glob/LICENSE @@ -13,3 +13,9 @@ ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +## Glob Logo + +Glob's logo created by Tanya Brassie , licensed +under a Creative Commons Attribution-ShareAlike 4.0 International License +https://creativecommons.org/licenses/by-sa/4.0/ diff --git a/node_modules/glob/README.md b/node_modules/glob/README.md index baa1d1ba86506..0916a48255cd6 100644 --- a/node_modules/glob/README.md +++ b/node_modules/glob/README.md @@ -7,7 +7,7 @@ Match files using the patterns the shell uses, like stars and stuff. This is a glob implementation in JavaScript. It uses the `minimatch` library to do its matching. -![](oh-my-glob.gif) +![](logo/glob.png) ## Usage @@ -347,6 +347,11 @@ Users are thus advised not to use a glob result as a guarantee of filesystem state in the face of rapid changes. For the vast majority of operations, this is never a problem. +## Glob Logo +Glob's logo was created by [Tanya Brassie](http://tanyabrassie.com/). Logo files can be found [here](https://github.com/isaacs/node-glob/tree/master/logo). + +The logo is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/). + ## Contributing Any change to behavior (including bugfixes) must come with a test. 
@@ -366,3 +371,5 @@ npm run bench # to profile javascript npm run prof ``` + +![](oh-my-glob.gif) diff --git a/node_modules/glob/package.json b/node_modules/glob/package.json index 7c64de2751e5f..0e1fa5805bccb 100644 --- a/node_modules/glob/package.json +++ b/node_modules/glob/package.json @@ -1,19 +1,19 @@ { - "_from": "glob@7.1.3", - "_id": "glob@7.1.3", + "_from": "glob@7.1.6", + "_id": "glob@7.1.6", "_inBundle": false, - "_integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "_integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", "_location": "/glob", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "glob@7.1.3", + "raw": "glob@7.1.6", "name": "glob", "escapedName": "glob", - "rawSpec": "7.1.3", + "rawSpec": "7.1.6", "saveSpec": null, - "fetchSpec": "7.1.3" + "fetchSpec": "7.1.6" }, "_requiredBy": [ "#USER", @@ -21,20 +21,20 @@ "/cacache", "/deglob", "/eslint", - "/globby", "/init-package-json", "/node-gyp", - "/npm-registry-fetch/cacache", + "/nyc", "/pacote", "/read-package-json", "/rimraf", "/tap", - "/tap-mocha-reporter" + "/tap-mocha-reporter", + "/test-exclude" ], - "_resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", - "_shasum": "3960832d3f1574108342dafd3a67b332c0969df1", - "_spec": "glob@7.1.3", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "_shasum": "141f33b81a7c2492e125594307480c46679278a6", + "_spec": "glob@7.1.6", + "_where": "/Users/darcyclarke/Documents/Repos/npm/cli", "author": { "name": "Isaac Z. Schlueter", "email": "i@izs.me", @@ -68,6 +68,9 @@ "sync.js", "common.js" ], + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, "homepage": "https://github.com/isaacs/node-glob#readme", "license": "ISC", "main": "glob.js", @@ -85,5 +88,5 @@ "test": "tap test/*.js --cov", "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js" }, - "version": "7.1.3" + "version": "7.1.6" } diff --git a/node_modules/got/node_modules/get-stream/buffer-stream.js b/node_modules/got/node_modules/get-stream/buffer-stream.js new file mode 100644 index 0000000000000..ae45d3d9e7417 --- /dev/null +++ b/node_modules/got/node_modules/get-stream/buffer-stream.js @@ -0,0 +1,51 @@ +'use strict'; +const PassThrough = require('stream').PassThrough; + +module.exports = opts => { + opts = Object.assign({}, opts); + + const array = opts.array; + let encoding = opts.encoding; + const buffer = encoding === 'buffer'; + let objectMode = false; + + if (array) { + objectMode = !(encoding || buffer); + } else { + encoding = encoding || 'utf8'; + } + + if (buffer) { + encoding = null; + } + + let len = 0; + const ret = []; + const stream = new PassThrough({objectMode}); + + if (encoding) { + stream.setEncoding(encoding); + } + + stream.on('data', chunk => { + ret.push(chunk); + + if (objectMode) { + len = ret.length; + } else { + len += chunk.length; + } + }); + + stream.getBufferedValue = () => { + if (array) { + return ret; + } + + return buffer ? 
Buffer.concat(ret, len) : ret.join(''); + }; + + stream.getBufferedLength = () => len; + + return stream; +}; diff --git a/node_modules/got/node_modules/get-stream/index.js b/node_modules/got/node_modules/get-stream/index.js new file mode 100644 index 0000000000000..2dc5ee96af2d9 --- /dev/null +++ b/node_modules/got/node_modules/get-stream/index.js @@ -0,0 +1,51 @@ +'use strict'; +const bufferStream = require('./buffer-stream'); + +function getStream(inputStream, opts) { + if (!inputStream) { + return Promise.reject(new Error('Expected a stream')); + } + + opts = Object.assign({maxBuffer: Infinity}, opts); + + const maxBuffer = opts.maxBuffer; + let stream; + let clean; + + const p = new Promise((resolve, reject) => { + const error = err => { + if (err) { // null check + err.bufferedData = stream.getBufferedValue(); + } + + reject(err); + }; + + stream = bufferStream(opts); + inputStream.once('error', error); + inputStream.pipe(stream); + + stream.on('data', () => { + if (stream.getBufferedLength() > maxBuffer) { + reject(new Error('maxBuffer exceeded')); + } + }); + stream.once('error', error); + stream.on('end', resolve); + + clean = () => { + // some streams doesn't implement the `stream.Readable` interface correctly + if (inputStream.unpipe) { + inputStream.unpipe(stream); + } + }; + }); + + p.then(clean, clean); + + return p.then(() => stream.getBufferedValue()); +} + +module.exports = getStream; +module.exports.buffer = (stream, opts) => getStream(stream, Object.assign({}, opts, {encoding: 'buffer'})); +module.exports.array = (stream, opts) => getStream(stream, Object.assign({}, opts, {array: true})); diff --git a/node_modules/got/node_modules/get-stream/license b/node_modules/got/node_modules/get-stream/license new file mode 100644 index 0000000000000..654d0bfe94343 --- /dev/null +++ b/node_modules/got/node_modules/get-stream/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
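The `buffer-stream.js` and `index.js` sources just added under got's node_modules are the whole of the older get-stream@3 API. A short sketch of that version's behavior as shown above; the file name and the object-mode stream are illustrative assumptions, not taken from the patch:

```js
const fs = require('fs');
const {PassThrough} = require('stream');
const getStream = require('get-stream');

// String form: rejects with 'maxBuffer exceeded' once the limit is crossed;
// if the input stream itself errors, the partial content is on error.bufferedData.
getStream(fs.createReadStream('unicorn.txt'), {maxBuffer: 1024 * 1024})
  .then(text => console.log(text.length))
  .catch(error => console.error(error.message, error.bufferedData));

// Array form with no encoding set: buffer-stream switches to objectMode and
// collects the emitted values unmodified.
const objects = new PassThrough({objectMode: true});
objects.write({n: 1});
objects.write({n: 2});
objects.end();

getStream.array(objects).then(values => console.log(values)); // [ { n: 1 }, { n: 2 } ]
```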
diff --git a/node_modules/got/node_modules/get-stream/package.json b/node_modules/got/node_modules/get-stream/package.json new file mode 100644 index 0000000000000..e8eb498409e00 --- /dev/null +++ b/node_modules/got/node_modules/get-stream/package.json @@ -0,0 +1,80 @@ +{ + "_from": "get-stream@^3.0.0", + "_id": "get-stream@3.0.0", + "_inBundle": false, + "_integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=", + "_location": "/got/get-stream", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "get-stream@^3.0.0", + "name": "get-stream", + "escapedName": "get-stream", + "rawSpec": "^3.0.0", + "saveSpec": null, + "fetchSpec": "^3.0.0" + }, + "_requiredBy": [ + "/got" + ], + "_resolved": "http://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "_shasum": "8e943d1358dc37555054ecbe2edb05aa174ede14", + "_spec": "get-stream@^3.0.0", + "_where": "/Users/zkat/Documents/code/work/npm/node_modules/got", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/get-stream/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Get a stream as a string, buffer, or array", + "devDependencies": { + "ava": "*", + "into-stream": "^3.0.0", + "xo": "*" + }, + "engines": { + "node": ">=4" + }, + "files": [ + "index.js", + "buffer-stream.js" + ], + "homepage": "https://github.com/sindresorhus/get-stream#readme", + "keywords": [ + "get", + "stream", + "promise", + "concat", + "string", + "str", + "text", + "buffer", + "read", + "data", + "consume", + "readable", + "readablestream", + "array", + "object", + "obj" + ], + "license": "MIT", + "name": "get-stream", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/get-stream.git" + }, + "scripts": { + "test": "xo && ava" + }, + "version": "3.0.0", + "xo": { + "esnext": true + } +} diff --git a/node_modules/got/node_modules/get-stream/readme.md b/node_modules/got/node_modules/get-stream/readme.md new file mode 100644 index 0000000000000..73b188fb420f2 --- /dev/null +++ b/node_modules/got/node_modules/get-stream/readme.md @@ -0,0 +1,117 @@ +# get-stream [![Build Status](https://travis-ci.org/sindresorhus/get-stream.svg?branch=master)](https://travis-ci.org/sindresorhus/get-stream) + +> Get a stream as a string, buffer, or array + + +## Install + +``` +$ npm install --save get-stream +``` + + +## Usage + +```js +const fs = require('fs'); +const getStream = require('get-stream'); +const stream = fs.createReadStream('unicorn.txt'); + +getStream(stream).then(str => { + console.log(str); + /* + ,,))))))));, + __)))))))))))))), + \|/ -\(((((''''((((((((. + -*-==//////(('' . `)))))), + /|\ ))| o ;-. '((((( ,(, + ( `| / ) ;))))' ,_))^;(~ + | | | ,))((((_ _____------~~~-. %,;(;(>';'~ + o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~ + ; ''''```` `: `:::|\,__,%% );`'; ~ + | _ ) / `:|`----' `-' + ______/\/~ | / / + /~;;.____/;;' / ___--,-( `;;;/ + / // _;______;'------~~~~~ /;;/\ / + // | | / ; \;;,\ + (<_ | ; /',/-----' _> + \_| ||_ //~;~~~~~~~~~ + `\_| (,~~ + \~\ + ~~ + */ +}); +``` + + +## API + +The methods returns a promise that resolves when the `end` event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode. + +### getStream(stream, [options]) + +Get the `stream` as a string. + +#### options + +##### encoding + +Type: `string`
    +Default: `utf8` + +[Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream. + +##### maxBuffer + +Type: `number`
    +Default: `Infinity` + +Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected. + +### getStream.buffer(stream, [options]) + +Get the `stream` as a buffer. + +It honors the `maxBuffer` option as above, but it refers to byte length rather than string length. + +### getStream.array(stream, [options]) + +Get the `stream` as an array of values. + +It honors both the `maxBuffer` and `encoding` options. The behavior changes slightly based on the encoding chosen: + +- When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes). + +- When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array. + +- When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array. + + +## Errors + +If the input stream emits an `error` event, the promise will be rejected with the error. The buffered data will be attached to the `bufferedData` property of the error. + +```js +getStream(streamThatErrorsAtTheEnd('unicorn')) + .catch(err => { + console.log(err.bufferedData); + //=> 'unicorn' + }); +``` + + +## FAQ + +### How is this different from [`concat-stream`](https://github.com/maxogden/concat-stream)? + +This module accepts a stream instead of being one and returns a promise instead of using a callback. The API is simpler and it only supports returning a string, buffer, or array. It doesn't have a fragile type inference. You explicitly choose what you want. And it doesn't depend on the huge `readable-stream` package. 
+ + +## Related + +- [get-stdin](https://github.com/sindresorhus/get-stdin) - Get stdin as a string or buffer + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/graceful-fs/graceful-fs.js b/node_modules/graceful-fs/graceful-fs.js index ac206757e63c5..8c75ee259e091 100644 --- a/node_modules/graceful-fs/graceful-fs.js +++ b/node_modules/graceful-fs/graceful-fs.js @@ -3,10 +3,22 @@ var polyfills = require('./polyfills.js') var legacy = require('./legacy-streams.js') var clone = require('./clone.js') -var queue = [] - var util = require('util') +/* istanbul ignore next - node 0.x polyfill */ +var gracefulQueue +var previousSymbol + +/* istanbul ignore else - node 0.x polyfill */ +if (typeof Symbol === 'function' && typeof Symbol.for === 'function') { + gracefulQueue = Symbol.for('graceful-fs.queue') + // This is used in testing by future versions + previousSymbol = Symbol.for('graceful-fs.previous') +} else { + gracefulQueue = '___graceful-fs.queue' + previousSymbol = '___graceful-fs.previous' +} + function noop () {} var debug = noop @@ -19,11 +31,58 @@ else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) console.error(m) } -if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { - process.on('exit', function() { - debug(queue) - require('assert').equal(queue.length, 0) +// Once time initialization +if (!global[gracefulQueue]) { + // This queue can be shared by multiple loaded instances + var queue = [] + Object.defineProperty(global, gracefulQueue, { + get: function() { + return queue + } }) + + // Patch fs.close/closeSync to shared queue version, because we need + // to retry() whenever a close happens *anywhere* in the program. + // This is essential when multiple graceful-fs instances are + // in play at the same time. + fs.close = (function (fs$close) { + function close (fd, cb) { + return fs$close.call(fs, fd, function (err) { + // This function uses the graceful-fs shared queue + if (!err) { + retry() + } + + if (typeof cb === 'function') + cb.apply(this, arguments) + }) + } + + Object.defineProperty(close, previousSymbol, { + value: fs$close + }) + return close + })(fs.close) + + fs.closeSync = (function (fs$closeSync) { + function closeSync (fd) { + // This function uses the graceful-fs shared queue + fs$closeSync.apply(fs, arguments) + retry() + } + + Object.defineProperty(closeSync, previousSymbol, { + value: fs$closeSync + }) + return closeSync + })(fs.closeSync) + + if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { + process.on('exit', function() { + debug(global[gracefulQueue]) + require('assert').equal(global[gracefulQueue].length, 0) + }) + } } module.exports = patch(clone(fs)) @@ -32,45 +91,11 @@ if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) { fs.__patched = true; } -// Always patch fs.close/closeSync, because we want to -// retry() whenever a close happens *anywhere* in the program. -// This is essential when multiple graceful-fs instances are -// in play at the same time. -module.exports.close = (function (fs$close) { return function (fd, cb) { - return fs$close.call(fs, fd, function (err) { - if (!err) - retry() - - if (typeof cb === 'function') - cb.apply(this, arguments) - }) -}})(fs.close) - -module.exports.closeSync = (function (fs$closeSync) { return function (fd) { - // Note that graceful-fs also retries when fs.closeSync() fails. - // Looks like a bug to me, although it's probably a harmless one. 
- var rval = fs$closeSync.apply(fs, arguments) - retry() - return rval -}})(fs.closeSync) - -// Only patch fs once, otherwise we'll run into a memory leak if -// graceful-fs is loaded multiple times, such as in test environments that -// reset the loaded modules between tests. -// We look for the string `graceful-fs` from the comment above. This -// way we are not adding any extra properties and it will detect if older -// versions of graceful-fs are installed. -if (!/\bgraceful-fs\b/.test(fs.closeSync.toString())) { - fs.closeSync = module.exports.closeSync; - fs.close = module.exports.close; -} - function patch (fs) { // Everything that references the open() function needs to be in here polyfills(fs) fs.gracefulify = patch - fs.FileReadStream = ReadStream; // Legacy name. - fs.FileWriteStream = WriteStream; // Legacy name. + fs.createReadStream = createReadStream fs.createWriteStream = createWriteStream var fs$readFile = fs.readFile @@ -187,8 +212,50 @@ function patch (fs) { WriteStream.prototype.open = WriteStream$open } - fs.ReadStream = ReadStream - fs.WriteStream = WriteStream + Object.defineProperty(fs, 'ReadStream', { + get: function () { + return ReadStream + }, + set: function (val) { + ReadStream = val + }, + enumerable: true, + configurable: true + }) + Object.defineProperty(fs, 'WriteStream', { + get: function () { + return WriteStream + }, + set: function (val) { + WriteStream = val + }, + enumerable: true, + configurable: true + }) + + // legacy names + var FileReadStream = ReadStream + Object.defineProperty(fs, 'FileReadStream', { + get: function () { + return FileReadStream + }, + set: function (val) { + FileReadStream = val + }, + enumerable: true, + configurable: true + }) + var FileWriteStream = WriteStream + Object.defineProperty(fs, 'FileWriteStream', { + get: function () { + return FileWriteStream + }, + set: function (val) { + FileWriteStream = val + }, + enumerable: true, + configurable: true + }) function ReadStream (path, options) { if (this instanceof ReadStream) @@ -234,11 +301,11 @@ function patch (fs) { } function createReadStream (path, options) { - return new ReadStream(path, options) + return new fs.ReadStream(path, options) } function createWriteStream (path, options) { - return new WriteStream(path, options) + return new fs.WriteStream(path, options) } var fs$open = fs.open @@ -267,11 +334,11 @@ function patch (fs) { function enqueue (elem) { debug('ENQUEUE', elem[0].name, elem[1]) - queue.push(elem) + global[gracefulQueue].push(elem) } function retry () { - var elem = queue.shift() + var elem = global[gracefulQueue].shift() if (elem) { debug('RETRY', elem[0].name, elem[1]) elem[0].apply(null, elem[1]) diff --git a/node_modules/graceful-fs/package.json b/node_modules/graceful-fs/package.json index a17913f221043..788876e322073 100644 --- a/node_modules/graceful-fs/package.json +++ b/node_modules/graceful-fs/package.json @@ -1,19 +1,19 @@ { - "_from": "graceful-fs@4.1.15", - "_id": "graceful-fs@4.1.15", + "_from": "graceful-fs@4.2.3", + "_id": "graceful-fs@4.2.3", "_inBundle": false, - "_integrity": "sha512-6uHUhOPEBgQ24HM+r6b/QwWfZq+yiFcipKFrOFiBEnWdy5sdzYoi+pJeQaPI5qOLRFqWmAXUPQNsielzdLoecA==", + "_integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==", "_location": "/graceful-fs", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "graceful-fs@4.1.15", + "raw": "graceful-fs@4.2.3", "name": "graceful-fs", "escapedName": "graceful-fs", - "rawSpec": "4.1.15", + 
"rawSpec": "4.2.3", "saveSpec": null, - "fetchSpec": "4.1.15" + "fetchSpec": "4.2.3" }, "_requiredBy": [ "#USER", @@ -22,40 +22,41 @@ "/cacache", "/cmd-shim", "/configstore", + "/cp-file", "/flat-cache", "/fs-vacuum", "/fs-write-stream-atomic", - "/fstream", "/gentle-fs", "/libcipm", "/load-json-file", "/node-gyp", "/npm-lifecycle", - "/npm-registry-client", - "/npm-registry-fetch/cacache", + "/package-hash", "/pkg-conf/load-json-file", "/read-cmd-shim", "/read-installed", "/read-package-json", "/readdir-scoped-modules", "/sha", + "/test-exclude/load-json-file", "/write-file-atomic" ], - "_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.15.tgz", - "_shasum": "ffb703e1066e8a0eeaa4c8b80ba9253eeefbfb00", - "_spec": "graceful-fs@4.1.15", - "_where": "/Users/zkat/Documents/code/work/npm", + "_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", + "_shasum": "4a12ff1b60376ef09862c2093edd908328be8423", + "_spec": "graceful-fs@4.2.3", + "_where": "/Users/mperrotte/npminc/cli", "bugs": { "url": "https://github.com/isaacs/node-graceful-fs/issues" }, "bundleDependencies": false, + "dependencies": {}, "deprecated": false, "description": "A drop-in replacement for fs, making various improvements.", "devDependencies": { "import-fresh": "^2.0.0", "mkdirp": "^0.5.0", "rimraf": "^2.2.8", - "tap": "^12.0.1" + "tap": "^12.7.0" }, "directories": { "test": "test" @@ -92,10 +93,10 @@ "url": "git+https://github.com/isaacs/node-graceful-fs.git" }, "scripts": { - "postpublish": "git push origin --all; git push origin --tags", + "postpublish": "git push origin --follow-tags", "postversion": "npm publish", "preversion": "npm test", "test": "node test.js | tap -" }, - "version": "4.1.15" + "version": "4.2.3" } diff --git a/node_modules/graceful-fs/polyfills.js b/node_modules/graceful-fs/polyfills.js index b964ed0806cee..a5808d23f132e 100644 --- a/node_modules/graceful-fs/polyfills.js +++ b/node_modules/graceful-fs/polyfills.js @@ -115,20 +115,26 @@ function patch (fs) { } // if read() returns EAGAIN, then just try it again. - fs.read = (function (fs$read) { return function (fd, buffer, offset, length, position, callback_) { - var callback - if (callback_ && typeof callback_ === 'function') { - var eagCounter = 0 - callback = function (er, _, __) { - if (er && er.code === 'EAGAIN' && eagCounter < 10) { - eagCounter ++ - return fs$read.call(fs, fd, buffer, offset, length, position, callback) + fs.read = (function (fs$read) { + function read (fd, buffer, offset, length, position, callback_) { + var callback + if (callback_ && typeof callback_ === 'function') { + var eagCounter = 0 + callback = function (er, _, __) { + if (er && er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + return fs$read.call(fs, fd, buffer, offset, length, position, callback) + } + callback_.apply(this, arguments) } - callback_.apply(this, arguments) } + return fs$read.call(fs, fd, buffer, offset, length, position, callback) } - return fs$read.call(fs, fd, buffer, offset, length, position, callback) - }})(fs.read) + + // This ensures `util.promisify` works as it does for native `fs.read`. + read.__proto__ = fs$read + return read + })(fs.read) fs.readSync = (function (fs$readSync) { return function (fd, buffer, offset, length, position) { var eagCounter = 0 @@ -272,18 +278,24 @@ function patch (fs) { } } - function statFix (orig) { if (!orig) return orig // Older versions of Node erroneously returned signed integers for // uid + gid. 
- return function (target, cb) { - return orig.call(fs, target, function (er, stats) { - if (!stats) return cb.apply(this, arguments) - if (stats.uid < 0) stats.uid += 0x100000000 - if (stats.gid < 0) stats.gid += 0x100000000 + return function (target, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + function callback (er, stats) { + if (stats) { + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + } if (cb) cb.apply(this, arguments) - }) + } + return options ? orig.call(fs, target, options, callback) + : orig.call(fs, target, callback) } } @@ -291,8 +303,9 @@ function patch (fs) { if (!orig) return orig // Older versions of Node erroneously returned signed integers for // uid + gid. - return function (target) { - var stats = orig.call(fs, target) + return function (target, options) { + var stats = options ? orig.call(fs, target, options) + : orig.call(fs, target) if (stats.uid < 0) stats.uid += 0x100000000 if (stats.gid < 0) stats.gid += 0x100000000 return stats; diff --git a/node_modules/has-symbols/.npmignore b/node_modules/has-symbols/.npmignore new file mode 100644 index 0000000000000..5148e527a7e28 --- /dev/null +++ b/node_modules/has-symbols/.npmignore @@ -0,0 +1,37 @@ +# Logs +logs +*.log +npm-debug.log* + +# Runtime data +pids +*.pid +*.seed + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (http://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules +jspm_packages + +# Optional npm cache directory +.npm + +# Optional REPL history +.node_repl_history diff --git a/node_modules/has-symbols/.travis.yml b/node_modules/has-symbols/.travis.yml new file mode 100644 index 0000000000000..3b3331a3b488a --- /dev/null +++ b/node_modules/has-symbols/.travis.yml @@ -0,0 +1,113 @@ +language: node_js +node_js: + - "6.6" + - "6.5" + - "6.4" + - "6.3" + - "6.2" + - "6.1" + - "6.0" + - "5.12" + - "5.11" + - "5.10" + - "5.9" + - "5.8" + - "5.7" + - "5.6" + - "5.5" + - "5.4" + - "5.3" + - "5.2" + - "5.1" + - "5.0" + - "4.5" + - "4.4" + - "4.3" + - "4.2" + - "4.1" + - "4.0" + - "iojs-v3.3" + - "iojs-v3.2" + - "iojs-v3.1" + - "iojs-v3.0" + - "iojs-v2.5" + - "iojs-v2.4" + - "iojs-v2.3" + - "iojs-v2.2" + - "iojs-v2.1" + - "iojs-v2.0" + - "iojs-v1.8" + - "iojs-v1.7" + - "iojs-v1.6" + - "iojs-v1.5" + - "iojs-v1.4" + - "iojs-v1.3" + - "iojs-v1.2" + - "iojs-v1.1" + - "iojs-v1.0" + - "0.12" + - "0.11" + - "0.10" + - "0.9" + - "0.8" + - "0.6" + - "0.4" +before_install: + - 'if [ "${TRAVIS_NODE_VERSION}" != "0.9" ]; then case "$(npm --version)" in 1.*) npm install -g npm@1.4.28 ;; 2.*) npm install -g npm@2 ;; esac ; fi' + - 'if [ "${TRAVIS_NODE_VERSION}" != "0.6" ] && [ "${TRAVIS_NODE_VERSION}" != "0.9" ]; then npm install -g npm; fi' +script: + - 'if [ -n "${LINT-}" ]; then npm run lint ; fi' + - 'if [ -n "${COVERAGE-}" ]; then npm run coverage ; fi' + - 'if [ -n "${TEST-}" ]; then npm run tests-only ; fi' +sudo: false +env: + - TEST=true +matrix: + fast_finish: true + include: + - node_js: "node" + env: LINT=true + allow_failures: + - node_js: "6.5" + - node_js: "6.4" + - node_js: "6.3" + - node_js: "6.2" + - node_js: "6.1" + - node_js: "6.0" + - node_js: "5.11" + - node_js: "5.10" + - node_js: "5.9" + - 
node_js: "5.8" + - node_js: "5.7" + - node_js: "5.6" + - node_js: "5.5" + - node_js: "5.4" + - node_js: "5.3" + - node_js: "5.2" + - node_js: "5.1" + - node_js: "5.0" + - node_js: "4.4" + - node_js: "4.3" + - node_js: "4.2" + - node_js: "4.1" + - node_js: "4.0" + - node_js: "iojs-v3.2" + - node_js: "iojs-v3.1" + - node_js: "iojs-v3.0" + - node_js: "iojs-v2.4" + - node_js: "iojs-v2.3" + - node_js: "iojs-v2.2" + - node_js: "iojs-v2.1" + - node_js: "iojs-v2.0" + - node_js: "iojs-v1.7" + - node_js: "iojs-v1.6" + - node_js: "iojs-v1.5" + - node_js: "iojs-v1.4" + - node_js: "iojs-v1.3" + - node_js: "iojs-v1.2" + - node_js: "iojs-v1.1" + - node_js: "iojs-v1.0" + - node_js: "0.11" + - node_js: "0.9" + - node_js: "0.6" + - node_js: "0.4" diff --git a/node_modules/has-symbols/CHANGELOG.md b/node_modules/has-symbols/CHANGELOG.md new file mode 100644 index 0000000000000..da7f9da7ea207 --- /dev/null +++ b/node_modules/has-symbols/CHANGELOG.md @@ -0,0 +1,3 @@ +1.0.0 / 2016-09-19 +================= + * Initial release. diff --git a/node_modules/has-symbols/LICENSE b/node_modules/has-symbols/LICENSE new file mode 100644 index 0000000000000..df31cbf3c064d --- /dev/null +++ b/node_modules/has-symbols/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 Jordan Harband + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/has-symbols/README.md b/node_modules/has-symbols/README.md new file mode 100644 index 0000000000000..b27b31acbc71b --- /dev/null +++ b/node_modules/has-symbols/README.md @@ -0,0 +1,45 @@ +# has-symbols [![Version Badge][2]][1] + +[![Build Status][3]][4] +[![dependency status][5]][6] +[![dev dependency status][7]][8] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][11]][1] + +Determine if the JS environment has Symbol support. Supports spec, or shams. + +## Example + +```js +var hasSymbols = require('has-symbols'); + +hasSymbols() === true; // if the environment has native Symbol support. Not polyfillable, not forgeable. + +var hasSymbolsKinda = require('has-symbols/shams'); +hasSymbolsKinda() === true; // if the environment has a Symbol sham that mostly follows the spec. 
+``` + +## Supported Symbol shams + - get-own-property-symbols [npm](https://www.npmjs.com/package/get-own-property-symbols) | [github](https://github.com/WebReflection/get-own-property-symbols) + - core-js [npm](https://www.npmjs.com/package/core-js) | [github](https://github.com/zloirock/core-js) + +## Tests +Simply clone the repo, `npm install`, and run `npm test` + +[1]: https://npmjs.org/package/has-symbols +[2]: http://versionbadg.es/ljharb/has-symbols.svg +[3]: https://travis-ci.org/ljharb/has-symbols.svg +[4]: https://travis-ci.org/ljharb/has-symbols +[5]: https://david-dm.org/ljharb/has-symbols.svg +[6]: https://david-dm.org/ljharb/has-symbols +[7]: https://david-dm.org/ljharb/has-symbols/dev-status.svg +[8]: https://david-dm.org/ljharb/has-symbols#info=devDependencies +[9]: https://ci.testling.com/ljharb/has-symbols.png +[10]: https://ci.testling.com/ljharb/has-symbols +[11]: https://nodei.co/npm/has-symbols.png?downloads=true&stars=true +[license-image]: http://img.shields.io/npm/l/has-symbols.svg +[license-url]: LICENSE +[downloads-image]: http://img.shields.io/npm/dm/has-symbols.svg +[downloads-url]: http://npm-stat.com/charts.html?package=has-symbols diff --git a/node_modules/has-symbols/index.js b/node_modules/has-symbols/index.js new file mode 100644 index 0000000000000..f72159e0ac7dc --- /dev/null +++ b/node_modules/has-symbols/index.js @@ -0,0 +1,13 @@ +'use strict'; + +var origSymbol = global.Symbol; +var hasSymbolSham = require('./shams'); + +module.exports = function hasNativeSymbols() { + if (typeof origSymbol !== 'function') { return false; } + if (typeof Symbol !== 'function') { return false; } + if (typeof origSymbol('foo') !== 'symbol') { return false; } + if (typeof Symbol('bar') !== 'symbol') { return false; } + + return hasSymbolSham(); +}; diff --git a/node_modules/has-symbols/package.json b/node_modules/has-symbols/package.json new file mode 100644 index 0000000000000..0999461218e0d --- /dev/null +++ b/node_modules/has-symbols/package.json @@ -0,0 +1,82 @@ +{ + "name": "has-symbols", + "version": "1.0.0", + "author": { + "name": "Jordan Harband", + "email": "ljharb@gmail.com", + "url": "http://ljharb.codes" + }, + "contributors": [ + { + "name": "Jordan Harband", + "email": "ljharb@gmail.com", + "url": "http://ljharb.codes" + } + ], + "description": "Determine if the JS environment has Symbol support. 
Supports spec, or shams.", + "license": "MIT", + "main": "index.js", + "scripts": { + "prepublish": "safe-publish-latest", + "pretest": "npm run --silent lint", + "test": "npm run --silent tests-only", + "posttest": "npm run --silent security", + "tests-only": "npm run --silent test:stock && npm run --silent test:staging && npm run --silent test:shams", + "test:stock": "node test", + "test:staging": "node --harmony --es-staging test", + "test:shams": "npm run --silent test:shams:getownpropertysymbols && npm run --silent test:shams:corejs", + "test:shams:corejs": "node test/shams/core-js.js", + "test:shams:getownpropertysymbols": "node test/shams/get-own-property-symbols.js", + "lint": "eslint *.js", + "security": "nsp check" + }, + "repository": { + "type": "git", + "url": "git://github.com/ljharb/has-symbols.git" + }, + "keywords": [ + "Symbol", + "symbols", + "typeof", + "sham", + "polyfill", + "native", + "core-js", + "ES6" + ], + "dependencies": {}, + "devDependencies": { + "tape": "^4.6.0", + "nsp": "^2.6.1", + "safe-publish-latest": "^1.0.1", + "eslint": "^3.5.0", + "@ljharb/eslint-config": "^8.0.0", + "get-own-property-symbols": "^0.9.2", + "core-js": "^2.4.1" + }, + "testling": { + "files": "test/index.js", + "browsers": [ + "iexplore/6.0..latest", + "firefox/3.0..6.0", + "firefox/15.0..latest", + "firefox/nightly", + "chrome/4.0..10.0", + "chrome/20.0..latest", + "chrome/canary", + "opera/10.0..latest", + "opera/next", + "safari/4.0..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2" + ] + }, + "engines": { + "node": ">= 0.4" + } + +,"_resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.0.tgz" +,"_integrity": "sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q=" +,"_from": "has-symbols@1.0.0" +} \ No newline at end of file diff --git a/node_modules/has-symbols/shams.js b/node_modules/has-symbols/shams.js new file mode 100644 index 0000000000000..f6c1ff4a23637 --- /dev/null +++ b/node_modules/has-symbols/shams.js @@ -0,0 +1,42 @@ +'use strict'; + +/* eslint complexity: [2, 17], max-statements: [2, 33] */ +module.exports = function hasSymbols() { + if (typeof Symbol !== 'function' || typeof Object.getOwnPropertySymbols !== 'function') { return false; } + if (typeof Symbol.iterator === 'symbol') { return true; } + + var obj = {}; + var sym = Symbol('test'); + var symObj = Object(sym); + if (typeof sym === 'string') { return false; } + + if (Object.prototype.toString.call(sym) !== '[object Symbol]') { return false; } + if (Object.prototype.toString.call(symObj) !== '[object Symbol]') { return false; } + + // temp disabled per https://github.com/ljharb/object.assign/issues/17 + // if (sym instanceof Symbol) { return false; } + // temp disabled per https://github.com/WebReflection/get-own-property-symbols/issues/4 + // if (!(symObj instanceof Symbol)) { return false; } + + // if (typeof Symbol.prototype.toString !== 'function') { return false; } + // if (String(sym) !== Symbol.prototype.toString.call(sym)) { return false; } + + var symVal = 42; + obj[sym] = symVal; + for (sym in obj) { return false; } // eslint-disable-line no-restricted-syntax + if (typeof Object.keys === 'function' && Object.keys(obj).length !== 0) { return false; } + + if (typeof Object.getOwnPropertyNames === 'function' && Object.getOwnPropertyNames(obj).length !== 0) { return false; } + + var syms = Object.getOwnPropertySymbols(obj); + if (syms.length !== 1 || syms[0] !== sym) { return false; } + + if (!Object.prototype.propertyIsEnumerable.call(obj, sym)) { return false; } + + if (typeof 
Object.getOwnPropertyDescriptor === 'function') { + var descriptor = Object.getOwnPropertyDescriptor(obj, sym); + if (descriptor.value !== symVal || descriptor.enumerable !== true) { return false; } + } + + return true; +}; diff --git a/node_modules/has-symbols/test/index.js b/node_modules/has-symbols/test/index.js new file mode 100644 index 0000000000000..fc32aff94cbb2 --- /dev/null +++ b/node_modules/has-symbols/test/index.js @@ -0,0 +1,22 @@ +'use strict'; + +var test = require('tape'); +var hasSymbols = require('../'); +var runSymbolTests = require('./tests'); + +test('interface', function (t) { + t.equal(typeof hasSymbols, 'function', 'is a function'); + t.equal(typeof hasSymbols(), 'boolean', 'returns a boolean'); + t.end(); +}); + +test('Symbols are supported', { skip: !hasSymbols() }, function (t) { + runSymbolTests(t); + t.end(); +}); + +test('Symbols are not supported', { skip: hasSymbols() }, function (t) { + t.equal(typeof Symbol, 'undefined', 'global Symbol is undefined'); + t.equal(typeof Object.getOwnPropertySymbols, 'undefined', 'Object.getOwnPropertySymbols does not exist'); + t.end(); +}); diff --git a/node_modules/has-symbols/test/shams/core-js.js b/node_modules/has-symbols/test/shams/core-js.js new file mode 100644 index 0000000000000..df5365c23ed74 --- /dev/null +++ b/node_modules/has-symbols/test/shams/core-js.js @@ -0,0 +1,28 @@ +'use strict'; + +var test = require('tape'); + +if (typeof Symbol === 'function' && typeof Symbol() === 'symbol') { + test('has native Symbol support', function (t) { + t.equal(typeof Symbol, 'function'); + t.equal(typeof Symbol(), 'symbol'); + t.end(); + }); + return; +} + +var hasSymbols = require('../../shams'); + +test('polyfilled Symbols', function (t) { + /* eslint-disable global-require */ + t.equal(hasSymbols(), false, 'hasSymbols is false before polyfilling'); + require('core-js/fn/symbol'); + require('core-js/fn/symbol/to-string-tag'); + + require('../tests')(t); + + var hasSymbolsAfter = hasSymbols(); + t.equal(hasSymbolsAfter, true, 'hasSymbols is true after polyfilling'); + /* eslint-enable global-require */ + t.end(); +}); diff --git a/node_modules/has-symbols/test/shams/get-own-property-symbols.js b/node_modules/has-symbols/test/shams/get-own-property-symbols.js new file mode 100644 index 0000000000000..9191b248baa14 --- /dev/null +++ b/node_modules/has-symbols/test/shams/get-own-property-symbols.js @@ -0,0 +1,28 @@ +'use strict'; + +var test = require('tape'); + +if (typeof Symbol === 'function' && typeof Symbol() === 'symbol') { + test('has native Symbol support', function (t) { + t.equal(typeof Symbol, 'function'); + t.equal(typeof Symbol(), 'symbol'); + t.end(); + }); + return; +} + +var hasSymbols = require('../../shams'); + +test('polyfilled Symbols', function (t) { + /* eslint-disable global-require */ + t.equal(hasSymbols(), false, 'hasSymbols is false before polyfilling'); + + require('get-own-property-symbols'); + + require('../tests')(t); + + var hasSymbolsAfter = hasSymbols(); + t.equal(hasSymbolsAfter, true, 'hasSymbols is true after polyfilling'); + /* eslint-enable global-require */ + t.end(); +}); diff --git a/node_modules/has-symbols/test/tests.js b/node_modules/has-symbols/test/tests.js new file mode 100644 index 0000000000000..93ff0eae90f03 --- /dev/null +++ b/node_modules/has-symbols/test/tests.js @@ -0,0 +1,54 @@ +'use strict'; + +module.exports = function runSymbolTests(t) { + t.equal(typeof Symbol, 'function', 'global Symbol is a function'); + + if (typeof Symbol !== 'function') { return false }; + + 
t.notEqual(Symbol(), Symbol(), 'two symbols are not equal'); + + /* + t.equal( + Symbol.prototype.toString.call(Symbol('foo')), + Symbol.prototype.toString.call(Symbol('foo')), + 'two symbols with the same description stringify the same' + ); + */ + + var foo = Symbol('foo'); + + /* + t.notEqual( + String(foo), + String(Symbol('bar')), + 'two symbols with different descriptions do not stringify the same' + ); + */ + + t.equal(typeof Symbol.prototype.toString, 'function', 'Symbol#toString is a function'); + // t.equal(String(foo), Symbol.prototype.toString.call(foo), 'Symbol#toString equals String of the same symbol'); + + t.equal(typeof Object.getOwnPropertySymbols, 'function', 'Object.getOwnPropertySymbols is a function'); + + var obj = {}; + var sym = Symbol('test'); + var symObj = Object(sym); + t.notEqual(typeof sym, 'string', 'Symbol is not a string'); + t.equal(Object.prototype.toString.call(sym), '[object Symbol]', 'symbol primitive Object#toStrings properly'); + t.equal(Object.prototype.toString.call(symObj), '[object Symbol]', 'symbol primitive Object#toStrings properly'); + + var symVal = 42; + obj[sym] = symVal; + for (sym in obj) { t.fail('symbol property key was found in for..in of object'); } + + t.deepEqual(Object.keys(obj), [], 'no enumerable own keys on symbol-valued object'); + t.deepEqual(Object.getOwnPropertyNames(obj), [], 'no own names on symbol-valued object'); + t.deepEqual(Object.getOwnPropertySymbols(obj), [sym], 'one own symbol on symbol-valued object'); + t.equal(Object.prototype.propertyIsEnumerable.call(obj, sym), true, 'symbol is enumerable'); + t.deepEqual(Object.getOwnPropertyDescriptor(obj, sym), { + configurable: true, + enumerable: true, + value: 42, + writable: true + }, 'property descriptor is correct'); +}; diff --git a/node_modules/has/LICENSE-MIT b/node_modules/has/LICENSE-MIT new file mode 100644 index 0000000000000..ae7014d385df3 --- /dev/null +++ b/node_modules/has/LICENSE-MIT @@ -0,0 +1,22 @@ +Copyright (c) 2013 Thiago de Arruda + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
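The has-symbols entry points above split detection into two questions: `index.js` answers whether the engine has real, unforgeable Symbol support, while `shams.js` also accepts a spec-faithful polyfill such as core-js or get-own-property-symbols. A small sketch of how a consumer might branch on the two checks (illustrative only):

```js
'use strict';

var hasNativeSymbols = require('has-symbols');        // true only for real engine support
var hasShammedSymbols = require('has-symbols/shams'); // true if a spec-ish sham is also acceptable

if (hasNativeSymbols()) {
  // typeof gives 'symbol' and well-known symbols behave per spec.
  var key = Symbol('cache');
  var store = {};
  store[key] = 42;
  console.log(Object.getOwnPropertySymbols(store)); // [ Symbol(cache) ]
} else if (hasShammedSymbols()) {
  console.log('Symbols are shammed (e.g. core-js); most spec behavior is available');
} else {
  console.log('no usable Symbol support');
}
```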
diff --git a/node_modules/has/README.md b/node_modules/has/README.md new file mode 100644 index 0000000000000..635e3a4baab00 --- /dev/null +++ b/node_modules/has/README.md @@ -0,0 +1,18 @@ +# has + +> Object.prototype.hasOwnProperty.call shortcut + +## Installation + +```sh +npm install --save has +``` + +## Usage + +```js +var has = require('has'); + +has({}, 'hasOwnProperty'); // false +has(Object.prototype, 'hasOwnProperty'); // true +``` diff --git a/node_modules/has/package.json b/node_modules/has/package.json new file mode 100644 index 0000000000000..86203ce165966 --- /dev/null +++ b/node_modules/has/package.json @@ -0,0 +1,52 @@ +{ + "name": "has", + "description": "Object.prototype.hasOwnProperty.call shortcut", + "version": "1.0.3", + "homepage": "https://github.com/tarruda/has", + "author": { + "name": "Thiago de Arruda", + "email": "tpadilha84@gmail.com" + }, + "contributors": [ + { + "name": "Jordan Harband", + "email": "ljharb@gmail.com", + "url": "http://ljharb.codes" + } + ], + "repository": { + "type": "git", + "url": "git://github.com/tarruda/has.git" + }, + "bugs": { + "url": "https://github.com/tarruda/has/issues" + }, + "license": "MIT", + "licenses": [ + { + "type": "MIT", + "url": "https://github.com/tarruda/has/blob/master/LICENSE-MIT" + } + ], + "main": "./src", + "dependencies": { + "function-bind": "^1.1.1" + }, + "devDependencies": { + "@ljharb/eslint-config": "^12.2.1", + "eslint": "^4.19.1", + "tape": "^4.9.0" + }, + "engines": { + "node": ">= 0.4.0" + }, + "scripts": { + "lint": "eslint .", + "pretest": "npm run lint", + "test": "tape test" + } + +,"_resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz" +,"_integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==" +,"_from": "has@1.0.3" +} \ No newline at end of file diff --git a/node_modules/has/src/index.js b/node_modules/has/src/index.js new file mode 100644 index 0000000000000..dd92dd9094edb --- /dev/null +++ b/node_modules/has/src/index.js @@ -0,0 +1,5 @@ +'use strict'; + +var bind = require('function-bind'); + +module.exports = bind.call(Function.call, Object.prototype.hasOwnProperty); diff --git a/node_modules/has/test/index.js b/node_modules/has/test/index.js new file mode 100644 index 0000000000000..43d480b2c2e76 --- /dev/null +++ b/node_modules/has/test/index.js @@ -0,0 +1,10 @@ +'use strict'; + +var test = require('tape'); +var has = require('../'); + +test('has', function (t) { + t.equal(has({}, 'hasOwnProperty'), false, 'object literal does not have own property "hasOwnProperty"'); + t.equal(has(Object.prototype, 'hasOwnProperty'), true, 'Object.prototype has own property "hasOwnProperty"'); + t.end(); +}); diff --git a/node_modules/hosted-git-info/CHANGELOG.md b/node_modules/hosted-git-info/CHANGELOG.md index 7a8d5df968da3..4f86601e029e9 100644 --- a/node_modules/hosted-git-info/CHANGELOG.md +++ b/node_modules/hosted-git-info/CHANGELOG.md @@ -2,6 +2,93 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
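The `has` module added above is nothing more than a pre-bound `Object.prototype.hasOwnProperty.call`; the value of the indirection is that it also works on objects with no prototype, where the method itself is missing. A brief sketch (illustrative only):

```js
'use strict';

var has = require('has');

var dict = Object.create(null); // no prototype chain, so dict.hasOwnProperty is undefined
dict.foo = 'bar';

console.log(has(dict, 'foo')); // true
console.log(has(dict, 'baz')); // false

// Calling the method directly would throw here:
// TypeError: dict.hasOwnProperty is not a function
```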
+ +## [2.8.8](https://github.com/npm/hosted-git-info/compare/v2.8.7...v2.8.8) (2020-02-29) + + +### Bug Fixes + +* [#61](https://github.com/npm/hosted-git-info/issues/61) & [#65](https://github.com/npm/hosted-git-info/issues/65) addressing issues w/ url.URL implmentation which regressed node 6 support ([5038b18](https://github.com/npm/hosted-git-info/commit/5038b18)), closes [#66](https://github.com/npm/hosted-git-info/issues/66) + + + + +## [2.8.7](https://github.com/npm/hosted-git-info/compare/v2.8.6...v2.8.7) (2020-02-26) + + +### Bug Fixes + +* Do not attempt to use url.URL when unavailable ([2d0bb66](https://github.com/npm/hosted-git-info/commit/2d0bb66)), closes [#61](https://github.com/npm/hosted-git-info/issues/61) [#62](https://github.com/npm/hosted-git-info/issues/62) +* Do not pass scp-style URLs to the WhatWG url.URL ([f2cdfcf](https://github.com/npm/hosted-git-info/commit/f2cdfcf)), closes [#60](https://github.com/npm/hosted-git-info/issues/60) + + + + +## [2.8.6](https://github.com/npm/hosted-git-info/compare/v2.8.5...v2.8.6) (2020-02-25) + + + + +## [2.8.5](https://github.com/npm/hosted-git-info/compare/v2.8.4...v2.8.5) (2019-10-07) + + +### Bug Fixes + +* updated pathmatch for gitlab ([e8325b5](https://github.com/npm/hosted-git-info/commit/e8325b5)), closes [#51](https://github.com/npm/hosted-git-info/issues/51) +* updated pathmatch for gitlab ([ffe056f](https://github.com/npm/hosted-git-info/commit/ffe056f)) + + + + +## [2.8.4](https://github.com/npm/hosted-git-info/compare/v2.8.3...v2.8.4) (2019-08-12) + + + + +## [2.8.3](https://github.com/npm/hosted-git-info/compare/v2.8.2...v2.8.3) (2019-08-12) + + + + +## [2.8.2](https://github.com/npm/hosted-git-info/compare/v2.8.1...v2.8.2) (2019-08-05) + + +### Bug Fixes + +* http protocol use sshurl by default ([3b1d629](https://github.com/npm/hosted-git-info/commit/3b1d629)), closes [#48](https://github.com/npm/hosted-git-info/issues/48) + + + + +## [2.8.1](https://github.com/npm/hosted-git-info/compare/v2.8.0...v2.8.1) (2019-08-05) + + +### Bug Fixes + +* ignore noCommittish on tarball url generation ([5d4a8d7](https://github.com/npm/hosted-git-info/commit/5d4a8d7)) +* use gist tarball url that works for anonymous gists ([1692435](https://github.com/npm/hosted-git-info/commit/1692435)) + + + + +# [2.8.0](https://github.com/npm/hosted-git-info/compare/v2.7.1...v2.8.0) (2019-08-05) + + +### Bug Fixes + +* Allow slashes in gitlab project section ([bbcf7b2](https://github.com/npm/hosted-git-info/commit/bbcf7b2)), closes [#46](https://github.com/npm/hosted-git-info/issues/46) [#43](https://github.com/npm/hosted-git-info/issues/43) +* **git-host:** disallow URI-encoded slash (%2F) in `path` ([3776fa5](https://github.com/npm/hosted-git-info/commit/3776fa5)), closes [#44](https://github.com/npm/hosted-git-info/issues/44) +* **gitlab:** Do not URL encode slashes in project name for GitLab https URL ([cbf04f9](https://github.com/npm/hosted-git-info/commit/cbf04f9)), closes [#47](https://github.com/npm/hosted-git-info/issues/47) +* do not allow invalid gist urls ([d5cf830](https://github.com/npm/hosted-git-info/commit/d5cf830)) +* **cache:** Switch to lru-cache to save ourselves from unlimited memory consumption ([e518222](https://github.com/npm/hosted-git-info/commit/e518222)), closes [#38](https://github.com/npm/hosted-git-info/issues/38) + + +### Features + +* give these objects a name ([60abaea](https://github.com/npm/hosted-git-info/commit/60abaea)) + + + ## [2.7.1](https://github.com/npm/hosted-git-info/compare/v2.7.0...v2.7.1) 
(2018-07-07) diff --git a/node_modules/hosted-git-info/git-host-info.js b/node_modules/hosted-git-info/git-host-info.js index 090a23251fbd2..8147e3348f5e8 100644 --- a/node_modules/hosted-git-info/git-host-info.js +++ b/node_modules/hosted-git-info/git-host-info.js @@ -23,12 +23,14 @@ var gitHosts = module.exports = { 'domain': 'gitlab.com', 'treepath': 'tree', 'bugstemplate': 'https://{domain}/{user}/{project}/issues', - 'tarballtemplate': 'https://{domain}/{user}/{project}/repository/archive.tar.gz?ref={committish}' + 'httpstemplate': 'git+https://{auth@}{domain}/{user}/{projectPath}.git{#committish}', + 'tarballtemplate': 'https://{domain}/{user}/{project}/repository/archive.tar.gz?ref={committish}', + 'pathmatch': /^[/]([^/]+)[/]((?!.*(\/-\/|\/repository\/archive\.tar\.gz\?=.*|\/repository\/[^/]+\/archive.tar.gz$)).*?)(?:[.]git|[/])?$/ }, gist: { 'protocols': [ 'git', 'git+ssh', 'git+https', 'ssh', 'https' ], 'domain': 'gist.github.com', - 'pathmatch': /^[/](?:([^/]+)[/])?([a-z0-9]+)(?:[.]git)?$/, + 'pathmatch': /^[/](?:([^/]+)[/])?([a-z0-9]{32,})(?:[.]git)?$/, 'filetemplate': 'https://gist.githubusercontent.com/{user}/{project}/raw{/committish}/{path}', 'bugstemplate': 'https://{domain}/{project}', 'gittemplate': 'git://{domain}/{project}.git{#committish}', @@ -40,7 +42,7 @@ var gitHosts = module.exports = { 'httpstemplate': 'git+https://{domain}/{project}.git{#committish}', 'shortcuttemplate': '{type}:{project}{#committish}', 'pathtemplate': '{project}{#committish}', - 'tarballtemplate': 'https://{domain}/{user}/{project}/archive/{committish}.tar.gz', + 'tarballtemplate': 'https://codeload.github.com/gist/{project}/tar.gz/{committish}', 'hashformat': function (fragment) { return 'file-' + formatHashFragment(fragment) } diff --git a/node_modules/hosted-git-info/git-host.js b/node_modules/hosted-git-info/git-host.js index 733648d84b6c9..9616fbaa6b4af 100644 --- a/node_modules/hosted-git-info/git-host.js +++ b/node_modules/hosted-git-info/git-host.js @@ -1,9 +1,24 @@ 'use strict' var gitHosts = require('./git-host-info.js') /* eslint-disable node/no-deprecated-api */ -var extend = Object.assign || require('util')._extend -var GitHost = module.exports = function (type, user, auth, project, committish, defaultRepresentation, opts) { +// copy-pasta util._extend from node's source, to avoid pulling +// the whole util module into peoples' webpack bundles. +/* istanbul ignore next */ +var extend = Object.assign || function _extend (target, source) { + // Don't do anything if source isn't an object + if (source === null || typeof source !== 'object') return target + + var keys = Object.keys(source) + var i = keys.length + while (i--) { + target[keys[i]] = source[keys[i]] + } + return target +} + +module.exports = GitHost +function GitHost (type, user, auth, project, committish, defaultRepresentation, opts) { var gitHostInfo = this gitHostInfo.type = type Object.keys(gitHosts[type]).forEach(function (key) { @@ -16,7 +31,6 @@ var GitHost = module.exports = function (type, user, auth, project, committish, gitHostInfo.default = defaultRepresentation gitHostInfo.opts = opts || {} } -GitHost.prototype = {} GitHost.prototype.hash = function () { return this.committish ? 
'#' + this.committish : '' @@ -32,24 +46,33 @@ GitHost.prototype._fill = function (template, opts) { if (self[key] != null && vars[key] == null) vars[key] = self[key] }) var rawAuth = vars.auth - var rawComittish = vars.committish + var rawcommittish = vars.committish var rawFragment = vars.fragment var rawPath = vars.path + var rawProject = vars.project Object.keys(vars).forEach(function (key) { - vars[key] = encodeURIComponent(vars[key]) + var value = vars[key] + if ((key === 'path' || key === 'project') && typeof value === 'string') { + vars[key] = value.split('/').map(function (pathComponent) { + return encodeURIComponent(pathComponent) + }).join('/') + } else { + vars[key] = encodeURIComponent(value) + } }) vars['auth@'] = rawAuth ? rawAuth + '@' : '' vars['#fragment'] = rawFragment ? '#' + this.hashformat(rawFragment) : '' vars.fragment = vars.fragment ? vars.fragment : '' vars['#path'] = rawPath ? '#' + this.hashformat(rawPath) : '' vars['/path'] = vars.path ? '/' + vars.path : '' + vars.projectPath = rawProject.split('/').map(encodeURIComponent).join('/') if (opts.noCommittish) { vars['#committish'] = '' vars['/tree/committish'] = '' - vars['/comittish'] = '' - vars.comittish = '' + vars['/committish'] = '' + vars.committish = '' } else { - vars['#committish'] = rawComittish ? '#' + rawComittish : '' + vars['#committish'] = rawcommittish ? '#' + rawcommittish : '' vars['/tree/committish'] = vars.committish ? '/' + vars.treepath + '/' + vars.committish : '' @@ -114,7 +137,8 @@ GitHost.prototype.path = function (opts) { return this._fill(this.pathtemplate, opts) } -GitHost.prototype.tarball = function (opts) { +GitHost.prototype.tarball = function (opts_) { + var opts = extend({}, opts_, { noCommittish: false }) return this._fill(this.tarballtemplate, opts) } @@ -127,5 +151,6 @@ GitHost.prototype.getDefaultRepresentation = function () { } GitHost.prototype.toString = function (opts) { - return (this[this.default] || this.sshurl).call(this, opts) + if (this.default && typeof this[this.default] === 'function') return this[this.default](opts) + return this.sshurl(opts) } diff --git a/node_modules/hosted-git-info/index.js b/node_modules/hosted-git-info/index.js index c1c2cc8996b9a..21e53fe3724be 100644 --- a/node_modules/hosted-git-info/index.js +++ b/node_modules/hosted-git-info/index.js @@ -4,15 +4,14 @@ var gitHosts = require('./git-host-info.js') var GitHost = module.exports = require('./git-host.js') var protocolToRepresentationMap = { - 'git+ssh': 'sshurl', - 'git+https': 'https', - 'ssh': 'sshurl', - 'git': 'git' + 'git+ssh:': 'sshurl', + 'git+https:': 'https', + 'ssh:': 'sshurl', + 'git:': 'git' } function protocolToRepresentation (protocol) { - if (protocol.substr(-1) === ':') protocol = protocol.slice(0, -1) - return protocolToRepresentationMap[protocol] || protocol + return protocolToRepresentationMap[protocol] || protocol.slice(0, -1) } var authProtocols = { @@ -48,7 +47,7 @@ function fromUrl (giturl, opts) { var gitHostInfo = gitHosts[gitHostName] var auth = null if (parsed.auth && authProtocols[parsed.protocol]) { - auth = decodeURIComponent(parsed.auth) + auth = parsed.auth } var committish = parsed.hash ? 
decodeURIComponent(parsed.hash.substr(1)) : null var user = null @@ -65,13 +64,18 @@ function fromUrl (giturl, opts) { var pathmatch = gitHostInfo.pathmatch var matched = parsed.path.match(pathmatch) if (!matched) return - if (matched[1] != null) user = decodeURIComponent(matched[1].replace(/^:/, '')) - if (matched[2] != null) project = decodeURIComponent(matched[2]) + /* istanbul ignore else */ + if (matched[1] !== null && matched[1] !== undefined) { + user = decodeURIComponent(matched[1].replace(/^:/, '')) + } + project = decodeURIComponent(matched[2]) defaultRepresentation = protocolToRepresentation(parsed.protocol) } return new GitHost(gitHostName, user, auth, project, committish, defaultRepresentation, opts) } catch (ex) { - if (!(ex instanceof URIError)) throw ex + /* istanbul ignore else */ + if (ex instanceof URIError) { + } else throw ex } }).filter(function (gitHostInfo) { return gitHostInfo }) if (matches.length !== 1) return @@ -101,9 +105,31 @@ function fixupUnqualifiedGist (giturl) { } function parseGitUrl (giturl) { - if (typeof giturl !== 'string') giturl = '' + giturl var matched = giturl.match(/^([^@]+)@([^:/]+):[/]?((?:[^/]+[/])?[^/]+?)(?:[.]git)?(#.*)?$/) - if (!matched) return url.parse(giturl) + if (!matched) { + var legacy = url.parse(giturl) + // If we don't have url.URL, then sorry, this is just not fixable. + // This affects Node <= 6.12. + if (legacy.auth && typeof url.URL === 'function') { + // git urls can be in the form of scp-style/ssh-connect strings, like + // git+ssh://user@host.com:some/path, which the legacy url parser + // supports, but WhatWG url.URL class does not. However, the legacy + // parser de-urlencodes the username and password, so something like + // https://user%3An%40me:p%40ss%3Aword@x.com/ becomes + // https://user:n@me:p@ss:word@x.com/ which is all kinds of wrong. + // Pull off just the auth and host, so we dont' get the confusing + // scp-style URL, then pass that to the WhatWG parser to get the + // auth properly escaped. 
+ var authmatch = giturl.match(/[^@]+@[^:/]+/) + /* istanbul ignore else - this should be impossible */ + if (authmatch) { + var whatwg = new url.URL(authmatch[0]) + legacy.auth = whatwg.username || '' + if (whatwg.password) legacy.auth += ':' + whatwg.password + } + } + return legacy + } return { protocol: 'git+ssh:', slashes: true, diff --git a/node_modules/hosted-git-info/package.json b/node_modules/hosted-git-info/package.json index de1ee3d7f6422..8d9c2b2046c74 100644 --- a/node_modules/hosted-git-info/package.json +++ b/node_modules/hosted-git-info/package.json @@ -1,19 +1,19 @@ { - "_from": "hosted-git-info@latest", - "_id": "hosted-git-info@2.7.1", + "_from": "hosted-git-info@2.8.8", + "_id": "hosted-git-info@2.8.8", "_inBundle": false, - "_integrity": "sha512-7T/BxH19zbcCTa8XkMlbK5lTo1WtgkFi3GvdWEyNuc4Vex7/9Dqbnpsf4JMydcfj9HCg4zUWFTL3Za6lapg5/w==", + "_integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==", "_location": "/hosted-git-info", "_phantomChildren": {}, "_requested": { - "type": "tag", + "type": "version", "registry": true, - "raw": "hosted-git-info@latest", + "raw": "hosted-git-info@2.8.8", "name": "hosted-git-info", "escapedName": "hosted-git-info", - "rawSpec": "latest", + "rawSpec": "2.8.8", "saveSpec": null, - "fetchSpec": "latest" + "fetchSpec": "2.8.8" }, "_requiredBy": [ "#USER", @@ -21,10 +21,10 @@ "/normalize-package-data", "/npm-package-arg" ], - "_resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.7.1.tgz", - "_shasum": "97f236977bd6e125408930ff6de3eec6281ec047", - "_spec": "hosted-git-info@latest", - "_where": "/Users/zkat/Documents/code/work/npm", + "_resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", + "_shasum": "7539bd4bc1e0e0a895815a2e0262420b12858488", + "_spec": "hosted-git-info@2.8.8", + "_where": "/Users/darcyclarke/Documents/Repos/npm/cli", "author": { "name": "Rebecca Turner", "email": "me@re-becca.org", @@ -38,8 +38,8 @@ "description": "Provides metadata and conversions from repository urls for Github, Bitbucket and Gitlab", "devDependencies": { "standard": "^11.0.1", - "standard-version": "^4.3.0", - "tap": "^12.0.1" + "standard-version": "^4.4.0", + "tap": "^12.7.0" }, "files": [ "index.js", @@ -61,11 +61,12 @@ "url": "git+https://github.com/npm/hosted-git-info.git" }, "scripts": { - "postrelease": "npm publish && git push --follow-tags", + "postrelease": "npm publish --tag=ancient-legacy-fixes && git push --follow-tags", + "posttest": "standard", "prerelease": "npm t", - "pretest": "standard", "release": "standard-version -s", - "test": "tap -J --nyc-arg=--all --coverage test" + "test": "tap -J --coverage=90 --no-esm test/*.js", + "test:coverage": "tap --coverage-report=html -J --coverage=90 --no-esm test/*.js" }, - "version": "2.7.1" + "version": "2.8.8" } diff --git a/node_modules/https-proxy-agent/.editorconfig b/node_modules/https-proxy-agent/.editorconfig new file mode 100644 index 0000000000000..12b4b9a3b9bff --- /dev/null +++ b/node_modules/https-proxy-agent/.editorconfig @@ -0,0 +1,37 @@ +root = true + +[*] +indent_style = tab +indent_size = 4 +tab_width = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[{*.json,*.json.example,*.gyp,*.yml,*.yaml,*.workflow}] +indent_style = space +indent_size = 2 + +[{*.py,*.asm}] +indent_style = space + +[*.py] +indent_size = 4 + +[*.asm] +indent_size = 8 + +[*.md] +trim_trailing_whitespace = false + +# Ideal settings - some plugins might 
support these. +[*.js] +quote_type = single + +[{*.c,*.cc,*.h,*.hh,*.cpp,*.hpp,*.m,*.mm,*.mpp,*.js,*.java,*.go,*.rs,*.php,*.ng,*.jsx,*.ts,*.d,*.cs,*.swift}] +curly_bracket_next_line = false +spaces_around_operators = true +spaces_around_brackets = outside +# close enough to 1TB +indent_brace_style = K&R diff --git a/node_modules/https-proxy-agent/.eslintrc.js b/node_modules/https-proxy-agent/.eslintrc.js new file mode 100644 index 0000000000000..62743f2c4d187 --- /dev/null +++ b/node_modules/https-proxy-agent/.eslintrc.js @@ -0,0 +1,86 @@ +module.exports = { + 'extends': [ + 'airbnb', + 'prettier' + ], + 'parser': '@typescript-eslint/parser', + 'parserOptions': { + 'ecmaVersion': 2018, + 'sourceType': 'module', + 'modules': true + }, + 'plugins': [ + '@typescript-eslint' + ], + 'settings': { + 'import/resolver': { + 'typescript': { + } + } + }, + 'rules': { + 'quotes': [ + 2, + 'single', + { + 'allowTemplateLiterals': true + } + ], + 'class-methods-use-this': 0, + 'consistent-return': 0, + 'func-names': 0, + 'global-require': 0, + 'guard-for-in': 0, + 'import/no-duplicates': 0, + 'import/no-dynamic-require': 0, + 'import/no-extraneous-dependencies': 0, + 'import/prefer-default-export': 0, + 'lines-between-class-members': 0, + 'no-await-in-loop': 0, + 'no-bitwise': 0, + 'no-console': 0, + 'no-continue': 0, + 'no-control-regex': 0, + 'no-empty': 0, + 'no-loop-func': 0, + 'no-nested-ternary': 0, + 'no-param-reassign': 0, + 'no-plusplus': 0, + 'no-restricted-globals': 0, + 'no-restricted-syntax': 0, + 'no-shadow': 0, + 'no-underscore-dangle': 0, + 'no-use-before-define': 0, + 'prefer-const': 0, + 'prefer-destructuring': 0, + 'camelcase': 0, + 'no-unused-vars': 0, // in favor of '@typescript-eslint/no-unused-vars' + // 'indent': 0 // in favor of '@typescript-eslint/indent' + '@typescript-eslint/no-unused-vars': 'warn', + // '@typescript-eslint/indent': ['error', 2] // this might conflict with a lot ongoing changes + '@typescript-eslint/no-array-constructor': 'error', + '@typescript-eslint/adjacent-overload-signatures': 'error', + '@typescript-eslint/class-name-casing': 'error', + '@typescript-eslint/interface-name-prefix': 'error', + '@typescript-eslint/no-empty-interface': 'error', + '@typescript-eslint/no-inferrable-types': 'error', + '@typescript-eslint/no-misused-new': 'error', + '@typescript-eslint/no-namespace': 'error', + '@typescript-eslint/no-non-null-assertion': 'error', + '@typescript-eslint/no-parameter-properties': 'error', + '@typescript-eslint/no-triple-slash-reference': 'error', + '@typescript-eslint/prefer-namespace-keyword': 'error', + '@typescript-eslint/type-annotation-spacing': 'error', + // '@typescript-eslint/array-type': 'error', + // '@typescript-eslint/ban-types': 'error', + // '@typescript-eslint/explicit-function-return-type': 'warn', + // '@typescript-eslint/explicit-member-accessibility': 'error', + // '@typescript-eslint/member-delimiter-style': 'error', + // '@typescript-eslint/no-angle-bracket-type-assertion': 'error', + // '@typescript-eslint/no-explicit-any': 'warn', + // '@typescript-eslint/no-object-literal-type-assertion': 'error', + // '@typescript-eslint/no-use-before-define': 'error', + // '@typescript-eslint/no-var-requires': 'error', + // '@typescript-eslint/prefer-interface': 'error' + } +} diff --git a/node_modules/https-proxy-agent/.travis.yml b/node_modules/https-proxy-agent/.travis.yml deleted file mode 100644 index 805d3d50d2a1f..0000000000000 --- a/node_modules/https-proxy-agent/.travis.yml +++ /dev/null @@ -1,22 +0,0 @@ -sudo: false - 
-language: node_js - -node_js: - - "4" - - "5" - - "6" - - "7" - - "8" - -install: - - PATH="`npm bin`:`npm bin -g`:$PATH" - # Install dependencies and build - - npm install - -script: - # Output useful info for debugging - - node --version - - npm --version - # Run tests - - npm test diff --git a/node_modules/https-proxy-agent/README.md b/node_modules/https-proxy-agent/README.md index 5e0419cf9ced0..20fda1e24cf65 100644 --- a/node_modules/https-proxy-agent/README.md +++ b/node_modules/https-proxy-agent/README.md @@ -103,7 +103,7 @@ The `options` argument may either be a string URI of the proxy server to use, or * `host` - String - Proxy host to connect to (may use `hostname` as well). Required. * `port` - Number - Proxy port to connect to. Required. - * `secureProxy` - Boolean - If `true`, then use TLS to connect to the proxy. Defaults to `false`. + * `protocol` - String - If `https:`, then use TLS to connect to the proxy. * `headers` - Object - Additional HTTP headers to be sent on the HTTP CONNECT method. * Any other options given are passed to the `net.connect()`/`tls.connect()` functions. diff --git a/node_modules/https-proxy-agent/index.d.ts b/node_modules/https-proxy-agent/index.d.ts new file mode 100644 index 0000000000000..cec35d85e0f61 --- /dev/null +++ b/node_modules/https-proxy-agent/index.d.ts @@ -0,0 +1,22 @@ +declare module 'https-proxy-agent' { + import * as https from 'https'; + + namespace HttpsProxyAgent { + interface HttpsProxyAgentOptions { + host: string; + port: number | string; + secureProxy?: boolean; + headers?: { + [key: string]: string; + }; + [key: string]: any; + } + } + + // HttpsProxyAgent doesnt *actually* extend https.Agent, but for my purposes I want it to pretend that it does + class HttpsProxyAgent extends https.Agent { + constructor(opts: HttpsProxyAgent.HttpsProxyAgentOptions | string); + } + + export = HttpsProxyAgent; +} diff --git a/node_modules/https-proxy-agent/index.js b/node_modules/https-proxy-agent/index.js index 0a2fdabe8dcfa..817a0a9232060 100644 --- a/node_modules/https-proxy-agent/index.js +++ b/node_modules/https-proxy-agent/index.js @@ -5,6 +5,7 @@ var net = require('net'); var tls = require('tls'); var url = require('url'); +var assert = require('assert'); var Agent = require('agent-base'); var inherits = require('util').inherits; var debug = require('debug')('https-proxy-agent'); @@ -23,40 +24,42 @@ module.exports = HttpsProxyAgent; */ function HttpsProxyAgent(opts) { - if (!(this instanceof HttpsProxyAgent)) return new HttpsProxyAgent(opts); - if ('string' == typeof opts) opts = url.parse(opts); - if (!opts) - throw new Error( - 'an HTTP(S) proxy server `host` and `port` must be specified!' - ); - debug('creating new HttpsProxyAgent instance: %o', opts); - Agent.call(this, opts); - - var proxy = Object.assign({}, opts); - - // if `true`, then connect to the proxy server over TLS. defaults to `false`. - this.secureProxy = proxy.protocol ? /^https:?$/i.test(proxy.protocol) : false; - - // prefer `hostname` over `host`, and set the `port` if needed - proxy.host = proxy.hostname || proxy.host; - proxy.port = +proxy.port || (this.secureProxy ? 443 : 80); - - // ALPN is supported by Node.js >= v5. - // attempt to negotiate http/1.1 for proxy servers that support http/2 - if (this.secureProxy && !('ALPNProtocols' in proxy)) { - proxy.ALPNProtocols = ['http 1.1'] - } - - if (proxy.host && proxy.path) { - // if both a `host` and `path` are specified then it's most likely the - // result of a `url.parse()` call... 
we need to remove the `path` portion so - // that `net.connect()` doesn't attempt to open that as a unix socket file. - delete proxy.path; - delete proxy.pathname; - } - - this.proxy = proxy; - this.defaultPort = 443; + if (!(this instanceof HttpsProxyAgent)) return new HttpsProxyAgent(opts); + if ('string' == typeof opts) opts = url.parse(opts); + if (!opts) + throw new Error( + 'an HTTP(S) proxy server `host` and `port` must be specified!' + ); + debug('creating new HttpsProxyAgent instance: %o', opts); + Agent.call(this, opts); + + var proxy = Object.assign({}, opts); + + // if `true`, then connect to the proxy server over TLS. defaults to `false`. + this.secureProxy = proxy.protocol + ? /^https:?$/i.test(proxy.protocol) + : false; + + // prefer `hostname` over `host`, and set the `port` if needed + proxy.host = proxy.hostname || proxy.host; + proxy.port = +proxy.port || (this.secureProxy ? 443 : 80); + + // ALPN is supported by Node.js >= v5. + // attempt to negotiate http/1.1 for proxy servers that support http/2 + if (this.secureProxy && !('ALPNProtocols' in proxy)) { + proxy.ALPNProtocols = ['http 1.1']; + } + + if (proxy.host && proxy.path) { + // if both a `host` and `path` are specified then it's most likely the + // result of a `url.parse()` call... we need to remove the `path` portion so + // that `net.connect()` doesn't attempt to open that as a unix socket file. + delete proxy.path; + delete proxy.pathname; + } + + this.proxy = proxy; + this.defaultPort = 443; } inherits(HttpsProxyAgent, Agent); @@ -67,163 +70,172 @@ inherits(HttpsProxyAgent, Agent); */ HttpsProxyAgent.prototype.callback = function connect(req, opts, fn) { - var proxy = this.proxy; - - // create a socket connection to the proxy server - var socket; - if (this.secureProxy) { - socket = tls.connect(proxy); - } else { - socket = net.connect(proxy); - } - - // we need to buffer any HTTP traffic that happens with the proxy before we get - // the CONNECT response, so that if the response is anything other than an "200" - // response code, then we can re-play the "data" events on the socket once the - // HTTP parser is hooked up... - var buffers = []; - var buffersLength = 0; - - function read() { - var b = socket.read(); - if (b) ondata(b); - else socket.once('readable', read); - } - - function cleanup() { - socket.removeListener('data', ondata); - socket.removeListener('end', onend); - socket.removeListener('error', onerror); - socket.removeListener('close', onclose); - socket.removeListener('readable', read); - } - - function onclose(err) { - debug('onclose had error %o', err); - } - - function onend() { - debug('onend'); - } - - function onerror(err) { - cleanup(); - fn(err); - } - - function ondata(b) { - buffers.push(b); - buffersLength += b.length; - var buffered = Buffer.concat(buffers, buffersLength); - var str = buffered.toString('ascii'); - - if (!~str.indexOf('\r\n\r\n')) { - // keep buffering - debug('have not received end of HTTP headers yet...'); - if (socket.read) { - read(); - } else { - socket.once('data', ondata); - } - return; - } - - var firstLine = str.substring(0, str.indexOf('\r\n')); - var statusCode = +firstLine.split(' ')[1]; - debug('got proxy server response: %o', firstLine); - - if (200 == statusCode) { - // 200 Connected status code! 
- var sock = socket; - - // nullify the buffered data since we won't be needing it - buffers = buffered = null; - - if (opts.secureEndpoint) { - // since the proxy is connecting to an SSL server, we have - // to upgrade this socket connection to an SSL connection - debug( - 'upgrading proxy-connected socket to TLS connection: %o', - opts.host - ); - opts.socket = socket; - opts.servername = opts.servername || opts.host; - opts.host = null; - opts.hostname = null; - opts.port = null; - sock = tls.connect(opts); - } - - cleanup(); - fn(null, sock); - } else { - // some other status code that's not 200... need to re-play the HTTP header - // "data" events onto the socket once the HTTP machinery is attached so that - // the user can parse and handle the error status code - cleanup(); - - // save a reference to the concat'd Buffer for the `onsocket` callback - buffers = buffered; - - // need to wait for the "socket" event to re-play the "data" events - req.once('socket', onsocket); - fn(null, socket); - } - } - - function onsocket(socket) { - // replay the "buffers" Buffer onto the `socket`, since at this point - // the HTTP module machinery has been hooked up for the user - if ('function' == typeof socket.ondata) { - // node <= v0.11.3, the `ondata` function is set on the socket - socket.ondata(buffers, 0, buffers.length); - } else if (socket.listeners('data').length > 0) { - // node > v0.11.3, the "data" event is listened for directly - socket.emit('data', buffers); - } else { - // never? - throw new Error('should not happen...'); - } - - // nullify the cached Buffer instance - buffers = null; - } - - socket.on('error', onerror); - socket.on('close', onclose); - socket.on('end', onend); - - if (socket.read) { - read(); - } else { - socket.once('data', ondata); - } - - var hostname = opts.host + ':' + opts.port; - var msg = 'CONNECT ' + hostname + ' HTTP/1.1\r\n'; - - var headers = Object.assign({}, proxy.headers); - if (proxy.auth) { - headers['Proxy-Authorization'] = - 'Basic ' + Buffer.from(proxy.auth).toString('base64'); - } - - // the Host header should only include the port - // number when it is a non-standard port - var host = opts.host; - if (!isDefaultPort(opts.port, opts.secureEndpoint)) { - host += ':' + opts.port; - } - headers['Host'] = host; - - headers['Connection'] = 'close'; - Object.keys(headers).forEach(function(name) { - msg += name + ': ' + headers[name] + '\r\n'; - }); - - socket.write(msg + '\r\n'); + var proxy = this.proxy; + + // create a socket connection to the proxy server + var socket; + if (this.secureProxy) { + socket = tls.connect(proxy); + } else { + socket = net.connect(proxy); + } + + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... 
+ var buffers = []; + var buffersLength = 0; + + function read() { + var b = socket.read(); + if (b) ondata(b); + else socket.once('readable', read); + } + + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('close', onclose); + socket.removeListener('readable', read); + } + + function onclose(err) { + debug('onclose had error %o', err); + } + + function onend() { + debug('onend'); + } + + function onerror(err) { + cleanup(); + fn(err); + } + + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + var buffered = Buffer.concat(buffers, buffersLength); + var str = buffered.toString('ascii'); + + if (!~str.indexOf('\r\n\r\n')) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + + var firstLine = str.substring(0, str.indexOf('\r\n')); + var statusCode = +firstLine.split(' ')[1]; + debug('got proxy server response: %o', firstLine); + + if (200 == statusCode) { + // 200 Connected status code! + var sock = socket; + + // nullify the buffered data since we won't be needing it + buffers = buffered = null; + + if (opts.secureEndpoint) { + // since the proxy is connecting to an SSL server, we have + // to upgrade this socket connection to an SSL connection + debug( + 'upgrading proxy-connected socket to TLS connection: %o', + opts.host + ); + opts.socket = socket; + opts.servername = opts.servername || opts.host; + opts.host = null; + opts.hostname = null; + opts.port = null; + sock = tls.connect(opts); + } + + cleanup(); + req.once('socket', resume); + fn(null, sock); + } else { + // some other status code that's not 200... need to re-play the HTTP header + // "data" events onto the socket once the HTTP machinery is attached so + // that the node core `http` can parse and handle the error status code + cleanup(); + + // the original socket is closed, and a new closed socket is + // returned instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). 
+ // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + socket = new net.Socket(); + socket.readable = true; + + + // save a reference to the concat'd Buffer for the `onsocket` callback + buffers = buffered; + + // need to wait for the "socket" event to re-play the "data" events + req.once('socket', onsocket); + + fn(null, socket); + } + } + + function onsocket(socket) { + debug('replaying proxy buffer for failed request'); + assert(socket.listenerCount('data') > 0); + + // replay the "buffers" Buffer onto the `socket`, since at this point + // the HTTP module machinery has been hooked up for the user + socket.push(buffers); + + // nullify the cached Buffer instance + buffers = null; + } + + socket.on('error', onerror); + socket.on('close', onclose); + socket.on('end', onend); + + read(); + + var hostname = opts.host + ':' + opts.port; + var msg = 'CONNECT ' + hostname + ' HTTP/1.1\r\n'; + + var headers = Object.assign({}, proxy.headers); + if (proxy.auth) { + headers['Proxy-Authorization'] = + 'Basic ' + Buffer.from(proxy.auth).toString('base64'); + } + + // the Host header should only include the port + // number when it is a non-standard port + var host = opts.host; + if (!isDefaultPort(opts.port, opts.secureEndpoint)) { + host += ':' + opts.port; + } + headers['Host'] = host; + + headers['Connection'] = 'close'; + Object.keys(headers).forEach(function(name) { + msg += name + ': ' + headers[name] + '\r\n'; + }); + + socket.write(msg + '\r\n'); }; +/** + * Resumes a socket. + * + * @param {(net.Socket|tls.Socket)} socket The socket to resume + * @api public + */ + +function resume(socket) { + socket.resume(); +} + function isDefaultPort(port, secure) { - return Boolean((!secure && port === 80) || (secure && port === 443)); + return Boolean((!secure && port === 80) || (secure && port === 443)); } diff --git a/node_modules/https-proxy-agent/package.json b/node_modules/https-proxy-agent/package.json index 538782a862a38..274df864b2161 100644 --- a/node_modules/https-proxy-agent/package.json +++ b/node_modules/https-proxy-agent/package.json @@ -1,29 +1,27 @@ { - "_from": "https-proxy-agent@^2.2.1", - "_id": "https-proxy-agent@2.2.1", + "_from": "https-proxy-agent@^2.2.3", + "_id": "https-proxy-agent@2.2.4", "_inBundle": false, - "_integrity": "sha512-HPCTS1LW51bcyMYbxUIOO4HEOlQ1/1qRaFWcyxvwaqUS9TY88aoEuHUY33kuAh1YhVVaDQhLZsnPd+XNARWZlQ==", + "_integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==", "_location": "/https-proxy-agent", "_phantomChildren": {}, "_requested": { "type": "range", "registry": true, - "raw": "https-proxy-agent@^2.2.1", + "raw": "https-proxy-agent@^2.2.3", "name": "https-proxy-agent", "escapedName": "https-proxy-agent", - "rawSpec": "^2.2.1", + "rawSpec": "^2.2.3", "saveSpec": null, - "fetchSpec": "^2.2.1" + "fetchSpec": "^2.2.3" }, "_requiredBy": [ - "/make-fetch-happen", - "/npm-profile/make-fetch-happen", - "/npm-registry-fetch/make-fetch-happen" + "/make-fetch-happen" ], - "_resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.1.tgz", - "_shasum": "51552970fa04d723e04c56d04178c3f92592bbc0", - "_spec": "https-proxy-agent@^2.2.1", - "_where": "/Users/rebecca/code/npm/node_modules/make-fetch-happen", + "_resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz", + "_shasum": "4ee7a737abd92678a293d9b34a1af4d0d08c787b", + "_spec": "https-proxy-agent@^2.2.3", + "_where": "/Users/claudiahdz/npm/cli/node_modules/make-fetch-happen", "author": { 
"name": "Nathan Rajlich", "email": "nathan@tootallnate.net", @@ -34,14 +32,14 @@ }, "bundleDependencies": false, "dependencies": { - "agent-base": "^4.1.0", + "agent-base": "^4.3.0", "debug": "^3.1.0" }, "deprecated": false, "description": "An HTTP(s) proxy `http.Agent` implementation for HTTPS", "devDependencies": { - "mocha": "^3.4.2", - "proxy": "^0.2.4" + "mocha": "^6.2.0", + "proxy": "1" }, "engines": { "node": ">= 4.5.0" @@ -63,5 +61,6 @@ "scripts": { "test": "mocha --reporter spec" }, - "version": "2.2.1" + "types": "./index.d.ts", + "version": "2.2.4" } diff --git a/node_modules/https-proxy-agent/test/test.js b/node_modules/https-proxy-agent/test/test.js deleted file mode 100644 index b368495821421..0000000000000 --- a/node_modules/https-proxy-agent/test/test.js +++ /dev/null @@ -1,342 +0,0 @@ - -/** - * Module dependencies. - */ - -var fs = require('fs'); -var url = require('url'); -var http = require('http'); -var https = require('https'); -var assert = require('assert'); -var Proxy = require('proxy'); -var HttpsProxyAgent = require('../'); - -describe('HttpsProxyAgent', function () { - - var server; - var serverPort; - - var sslServer; - var sslServerPort; - - var proxy; - var proxyPort; - - var sslProxy; - var sslProxyPort; - - before(function (done) { - // setup target HTTP server - server = http.createServer(); - server.listen(function () { - serverPort = server.address().port; - done(); - }); - }); - - before(function (done) { - // setup HTTP proxy server - proxy = Proxy(); - proxy.listen(function () { - proxyPort = proxy.address().port; - done(); - }); - }); - - before(function (done) { - // setup target HTTPS server - var options = { - key: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.key'), - cert: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.pem') - }; - sslServer = https.createServer(options); - sslServer.listen(function () { - sslServerPort = sslServer.address().port; - done(); - }); - }); - - before(function (done) { - // setup SSL HTTP proxy server - var options = { - key: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.key'), - cert: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.pem') - }; - sslProxy = Proxy(https.createServer(options)); - sslProxy.listen(function () { - sslProxyPort = sslProxy.address().port; - done(); - }); - }); - - // shut down test HTTP server - after(function (done) { - server.once('close', function () { done(); }); - server.close(); - }); - - after(function (done) { - proxy.once('close', function () { done(); }); - proxy.close(); - }); - - after(function (done) { - sslServer.once('close', function () { done(); }); - sslServer.close(); - }); - - after(function (done) { - sslProxy.once('close', function () { done(); }); - sslProxy.close(); - }); - - describe('constructor', function () { - it('should throw an Error if no "proxy" argument is given', function () { - assert.throws(function () { - new HttpsProxyAgent(); - }); - }); - it('should accept a "string" proxy argument', function () { - var agent = new HttpsProxyAgent('http://127.0.0.1:' + proxyPort); - assert.equal('127.0.0.1', agent.proxy.host); - assert.equal(proxyPort, agent.proxy.port); - }); - it('should accept a `url.parse()` result object argument', function () { - var opts = url.parse('http://127.0.0.1:' + proxyPort); - var agent = new HttpsProxyAgent(opts); - assert.equal('127.0.0.1', agent.proxy.host); - assert.equal(proxyPort, agent.proxy.port); - }); - it('should set a `defaultPort` property', function () { - var opts = url.parse("http://127.0.0.1:" + proxyPort); - var 
agent = new HttpsProxyAgent(opts); - assert.equal(443, agent.defaultPort); - }); - describe('secureProxy', function () { - it('should default to `false`', function () { - var agent = new HttpsProxyAgent({ port: proxyPort }); - assert.equal(false, agent.secureProxy); - }); - it('should be `false` when "http:" protocol is used', function () { - var agent = new HttpsProxyAgent({ port: proxyPort, protocol: 'http:' }); - assert.equal(false, agent.secureProxy); - }); - it('should be `true` when "https:" protocol is used', function () { - var agent = new HttpsProxyAgent({ port: proxyPort, protocol: 'https:' }); - assert.equal(true, agent.secureProxy); - }); - it('should be `true` when "https" protocol is used', function () { - var agent = new HttpsProxyAgent({ port: proxyPort, protocol: 'https' }); - assert.equal(true, agent.secureProxy); - }); - }); - }); - - describe('"http" module', function () { - - beforeEach(function () { - delete proxy.authenticate; - }); - - it('should work over an HTTP proxy', function (done) { - server.once('request', function (req, res) { - res.end(JSON.stringify(req.headers)); - }); - - var proxy = process.env.HTTP_PROXY || process.env.http_proxy || 'http://127.0.0.1:' + proxyPort; - var agent = new HttpsProxyAgent(proxy); - - var opts = url.parse('http://127.0.0.1:' + serverPort); - opts.agent = agent; - - var req = http.get(opts, function (res) { - var data = ''; - res.setEncoding('utf8'); - res.on('data', function (b) { - data += b; - }); - res.on('end', function () { - data = JSON.parse(data); - assert.equal('127.0.0.1:' + serverPort, data.host); - done(); - }); - }); - req.once('error', done); - }); - it('should work over an HTTPS proxy', function (done) { - server.once('request', function (req, res) { - res.end(JSON.stringify(req.headers)); - }); - - var proxy = process.env.HTTPS_PROXY || process.env.https_proxy || 'https://127.0.0.1:' + sslProxyPort; - proxy = url.parse(proxy); - proxy.rejectUnauthorized = false; - var agent = new HttpsProxyAgent(proxy); - - var opts = url.parse('http://127.0.0.1:' + serverPort); - opts.agent = agent; - - http.get(opts, function (res) { - var data = ''; - res.setEncoding('utf8'); - res.on('data', function (b) { - data += b; - }); - res.on('end', function () { - data = JSON.parse(data); - assert.equal('127.0.0.1:' + serverPort, data.host); - done(); - }); - }); - }); - it('should receive the 407 authorization code on the `http.ClientResponse`', function (done) { - // set a proxy authentication function for this test - proxy.authenticate = function (req, fn) { - // reject all requests - fn(null, false); - }; - - var proxyUri = process.env.HTTP_PROXY || process.env.http_proxy || 'http://127.0.0.1:' + proxyPort; - var agent = new HttpsProxyAgent(proxyUri); - - var opts = {}; - // `host` and `port` don't really matter since the proxy will reject anyways - opts.host = '127.0.0.1'; - opts.port = 80; - opts.agent = agent; - - var req = http.get(opts, function (res) { - assert.equal(407, res.statusCode); - assert('proxy-authenticate' in res.headers); - done(); - }); - }); - it('should emit an "error" event on the `http.ClientRequest` if the proxy does not exist', function (done) { - // port 4 is a reserved, but "unassigned" port - var proxyUri = 'http://127.0.0.1:4'; - var agent = new HttpsProxyAgent(proxyUri); - - var opts = url.parse('http://nodejs.org'); - opts.agent = agent; - - var req = http.get(opts); - req.once('error', function (err) { - assert.equal('ECONNREFUSED', err.code); - req.abort(); - done(); - }); - }); - - it('should 
allow custom proxy "headers"', function (done) { - server.once('connect', function (req, socket, head) { - assert.equal('CONNECT', req.method); - assert.equal('bar', req.headers.foo); - socket.destroy(); - done(); - }); - - var uri = 'http://127.0.0.1:' + serverPort; - var proxyOpts = url.parse(uri); - proxyOpts.headers = { - 'Foo': 'bar' - }; - var agent = new HttpsProxyAgent(proxyOpts); - - var opts = {}; - // `host` and `port` don't really matter since the proxy will reject anyways - opts.host = '127.0.0.1'; - opts.port = 80; - opts.agent = agent; - - http.get(opts); - }); - - }); - - describe('"https" module', function () { - it('should work over an HTTP proxy', function (done) { - sslServer.once('request', function (req, res) { - res.end(JSON.stringify(req.headers)); - }); - - var proxy = process.env.HTTP_PROXY || process.env.http_proxy || 'http://127.0.0.1:' + proxyPort; - var agent = new HttpsProxyAgent(proxy); - - var opts = url.parse('https://127.0.0.1:' + sslServerPort); - opts.rejectUnauthorized = false; - opts.agent = agent; - - https.get(opts, function (res) { - var data = ''; - res.setEncoding('utf8'); - res.on('data', function (b) { - data += b; - }); - res.on('end', function () { - data = JSON.parse(data); - assert.equal('127.0.0.1:' + sslServerPort, data.host); - done(); - }); - }); - }); - - it('should work over an HTTPS proxy', function (done) { - sslServer.once('request', function (req, res) { - res.end(JSON.stringify(req.headers)); - }); - - var proxy = process.env.HTTPS_PROXY || process.env.https_proxy || 'https://127.0.0.1:' + sslProxyPort; - proxy = url.parse(proxy); - proxy.rejectUnauthorized = false; - var agent = new HttpsProxyAgent(proxy); - - var opts = url.parse('https://127.0.0.1:' + sslServerPort); - opts.agent = agent; - opts.rejectUnauthorized = false; - - https.get(opts, function (res) { - var data = ''; - res.setEncoding('utf8'); - res.on('data', function (b) { - data += b; - }); - res.on('end', function () { - data = JSON.parse(data); - assert.equal('127.0.0.1:' + sslServerPort, data.host); - done(); - }); - }); - }); - - it('should not send a port number for the default port', function (done) { - sslServer.once('request', function (req, res) { - res.end(JSON.stringify(req.headers)); - }); - - var proxy = process.env.HTTPS_PROXY || process.env.https_proxy || "https://127.0.0.1:" + sslProxyPort; - proxy = url.parse(proxy); - proxy.rejectUnauthorized = false; - var agent = new HttpsProxyAgent(proxy); - agent.defaultPort = sslServerPort; - - var opts = url.parse("https://127.0.0.1:" + sslServerPort); - opts.agent = agent; - opts.rejectUnauthorized = false; - - https.get(opts, function(res) { - var data = ""; - res.setEncoding("utf8"); - res.on("data", function(b) { - data += b; - }); - res.on("end", function() { - data = JSON.parse(data); - assert.equal("127.0.0.1", data.host); - done(); - }); - }); - }); - - }); - -}); diff --git a/node_modules/ignore-walk/README.md b/node_modules/ignore-walk/README.md index 66b69e894b2dd..278f61017f5e7 100644 --- a/node_modules/ignore-walk/README.md +++ b/node_modules/ignore-walk/README.md @@ -1,7 +1,7 @@ # ignore-walk [![Build -Status](https://travis-ci.org/isaacs/ignore-walk.svg?branch=master)](https://travis-ci.org/isaacs/ignore-walk) +Status](https://travis-ci.org/npm/ignore-walk.svg?branch=master)](https://travis-ci.org/npm/ignore-walk) Nested/recursive `.gitignore`/`.npmignore` parsing and filtering. 
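The ignore-walk change that follows swaps the walker's internal `result` collection from an array to a `Set`, so entries collected by nested walkers are deduplicated, and on the final `done` event it prefixes scoped (`@`-led) entries with `./` before sorting. A minimal usage sketch for context: the option names `includeEmpty` and `follow` (and `path`) appear in the Walker constructor below, while the rest of the call shape (an exported function taking options and returning a promise of the file list) is an assumption here, not something shown in this diff.

```js
// Hedged sketch: walk a tree while honoring nested .gitignore/.npmignore files.
const walk = require('ignore-walk')

walk({
  path: '.',                                 // root of the walk
  ignoreFiles: ['.gitignore', '.npmignore'], // ignore files consulted at each level
  includeEmpty: false,                       // corresponds to opts.includeEmpty below
  follow: false                              // corresponds to opts.follow (symlinks)
}).then(files => {
  // After this change, results are deduplicated via a Set, and entries that
  // start with "@" (scoped directories) come back prefixed with "./".
  console.log(files)
})
```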
diff --git a/node_modules/ignore-walk/index.js b/node_modules/ignore-walk/index.js index abfd9ece57fd4..eec6851804a99 100644 --- a/node_modules/ignore-walk/index.js +++ b/node_modules/ignore-walk/index.js @@ -17,7 +17,7 @@ class Walker extends EE { this.includeEmpty = !!opts.includeEmpty this.root = this.parent ? this.parent.root : this.path this.follow = !!opts.follow - this.result = this.parent ? this.parent.result : [] + this.result = this.parent ? this.parent.result : new Set() this.entries = null this.sawError = false } @@ -31,8 +31,12 @@ class Walker extends EE { if (!(this.sawError && ev === 'error')) { if (ev === 'error') this.sawError = true - else if (ev === 'done' && !this.parent) - data = data.sort(this.sort) + else if (ev === 'done' && !this.parent) { + data = Array.from(data) + .map(e => /^@/.test(e) ? `./${e}` : e).sort(this.sort) + this.result = data + } + if (ev === 'error' && this.parent) ret = this.parent.emit('error', data) else @@ -57,7 +61,7 @@ class Walker extends EE { this.entries = entries if (entries.length === 0) { if (this.includeEmpty) - this.result.push(this.path.substr(this.root.length + 1)) + this.result.add(this.path.substr(this.root.length + 1)) this.emit('done', this.result) } else { const hasIg = this.entries.some(e => @@ -145,7 +149,7 @@ class Walker extends EE { const abs = this.path + '/' + entry if (!st.isDirectory()) { if (file) - this.result.push(abs.substr(this.root.length + 1)) + this.result.add(abs.substr(this.root.length + 1)) then() } else { // is a directory diff --git a/node_modules/ignore-walk/package.json b/node_modules/ignore-walk/package.json index cc041a55e4757..882c1398b07d6 100644 --- a/node_modules/ignore-walk/package.json +++ b/node_modules/ignore-walk/package.json @@ -1,8 +1,8 @@ { "_from": "ignore-walk@^3.0.1", - "_id": "ignore-walk@3.0.1", + "_id": "ignore-walk@3.0.3", "_inBundle": false, - "_integrity": "sha512-DTVlMx3IYPe0/JJcYP7Gxg7ttZZu3IInhuEhbchuqneY9wWe5Ojy2mXLBaQFUQmo0AW2r3qG7m1mg86js+gnlQ==", + "_integrity": "sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw==", "_location": "/ignore-walk", "_phantomChildren": {}, "_requested": { @@ -18,10 +18,10 @@ "_requiredBy": [ "/npm-packlist" ], - "_resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.1.tgz", - "_shasum": "a83e62e7d272ac0e3b551aaa82831a19b69f82f8", + "_resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.3.tgz", + "_shasum": "017e2447184bfeade7c238e4aefdd1e8f95b1e37", "_spec": "ignore-walk@^3.0.1", - "_where": "/Users/rebecca/code/npm/node_modules/npm-packlist", + "_where": "/Users/mperrotte/npminc/cli/node_modules/npm-packlist", "author": { "name": "Isaac Z. Schlueter", "email": "i@izs.me", @@ -40,7 +40,7 @@ "mkdirp": "^0.5.1", "mutate-fs": "^1.1.0", "rimraf": "^2.6.1", - "tap": "^10.7.2" + "tap": "^14.6.9" }, "files": [ "index.js" @@ -65,7 +65,10 @@ "postpublish": "git push origin --all; git push origin --tags", "postversion": "npm publish", "preversion": "npm test", - "test": "tap test/*.js --100" + "test": "tap" }, - "version": "3.0.1" + "tap": { + "jobs": 1 + }, + "version": "3.0.3" } diff --git a/node_modules/infer-owner/LICENSE b/node_modules/infer-owner/LICENSE new file mode 100644 index 0000000000000..20a4762540923 --- /dev/null +++ b/node_modules/infer-owner/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. 
and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/infer-owner/README.md b/node_modules/infer-owner/README.md new file mode 100644 index 0000000000000..146caf7b8c801 --- /dev/null +++ b/node_modules/infer-owner/README.md @@ -0,0 +1,41 @@ +# infer-owner + +Infer the owner of a path based on the owner of its nearest existing parent + +## USAGE + +```js +const inferOwner = require('infer-owner') + +inferOwner('/some/cache/folder/file').then(owner => { + // owner is {uid, gid} that should be attached to + // the /some/cache/folder/file, based on ownership + // of /some/cache/folder, /some/cache, /some, or /, + // whichever is the first to exist +}) + +// same, but not async +const owner = inferOwner.sync('/some/cache/folder/file') + +// results are cached! to reset the cache (eg, to change +// permissions for whatever reason), do this: +inferOwner.clearCache() +``` + +This module endeavors to be as performant as possible. Parallel requests +for ownership of the same path will only stat the directories one time. + +## API + +* `inferOwner(path) -> Promise<{ uid, gid }>` + + If the path exists, return its uid and gid. If it does not, look to + its parent, then its grandparent, and so on. + +* `inferOwner(path) -> { uid, gid }` + + Sync form of `inferOwner(path)`. + +* `inferOwner.clearCache()` + + Delete all cached ownership information and in-flight tracking. diff --git a/node_modules/infer-owner/index.js b/node_modules/infer-owner/index.js new file mode 100644 index 0000000000000..a7bddcbd2288b --- /dev/null +++ b/node_modules/infer-owner/index.js @@ -0,0 +1,71 @@ +const cache = new Map() +const fs = require('fs') +const { dirname, resolve } = require('path') + + +const lstat = path => new Promise((res, rej) => + fs.lstat(path, (er, st) => er ? rej(er) : res(st))) + +const inferOwner = path => { + path = resolve(path) + if (cache.has(path)) + return Promise.resolve(cache.get(path)) + + const statThen = st => { + const { uid, gid } = st + cache.set(path, { uid, gid }) + return { uid, gid } + } + const parent = dirname(path) + const parentTrap = parent === path ? null : er => { + return inferOwner(parent).then((owner) => { + cache.set(path, owner) + return owner + }) + } + return lstat(path).then(statThen, parentTrap) +} + +const inferOwnerSync = path => { + path = resolve(path) + if (cache.has(path)) + return cache.get(path) + + const parent = dirname(path) + + // avoid obscuring call site by re-throwing + // "catch" the error by returning from a finally, + // only if we're not at the root, and the parent call works. 
+ let threw = true + try { + const st = fs.lstatSync(path) + threw = false + const { uid, gid } = st + cache.set(path, { uid, gid }) + return { uid, gid } + } finally { + if (threw && parent !== path) { + const owner = inferOwnerSync(parent) + cache.set(path, owner) + return owner // eslint-disable-line no-unsafe-finally + } + } +} + +const inflight = new Map() +module.exports = path => { + path = resolve(path) + if (inflight.has(path)) + return Promise.resolve(inflight.get(path)) + const p = inferOwner(path).then(owner => { + inflight.delete(path) + return owner + }) + inflight.set(path, p) + return p +} +module.exports.sync = inferOwnerSync +module.exports.clearCache = () => { + cache.clear() + inflight.clear() +} diff --git a/node_modules/infer-owner/package.json b/node_modules/infer-owner/package.json new file mode 100644 index 0000000000000..1b7b3e0509f42 --- /dev/null +++ b/node_modules/infer-owner/package.json @@ -0,0 +1,63 @@ +{ + "_from": "infer-owner@1.0.4", + "_id": "infer-owner@1.0.4", + "_inBundle": false, + "_integrity": "sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==", + "_location": "/infer-owner", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "infer-owner@1.0.4", + "name": "infer-owner", + "escapedName": "infer-owner", + "rawSpec": "1.0.4", + "saveSpec": null, + "fetchSpec": "1.0.4" + }, + "_requiredBy": [ + "#USER", + "/" + ], + "_resolved": "https://registry.npmjs.org/infer-owner/-/infer-owner-1.0.4.tgz", + "_shasum": "c4cefcaa8e51051c2a40ba2ce8a3d27295af9467", + "_spec": "infer-owner@1.0.4", + "_where": "/Users/isaacs/dev/npm/cli", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "https://izs.me" + }, + "bugs": { + "url": "https://github.com/npm/infer-owner/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Infer the owner of a path based on the owner of its nearest existing parent", + "devDependencies": { + "mutate-fs": "^2.1.1", + "tap": "^12.4.2" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/npm/infer-owner#readme", + "license": "ISC", + "main": "index.js", + "name": "infer-owner", + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/infer-owner.git" + }, + "scripts": { + "postpublish": "git push origin --follow-tags", + "postversion": "npm publish", + "preversion": "npm test", + "snap": "TAP_SNAPSHOT=1 tap -J test/*.js --100", + "test": "tap -J test/*.js --100" + }, + "version": "1.0.4" +} diff --git a/node_modules/inherits/inherits.js b/node_modules/inherits/inherits.js index 3b94763a76eef..f71f2d93294a6 100644 --- a/node_modules/inherits/inherits.js +++ b/node_modules/inherits/inherits.js @@ -1,7 +1,9 @@ try { var util = require('util'); + /* istanbul ignore next */ if (typeof util.inherits !== 'function') throw ''; module.exports = util.inherits; } catch (e) { + /* istanbul ignore next */ module.exports = require('./inherits_browser.js'); } diff --git a/node_modules/inherits/inherits_browser.js b/node_modules/inherits/inherits_browser.js index c1e78a75e6b52..86bbb3dc29e48 100644 --- a/node_modules/inherits/inherits_browser.js +++ b/node_modules/inherits/inherits_browser.js @@ -1,23 +1,27 @@ if (typeof Object.create === 'function') { // implementation from standard node.js 'util' module module.exports = function inherits(ctor, superCtor) { - ctor.super_ = superCtor - ctor.prototype = Object.create(superCtor.prototype, 
{ - constructor: { - value: ctor, - enumerable: false, - writable: true, - configurable: true - } - }); + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } }; } else { // old school shim for old browsers module.exports = function inherits(ctor, superCtor) { - ctor.super_ = superCtor - var TempCtor = function () {} - TempCtor.prototype = superCtor.prototype - ctor.prototype = new TempCtor() - ctor.prototype.constructor = ctor + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } } } diff --git a/node_modules/inherits/package.json b/node_modules/inherits/package.json index 0ae0875ff4874..ab48b2a4b3dbf 100644 --- a/node_modules/inherits/package.json +++ b/node_modules/inherits/package.json @@ -1,52 +1,62 @@ { - "_args": [ - [ - "inherits@2.0.3", - "/Users/rebecca/code/npm" - ] - ], - "_from": "inherits@2.0.3", - "_id": "inherits@2.0.3", + "_from": "inherits@2.0.4", + "_id": "inherits@2.0.4", "_inBundle": false, - "_integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", + "_integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", "_location": "/inherits", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "inherits@2.0.3", + "raw": "inherits@2.0.4", "name": "inherits", "escapedName": "inherits", - "rawSpec": "2.0.3", + "rawSpec": "2.0.4", "saveSpec": null, - "fetchSpec": "2.0.3" + "fetchSpec": "2.0.4" }, "_requiredBy": [ + "#USER", "/", + "/are-we-there-yet/readable-stream", "/block-stream", + "/cacache/glob", "/concat-stream", + "/concat-stream/readable-stream", "/duplexify", + "/duplexify/readable-stream", "/flush-write-stream", + "/flush-write-stream/readable-stream", "/from2", + "/from2/readable-stream", + "/fs-write-stream-atomic/readable-stream", "/fstream", "/glob", "/node-gyp/tar", "/parallel-transform", + "/parallel-transform/readable-stream", "/pumpify", "/readable-stream", "/sorted-union-stream/from2", - "/sorted-union-stream/readable-stream" + "/sorted-union-stream/readable-stream", + "/stream-iterate/readable-stream", + "/tap-mocha-reporter/readable-stream", + "/tar-stream", + "/through2/readable-stream" ], - "_resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "_spec": "2.0.3", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "_shasum": "0fa2c64f932917c3433a0ded55363aae37416b7c", + "_spec": "inherits@2.0.4", + "_where": "/Users/isaacs/dev/npm/cli", "browser": "./inherits_browser.js", "bugs": { "url": "https://github.com/isaacs/inherits/issues" }, + "bundleDependencies": false, + "deprecated": false, "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", "devDependencies": { - "tap": "^7.1.0" + "tap": "^14.2.4" }, "files": [ "inherits.js", @@ -71,7 +81,7 @@ "url": "git://github.com/isaacs/inherits.git" }, "scripts": { - "test": "node test" + "test": "tap" }, - "version": "2.0.3" + "version": "2.0.4" } diff --git a/node_modules/invert-kv/index.js b/node_modules/invert-kv/index.js index 61e21961128f7..27f9cbb8eaf28 100644 --- a/node_modules/invert-kv/index.js +++ b/node_modules/invert-kv/index.js @@ -1,14 +1,14 @@ 'use strict'; -module.exports = function (obj) { - if (typeof obj !== 
'object') { +module.exports = object => { + if (typeof object !== 'object') { throw new TypeError('Expected an object'); } - var ret = {}; + const ret = {}; - for (var key in obj) { - var val = obj[key]; - ret[val] = key; + for (const key of Object.keys(object)) { + const value = object[key]; + ret[value] = key; } return ret; diff --git a/node_modules/invert-kv/license b/node_modules/invert-kv/license new file mode 100644 index 0000000000000..e7af2f77107d7 --- /dev/null +++ b/node_modules/invert-kv/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/invert-kv/package.json b/node_modules/invert-kv/package.json index c38e09e5d91ba..beb77fa37f6fa 100644 --- a/node_modules/invert-kv/package.json +++ b/node_modules/invert-kv/package.json @@ -1,43 +1,44 @@ { - "_from": "invert-kv@^1.0.0", - "_id": "invert-kv@1.0.0", + "_from": "invert-kv@^2.0.0", + "_id": "invert-kv@2.0.0", "_inBundle": false, - "_integrity": "sha1-EEqOSqym09jNFXqO+L+rLXo//bY=", + "_integrity": "sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA==", "_location": "/invert-kv", "_phantomChildren": {}, "_requested": { "type": "range", "registry": true, - "raw": "invert-kv@^1.0.0", + "raw": "invert-kv@^2.0.0", "name": "invert-kv", "escapedName": "invert-kv", - "rawSpec": "^1.0.0", + "rawSpec": "^2.0.0", "saveSpec": null, - "fetchSpec": "^1.0.0" + "fetchSpec": "^2.0.0" }, "_requiredBy": [ "/lcid" ], - "_resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-1.0.0.tgz", - "_shasum": "104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6", - "_spec": "invert-kv@^1.0.0", - "_where": "/Users/rebecca/code/npm/node_modules/lcid", + "_resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-2.0.0.tgz", + "_shasum": "7393f5afa59ec9ff5f67a27620d11c226e3eec02", + "_spec": "invert-kv@^2.0.0", + "_where": "/Users/mperrotte/npminc/cli/node_modules/lcid", "author": { "name": "Sindre Sorhus", "email": "sindresorhus@gmail.com", - "url": "http://sindresorhus.com" + "url": "sindresorhus.com" }, "bugs": { "url": "https://github.com/sindresorhus/invert-kv/issues" }, "bundleDependencies": false, "deprecated": false, - "description": "Invert the key/value of an object. Example: {foo: 'bar'} → {bar: 'foo'}", + "description": "Invert the key/value of an object. 
Example: `{foo: 'bar'}` → `{bar: 'foo'}`", "devDependencies": { - "mocha": "*" + "ava": "*", + "xo": "*" }, "engines": { - "node": ">=0.10.0" + "node": ">=4" }, "files": [ "index.js" @@ -45,10 +46,8 @@ "homepage": "https://github.com/sindresorhus/invert-kv#readme", "keywords": [ "object", - "obj", "key", "value", - "val", "kv", "invert" ], @@ -59,7 +58,7 @@ "url": "git+https://github.com/sindresorhus/invert-kv.git" }, "scripts": { - "test": "mocha" + "test": "xo && ava" }, - "version": "1.0.0" + "version": "2.0.0" } diff --git a/node_modules/invert-kv/readme.md b/node_modules/invert-kv/readme.md index 039fc7cfa25b0..365a97997cac3 100644 --- a/node_modules/invert-kv/readme.md +++ b/node_modules/invert-kv/readme.md @@ -5,15 +5,15 @@ ## Install -```sh -$ npm install --save invert-kv +``` +$ npm install invert-kv ``` ## Usage ```js -var invertKv = require('invert-kv'); +const invertKv = require('invert-kv'); invertKv({foo: 'bar', unicorn: 'rainbow'}); //=> {bar: 'foo', rainbow: 'unicorn'} @@ -22,4 +22,4 @@ invertKv({foo: 'bar', unicorn: 'rainbow'}); ## License -MIT © [Sindre Sorhus](http://sindresorhus.com) +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/is-builtin-module/index.js b/node_modules/is-builtin-module/index.js deleted file mode 100644 index b6cfa616ae85c..0000000000000 --- a/node_modules/is-builtin-module/index.js +++ /dev/null @@ -1,10 +0,0 @@ -'use strict'; -var builtinModules = require('builtin-modules'); - -module.exports = function (str) { - if (typeof str !== 'string') { - throw new TypeError('Expected a string'); - } - - return builtinModules.indexOf(str) !== -1; -}; diff --git a/node_modules/is-builtin-module/package.json b/node_modules/is-builtin-module/package.json deleted file mode 100644 index d1cd899d9ad65..0000000000000 --- a/node_modules/is-builtin-module/package.json +++ /dev/null @@ -1,75 +0,0 @@ -{ - "_from": "is-builtin-module@^1.0.0", - "_id": "is-builtin-module@1.0.0", - "_inBundle": false, - "_integrity": "sha1-VAVy0096wxGfj3bDDLwbHgN6/74=", - "_location": "/is-builtin-module", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "is-builtin-module@^1.0.0", - "name": "is-builtin-module", - "escapedName": "is-builtin-module", - "rawSpec": "^1.0.0", - "saveSpec": null, - "fetchSpec": "^1.0.0" - }, - "_requiredBy": [ - "/normalize-package-data" - ], - "_resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-1.0.0.tgz", - "_shasum": "540572d34f7ac3119f8f76c30cbc1b1e037affbe", - "_spec": "is-builtin-module@^1.0.0", - "_where": "/Users/rebecca/code/npm/node_modules/normalize-package-data", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "bugs": { - "url": "https://github.com/sindresorhus/is-builtin-module/issues" - }, - "bundleDependencies": false, - "dependencies": { - "builtin-modules": "^1.0.0" - }, - "deprecated": false, - "description": "Check if a string matches the name of a Node.js builtin module", - "devDependencies": { - "ava": "0.0.4" - }, - "engines": { - "node": ">=0.10.0" - }, - "files": [ - "index.js" - ], - "homepage": "https://github.com/sindresorhus/is-builtin-module#readme", - "keywords": [ - "builtin", - "built-in", - "builtins", - "node", - "modules", - "core", - "bundled", - "list", - "array", - "names", - "is", - "detect", - "check", - "match" - ], - "license": "MIT", - "name": "is-builtin-module", - "repository": { - "type": "git", - "url": 
"git+https://github.com/sindresorhus/is-builtin-module.git" - }, - "scripts": { - "test": "node test.js" - }, - "version": "1.0.0" -} diff --git a/node_modules/is-builtin-module/readme.md b/node_modules/is-builtin-module/readme.md deleted file mode 100644 index 798dcf4371799..0000000000000 --- a/node_modules/is-builtin-module/readme.md +++ /dev/null @@ -1,33 +0,0 @@ -# is-builtin-module [![Build Status](https://travis-ci.org/sindresorhus/is-builtin-module.svg?branch=master)](https://travis-ci.org/sindresorhus/is-builtin-module) - -> Check if a string matches the name of a Node.js builtin module - - -## Install - -``` -$ npm install --save is-builtin-module -``` - - -## Usage - -```js -var isBuiltinModule = require('is-builtin-module'); - -isBuiltinModule('fs'); -//=> true - -isBuiltinModule('unicorn'); -//=> false :( -``` - - -## Related - -- [builtin-modules](https://github.com/sindresorhus/builtin-modules) - List of the Node.js builtin modules - - -## License - -MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/is-callable/.editorconfig b/node_modules/is-callable/.editorconfig new file mode 100644 index 0000000000000..bc228f8269443 --- /dev/null +++ b/node_modules/is-callable/.editorconfig @@ -0,0 +1,20 @@ +root = true + +[*] +indent_style = tab +indent_size = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true +max_line_length = 150 + +[CHANGELOG.md] +indent_style = space +indent_size = 2 + +[*.json] +max_line_length = off + +[Makefile] +max_line_length = off diff --git a/node_modules/is-callable/.istanbul.yml b/node_modules/is-callable/.istanbul.yml new file mode 100644 index 0000000000000..9affe0bc3e67a --- /dev/null +++ b/node_modules/is-callable/.istanbul.yml @@ -0,0 +1,47 @@ +verbose: false +instrumentation: + root: . 
+ extensions: + - .js + - .jsx + default-excludes: true + excludes: [] + variable: __coverage__ + compact: true + preserve-comments: false + complete-copy: false + save-baseline: false + baseline-file: ./coverage/coverage-baseline.raw.json + include-all-sources: false + include-pid: false + es-modules: false + auto-wrap: false +reporting: + print: summary + reports: + - html + dir: ./coverage + summarizer: pkg + report-config: {} + watermarks: + statements: [50, 80] + functions: [50, 80] + branches: [50, 80] + lines: [50, 80] +hooks: + hook-run-in-context: false + post-require-hook: null + handle-sigint: false +check: + global: + statements: 100 + lines: 100 + branches: 100 + functions: 100 + excludes: [] + each: + statements: 100 + lines: 100 + branches: 100 + functions: 100 + excludes: [] diff --git a/node_modules/is-callable/.jscs.json b/node_modules/is-callable/.jscs.json new file mode 100644 index 0000000000000..b4d9b8b40aebf --- /dev/null +++ b/node_modules/is-callable/.jscs.json @@ -0,0 +1,176 @@ +{ + "es3": true, + + "additionalRules": [], + + "requireSemicolons": true, + + "disallowMultipleSpaces": true, + + "disallowIdentifierNames": [], + + "requireCurlyBraces": { + "allExcept": [], + "keywords": ["if", "else", "for", "while", "do", "try", "catch"] + }, + + "requireSpaceAfterKeywords": ["if", "else", "for", "while", "do", "switch", "return", "try", "catch", "function"], + + "disallowSpaceAfterKeywords": [], + + "disallowSpaceBeforeComma": true, + "disallowSpaceAfterComma": false, + "disallowSpaceBeforeSemicolon": true, + + "disallowNodeTypes": [ + "DebuggerStatement", + "ForInStatement", + "LabeledStatement", + "SwitchCase", + "SwitchStatement", + "WithStatement" + ], + + "requireObjectKeysOnNewLine": { "allExcept": ["sameLine"] }, + + "requireSpacesInAnonymousFunctionExpression": { "beforeOpeningRoundBrace": true, "beforeOpeningCurlyBrace": true }, + "requireSpacesInNamedFunctionExpression": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInNamedFunctionExpression": { "beforeOpeningRoundBrace": true }, + "requireSpacesInFunctionDeclaration": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInFunctionDeclaration": { "beforeOpeningRoundBrace": true }, + + "requireSpaceBetweenArguments": true, + + "disallowSpacesInsideParentheses": true, + + "disallowSpacesInsideArrayBrackets": true, + + "disallowQuotedKeysInObjects": { "allExcept": ["reserved"] }, + + "disallowSpaceAfterObjectKeys": true, + + "requireCommaBeforeLineBreak": true, + + "disallowSpaceAfterPrefixUnaryOperators": ["++", "--", "+", "-", "~", "!"], + "requireSpaceAfterPrefixUnaryOperators": [], + + "disallowSpaceBeforePostfixUnaryOperators": ["++", "--"], + "requireSpaceBeforePostfixUnaryOperators": [], + + "disallowSpaceBeforeBinaryOperators": [], + "requireSpaceBeforeBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + + "requireSpaceAfterBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + "disallowSpaceAfterBinaryOperators": [], + + "disallowImplicitTypeConversion": ["binary", "string"], + + "disallowKeywords": ["with", "eval"], + + "requireKeywordsOnNewLine": [], + "disallowKeywordsOnNewLine": ["else"], + + "requireLineFeedAtFileEnd": true, + + "disallowTrailingWhitespace": true, + + "disallowTrailingComma": true, + + "excludeFiles": ["node_modules/**", "vendor/**"], + + "disallowMultipleLineStrings": true, + + "requireDotNotation": { "allExcept": ["keywords"] }, + + "requireParenthesesAroundIIFE": true, + + "validateLineBreaks": "LF", + + "validateQuoteMarks": { + 
"escape": true, + "mark": "'" + }, + + "disallowOperatorBeforeLineBreak": [], + + "requireSpaceBeforeKeywords": [ + "do", + "for", + "if", + "else", + "switch", + "case", + "try", + "catch", + "finally", + "while", + "with", + "return" + ], + + "validateAlignedFunctionParameters": { + "lineBreakAfterOpeningBraces": true, + "lineBreakBeforeClosingBraces": true + }, + + "requirePaddingNewLinesBeforeExport": true, + + "validateNewlineAfterArrayElements": { + "maximum": 1 + }, + + "requirePaddingNewLinesAfterUseStrict": true, + + "disallowArrowFunctions": true, + + "disallowMultiLineTernary": true, + + "validateOrderInObjectKeys": "asc-insensitive", + + "disallowIdenticalDestructuringNames": true, + + "disallowNestedTernaries": { "maxLevel": 1 }, + + "requireSpaceAfterComma": { "allExcept": ["trailing"] }, + "requireAlignedMultilineParams": false, + + "requireSpacesInGenerator": { + "afterStar": true + }, + + "disallowSpacesInGenerator": { + "beforeStar": true + }, + + "disallowVar": false, + + "requireArrayDestructuring": false, + + "requireEnhancedObjectLiterals": false, + + "requireObjectDestructuring": false, + + "requireEarlyReturn": false, + + "requireCapitalizedConstructorsNew": { + "allExcept": ["Function", "String", "Object", "Symbol", "Number", "Date", "RegExp", "Error", "Boolean", "Array"] + }, + + "requireImportAlphabetized": false, + + "requireSpaceBeforeObjectValues": true, + "requireSpaceBeforeDestructuredValues": true, + + "disallowSpacesInsideTemplateStringPlaceholders": true, + + "disallowArrayDestructuringReturn": false, + + "requireNewlineBeforeSingleStatementsInIf": false, + + "disallowUnusedVariables": true, + + "requireSpacesInsideImportedObjectBraces": true, + + "requireUseStrict": true +} + diff --git a/node_modules/is-callable/.travis.yml b/node_modules/is-callable/.travis.yml new file mode 100644 index 0000000000000..767256c8dd484 --- /dev/null +++ b/node_modules/is-callable/.travis.yml @@ -0,0 +1,225 @@ +language: node_js +os: + - linux +node_js: + - "10.4" + - "9.11" + - "8.11" + - "7.10" + - "6.14" + - "5.12" + - "4.9" + - "iojs-v3.3" + - "iojs-v2.5" + - "iojs-v1.8" + - "0.12" + - "0.10" + - "0.8" +before_install: + - 'case "${TRAVIS_NODE_VERSION}" in 0.*) export NPM_CONFIG_STRICT_SSL=false ;; esac' + - 'nvm install-latest-npm' +install: + - 'if [ "${TRAVIS_NODE_VERSION}" = "0.6" ] || [ "${TRAVIS_NODE_VERSION}" = "0.9" ]; then nvm install --latest-npm 0.8 && npm install && nvm use "${TRAVIS_NODE_VERSION}"; else npm install; fi;' +script: + - 'if [ -n "${PRETEST-}" ]; then npm run pretest ; fi' + - 'if [ -n "${POSTTEST-}" ]; then npm run posttest ; fi' + - 'if [ -n "${COVERAGE-}" ]; then npm run coverage ; fi' + - 'if [ -n "${TEST-}" ]; then npm run tests-only ; fi' +sudo: false +env: + - TEST=true +matrix: + fast_finish: true + include: + - node_js: "lts/*" + env: PRETEST=true + - node_js: "lts/*" + env: POSTTEST=true + - node_js: "4" + env: COVERAGE=true + - node_js: "10.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.4" + env: TEST=true ALLOW_FAILURE=true + - 
node_js: "9.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.13" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.12" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.4" + env: TEST=true ALLOW_FAILURE=true + - 
node_js: "iojs-v2.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.4" + env: TEST=true ALLOW_FAILURE=true + allow_failures: + - os: osx + - env: TEST=true ALLOW_FAILURE=true + - env: COVERAGE=true diff --git a/node_modules/is-callable/CHANGELOG.md b/node_modules/is-callable/CHANGELOG.md new file mode 100644 index 0000000000000..58286a0535b74 --- /dev/null +++ b/node_modules/is-callable/CHANGELOG.md @@ -0,0 +1,56 @@ +1.1.4 / 2018-07-02 +================= + * [Fix] improve `class` and arrow function detection (#30, #31) + * [Tests] on all latest node minors; improve matrix + * [Dev Deps] update all dev deps + +1.1.3 / 2016-02-27 +================= + * [Fix] ensure “class “ doesn’t screw up “class” detection + * [Tests] up to `node` `v5.7`, `v4.3` + * [Dev Deps] update to `eslint` v2, `@ljharb/eslint-config`, `jscs` + +1.1.2 / 2016-01-15 +================= + * [Fix] Make sure comments don’t screw up “class” detection (#4) + * [Tests] up to `node` `v5.3` + * [Tests] Add `parallelshell`, run both `--es-staging` and stock tests at once + * [Dev Deps] update `tape`, `jscs`, `nsp`, `eslint`, `@ljharb/eslint-config` + * [Refactor] convert `isNonES6ClassFn` into `isES6ClassFn` + +1.1.1 / 2015-11-30 +================= + * [Fix] do not throw when a non-function has a function in its [[Prototype]] (#2) + * [Dev Deps] update `tape`, `eslint`, `@ljharb/eslint-config`, `jscs`, `nsp`, `semver` + * [Tests] up to `node` `v5.1` + * [Tests] no longer allow node 0.8 to fail. + * [Tests] fix npm upgrades in older nodes + +1.1.0 / 2015-10-02 +================= + * [Fix] Some browsers report TypedArray constructors as `typeof object` + * [New] return false for "class" constructors, when possible. + * [Tests] up to `io.js` `v3.3`, `node` `v4.1` + * [Dev Deps] update `eslint`, `editorconfig-tools`, `nsp`, `tape`, `semver`, `jscs`, `covert`, `make-arrow-function` + * [Docs] Switch from vb.teelaun.ch to versionbadg.es for the npm version badge SVG + +1.0.4 / 2015-01-30 +================= + * If @@toStringTag is not present, use the old-school Object#toString test. + +1.0.3 / 2015-01-29 +================= + * Add tests to ensure arrow functions are callable. + * Refactor to aid optimization of non-try/catch code. + +1.0.2 / 2015-01-29 +================= + * Fix broken package.json + +1.0.1 / 2015-01-29 +================= + * Add early exit for typeof not "function" + +1.0.0 / 2015-01-29 +================= + * Initial release. 
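The changelog above tracks fixes to how `class` constructors, arrow functions and `@@toStringTag` fakes are detected. A minimal usage sketch (an editorial illustration, assuming the vendored `is-callable@1.1.4` added in this diff resolves via `require('is-callable')`) shows what those behaviors look like in practice:

```js
// Usage sketch for the vendored is-callable@1.1.4; mirrors cases covered
// by its index.js and test.js later in this diff.
var isCallable = require('is-callable');

console.log(isCallable(function () {}));  // true  -- plain functions
console.log(isCallable(function* () {})); // true  -- generator functions
console.log(isCallable(x => x));          // true  -- arrow functions (detection fixed in 1.1.4)
console.log(isCallable(class Foo {}));    // false -- class constructors are rejected
console.log(isCallable({}));              // false -- plain objects are not callable
```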
diff --git a/node_modules/is-callable/LICENSE b/node_modules/is-callable/LICENSE new file mode 100644 index 0000000000000..b43df444e5182 --- /dev/null +++ b/node_modules/is-callable/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Jordan Harband + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/node_modules/is-callable/Makefile b/node_modules/is-callable/Makefile new file mode 100644 index 0000000000000..b9e4fe1aab3dd --- /dev/null +++ b/node_modules/is-callable/Makefile @@ -0,0 +1,61 @@ +# Since we rely on paths relative to the makefile location, abort if make isn't being run from there. +$(if $(findstring /,$(MAKEFILE_LIST)),$(error Please only invoke this makefile from the directory it resides in)) + + # The files that need updating when incrementing the version number. +VERSIONED_FILES := *.js *.json README* + + +# Add the local npm packages' bin folder to the PATH, so that `make` can find them, when invoked directly. +# Note that rather than using `$(npm bin)` the 'node_modules/.bin' path component is hard-coded, so that invocation works even from an environment +# where npm is (temporarily) unavailable due to having deactivated an nvm instance loaded into the calling shell in order to avoid interference with tests. +export PATH := $(shell printf '%s' "$$PWD/node_modules/.bin:$$PATH") +UTILS := semver +# Make sure that all required utilities can be located. +UTIL_CHECK := $(or $(shell PATH="$(PATH)" which $(UTILS) >/dev/null && echo 'ok'),$(error Did you forget to run `npm install` after cloning the repo? At least one of the required supporting utilities not found: $(UTILS))) + +# Default target (by virtue of being the first non '.'-prefixed in the file). +.PHONY: _no-target-specified +_no-target-specified: + $(error Please specify the target to make - `make list` shows targets. Alternatively, use `npm test` to run the default tests; `npm run` shows all tests) + +# Lists all targets defined in this makefile. +.PHONY: list +list: + @$(MAKE) -pRrn : -f $(MAKEFILE_LIST) 2>/dev/null | awk -v RS= -F: '/^# File/,/^# Finished Make data base/ {if ($$1 !~ "^[#.]") {print $$1}}' | command grep -v -e '^[^[:alnum:]]' -e '^$@$$command ' | sort + +# All-tests target: invokes the specified test suites for ALL shells defined in $(SHELLS). 
+.PHONY: test +test: + @npm test + +.PHONY: _ensure-tag +_ensure-tag: +ifndef TAG + $(error Please invoke with `make TAG= release`, where is either an increment specifier (patch, minor, major, prepatch, preminor, premajor, prerelease), or an explicit major.minor.patch version number) +endif + +CHANGELOG_ERROR = $(error No CHANGELOG specified) +.PHONY: _ensure-changelog +_ensure-changelog: + @ (git status -sb --porcelain | command grep -E '^( M|[MA] ) CHANGELOG.md' > /dev/null) || (echo no CHANGELOG.md specified && exit 2) + +# Ensures that the git workspace is clean. +.PHONY: _ensure-clean +_ensure-clean: + @[ -z "$$((git status --porcelain --untracked-files=no || echo err) | command grep -v 'CHANGELOG.md')" ] || { echo "Workspace is not clean; please commit changes first." >&2; exit 2; } + +# Makes a release; invoke with `make TAG= release`. +.PHONY: release +release: _ensure-tag _ensure-changelog _ensure-clean + @old_ver=`git describe --abbrev=0 --tags --match 'v[0-9]*.[0-9]*.[0-9]*'` || { echo "Failed to determine current version." >&2; exit 1; }; old_ver=$${old_ver#v}; \ + new_ver=`echo "$(TAG)" | sed 's/^v//'`; new_ver=$${new_ver:-patch}; \ + if printf "$$new_ver" | command grep -q '^[0-9]'; then \ + semver "$$new_ver" >/dev/null || { echo 'Invalid version number specified: $(TAG) - must be major.minor.patch' >&2; exit 2; }; \ + semver -r "> $$old_ver" "$$new_ver" >/dev/null || { echo 'Invalid version number specified: $(TAG) - must be HIGHER than current one.' >&2; exit 2; } \ + else \ + new_ver=`semver -i "$$new_ver" "$$old_ver"` || { echo 'Invalid version-increment specifier: $(TAG)' >&2; exit 2; } \ + fi; \ + printf "=== Bumping version **$$old_ver** to **$$new_ver** before committing and tagging:\n=== TYPE 'proceed' TO PROCEED, anything else to abort: " && read response && [ "$$response" = 'proceed' ] || { echo 'Aborted.' >&2; exit 2; }; \ + replace "$$old_ver" "$$new_ver" -- $(VERSIONED_FILES) && \ + git commit -m "v$$new_ver" $(VERSIONED_FILES) CHANGELOG.md && \ + git tag -a -m "v$$new_ver" "v$$new_ver" diff --git a/node_modules/is-callable/README.md b/node_modules/is-callable/README.md new file mode 100644 index 0000000000000..0cb65879972bf --- /dev/null +++ b/node_modules/is-callable/README.md @@ -0,0 +1,59 @@ +# is-callable [![Version Badge][2]][1] + +[![Build Status][3]][4] +[![dependency status][5]][6] +[![dev dependency status][7]][8] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][11]][1] + +[![browser support][9]][10] + +Is this JS value callable? Works with Functions and GeneratorFunctions, despite ES6 @@toStringTag. 
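The `@@toStringTag` caveat in the sentence above is easy to miss: an object can claim to be a `Function` via `Symbol.toStringTag` without actually being callable. A short sketch (hypothetical, mirroring the fake-function case in the vendored `test.js`) makes the point:

```js
// An object branded as a "Function" is still rejected, because when
// @@toStringTag exists the check relies on Function.prototype.toString,
// not on the Object#toString brand.
var isCallable = require('is-callable');

var fake = { toString: function () { return 'function () {}'; } };
if (typeof Symbol === 'function' && Symbol.toStringTag) {
  fake[Symbol.toStringTag] = 'Function'; // lie about the Object#toString brand
}

console.log(isCallable(fake));           // false
console.log(isCallable(function () {})); // true
```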
+ +## Example + +```js +var isCallable = require('is-callable'); +var assert = require('assert'); + +assert.notOk(isCallable(undefined)); +assert.notOk(isCallable(null)); +assert.notOk(isCallable(false)); +assert.notOk(isCallable(true)); +assert.notOk(isCallable([])); +assert.notOk(isCallable({})); +assert.notOk(isCallable(/a/g)); +assert.notOk(isCallable(new RegExp('a', 'g'))); +assert.notOk(isCallable(new Date())); +assert.notOk(isCallable(42)); +assert.notOk(isCallable(NaN)); +assert.notOk(isCallable(Infinity)); +assert.notOk(isCallable(new Number(42))); +assert.notOk(isCallable('foo')); +assert.notOk(isCallable(Object('foo'))); + +assert.ok(isCallable(function () {})); +assert.ok(isCallable(function* () {})); +assert.ok(isCallable(x => x * x)); +``` + +## Tests +Simply clone the repo, `npm install`, and run `npm test` + +[1]: https://npmjs.org/package/is-callable +[2]: http://versionbadg.es/ljharb/is-callable.svg +[3]: https://travis-ci.org/ljharb/is-callable.svg +[4]: https://travis-ci.org/ljharb/is-callable +[5]: https://david-dm.org/ljharb/is-callable.svg +[6]: https://david-dm.org/ljharb/is-callable +[7]: https://david-dm.org/ljharb/is-callable/dev-status.svg +[8]: https://david-dm.org/ljharb/is-callable#info=devDependencies +[9]: https://ci.testling.com/ljharb/is-callable.png +[10]: https://ci.testling.com/ljharb/is-callable +[11]: https://nodei.co/npm/is-callable.png?downloads=true&stars=true +[license-image]: http://img.shields.io/npm/l/is-callable.svg +[license-url]: LICENSE +[downloads-image]: http://img.shields.io/npm/dm/is-callable.svg +[downloads-url]: http://npm-stat.com/charts.html?package=is-callable diff --git a/node_modules/is-callable/index.js b/node_modules/is-callable/index.js new file mode 100644 index 0000000000000..d9820b51fd4ad --- /dev/null +++ b/node_modules/is-callable/index.js @@ -0,0 +1,37 @@ +'use strict'; + +var fnToStr = Function.prototype.toString; + +var constructorRegex = /^\s*class\b/; +var isES6ClassFn = function isES6ClassFunction(value) { + try { + var fnStr = fnToStr.call(value); + return constructorRegex.test(fnStr); + } catch (e) { + return false; // not a function + } +}; + +var tryFunctionObject = function tryFunctionToStr(value) { + try { + if (isES6ClassFn(value)) { return false; } + fnToStr.call(value); + return true; + } catch (e) { + return false; + } +}; +var toStr = Object.prototype.toString; +var fnClass = '[object Function]'; +var genClass = '[object GeneratorFunction]'; +var hasToStringTag = typeof Symbol === 'function' && typeof Symbol.toStringTag === 'symbol'; + +module.exports = function isCallable(value) { + if (!value) { return false; } + if (typeof value !== 'function' && typeof value !== 'object') { return false; } + if (typeof value === 'function' && !value.prototype) { return true; } + if (hasToStringTag) { return tryFunctionObject(value); } + if (isES6ClassFn(value)) { return false; } + var strClass = toStr.call(value); + return strClass === fnClass || strClass === genClass; +}; diff --git a/node_modules/is-callable/package.json b/node_modules/is-callable/package.json new file mode 100644 index 0000000000000..50531dfc90165 --- /dev/null +++ b/node_modules/is-callable/package.json @@ -0,0 +1,98 @@ +{ + "name": "is-callable", + "version": "1.1.4", + "author": { + "name": "Jordan Harband", + "email": "ljharb@gmail.com", + "url": "http://ljharb.codes" + }, + "contributors": [ + { + "name": "Jordan Harband", + "email": "ljharb@gmail.com", + "url": "http://ljharb.codes" + } + ], + "description": "Is this JS value callable? 
Works with Functions and GeneratorFunctions, despite ES6 @@toStringTag.", + "license": "MIT", + "main": "index.js", + "scripts": { + "pretest": "npm run --silent lint", + "test": "npm run --silent tests-only", + "posttest": "npm run --silent security", + "tests-only": "npm run --silent test:stock && npm run --silent test:staging", + "test:stock": "node test.js", + "test:staging": "node --es-staging test.js", + "coverage": "npm run --silent istanbul", + "covert": "covert test.js", + "covert:quiet": "covert test.js --quiet", + "istanbul": "npm run --silent istanbul:clean && npm run --silent istanbul:std && npm run --silent istanbul:harmony && npm run --silent istanbul:merge && istanbul check", + "istanbul:clean": "rimraf coverage coverage-std coverage-harmony", + "istanbul:merge": "istanbul-merge --out coverage/coverage.raw.json coverage-harmony/coverage.raw.json coverage-std/coverage.raw.json && istanbul report html", + "istanbul:harmony": "node --harmony ./node_modules/istanbul/lib/cli.js cover test.js --dir coverage-harmony", + "istanbul:std": "istanbul cover test.js --report html --dir coverage-std", + "prelint": "editorconfig-tools check *", + "lint": "npm run jscs && npm run eslint", + "jscs": "jscs *.js", + "eslint": "eslint *.js", + "security": "nsp check" + }, + "repository": { + "type": "git", + "url": "git://github.com/ljharb/is-callable.git" + }, + "keywords": [ + "Function", + "function", + "callable", + "generator", + "generator function", + "arrow", + "arrow function", + "ES6", + "toStringTag", + "@@toStringTag" + ], + "dependencies": {}, + "devDependencies": { + "@ljharb/eslint-config": "^12.2.1", + "covert": "^1.1.0", + "editorconfig-tools": "^0.1.1", + "eslint": "^4.19.1", + "foreach": "^2.0.5", + "istanbul": "1.1.0-alpha.1", + "istanbul-merge": "^1.1.1", + "jscs": "^3.0.7", + "make-arrow-function": "^1.1.0", + "make-generator-function": "^1.1.0", + "nsp": "^3.2.1", + "rimraf": "^2.6.2", + "semver": "^5.5.0", + "tape": "^4.9.1" + }, + "testling": { + "files": "test.js", + "browsers": [ + "iexplore/6.0..latest", + "firefox/3.0..6.0", + "firefox/15.0..latest", + "firefox/nightly", + "chrome/4.0..10.0", + "chrome/20.0..latest", + "chrome/canary", + "opera/10.0..latest", + "opera/next", + "safari/4.0..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2" + ] + }, + "engines": { + "node": ">= 0.4" + } + +,"_resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.4.tgz" +,"_integrity": "sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA==" +,"_from": "is-callable@1.1.4" +} \ No newline at end of file diff --git a/node_modules/is-callable/test.js b/node_modules/is-callable/test.js new file mode 100644 index 0000000000000..f5be51d82e212 --- /dev/null +++ b/node_modules/is-callable/test.js @@ -0,0 +1,158 @@ +'use strict'; + +/* eslint no-magic-numbers: 1 */ + +var test = require('tape'); +var isCallable = require('./'); +var hasSymbols = typeof Symbol === 'function' && typeof Symbol('foo') === 'symbol'; +var genFn = require('make-generator-function'); +var arrowFn = require('make-arrow-function')(); +var weirdlyCommentedArrowFn; +var asyncFn; +var asyncArrowFn; +try { + /* eslint no-new-func: 0 */ + weirdlyCommentedArrowFn = Function('return cl/*/**/=>/**/ass - 1;')(); + asyncFn = Function('return async function foo() {};')(); + asyncArrowFn = Function('return async () => {};')(); +} catch (e) { /**/ } +var forEach = require('foreach'); + +var noop = function () {}; +var classFake = function classFake() 
{ }; // eslint-disable-line func-name-matching +var returnClass = function () { return ' class '; }; +var return3 = function () { return 3; }; +/* for coverage */ +noop(); +classFake(); +returnClass(); +return3(); +/* end for coverage */ + +var invokeFunction = function invokeFunctionString(str) { + var result; + try { + /* eslint-disable no-new-func */ + var fn = Function(str); + /* eslint-enable no-new-func */ + result = fn(); + } catch (e) {} + return result; +}; + +var classConstructor = invokeFunction('"use strict"; return class Foo {}'); + +var commentedClass = invokeFunction('"use strict"; return class/*kkk*/\n//blah\n Bar\n//blah\n {}'); +var commentedClassOneLine = invokeFunction('"use strict"; return class/**/A{}'); +var classAnonymous = invokeFunction('"use strict"; return class{}'); +var classAnonymousCommentedOneLine = invokeFunction('"use strict"; return class/*/*/{}'); + +test('not callables', function (t) { + t.test('non-number/string primitives', function (st) { + st.notOk(isCallable(), 'undefined is not callable'); + st.notOk(isCallable(null), 'null is not callable'); + st.notOk(isCallable(false), 'false is not callable'); + st.notOk(isCallable(true), 'true is not callable'); + st.end(); + }); + + t.notOk(isCallable([]), 'array is not callable'); + t.notOk(isCallable({}), 'object is not callable'); + t.notOk(isCallable(/a/g), 'regex literal is not callable'); + t.notOk(isCallable(new RegExp('a', 'g')), 'regex object is not callable'); + t.notOk(isCallable(new Date()), 'new Date() is not callable'); + + t.test('numbers', function (st) { + st.notOk(isCallable(42), 'number is not callable'); + st.notOk(isCallable(Object(42)), 'number object is not callable'); + st.notOk(isCallable(NaN), 'NaN is not callable'); + st.notOk(isCallable(Infinity), 'Infinity is not callable'); + st.end(); + }); + + t.test('strings', function (st) { + st.notOk(isCallable('foo'), 'string primitive is not callable'); + st.notOk(isCallable(Object('foo')), 'string object is not callable'); + st.end(); + }); + + t.test('non-function with function in its [[Prototype]] chain', function (st) { + var Foo = function Bar() {}; + Foo.prototype = noop; + st.equal(true, isCallable(Foo), 'sanity check: Foo is callable'); + st.equal(false, isCallable(new Foo()), 'instance of Foo is not callable'); + st.end(); + }); + + t.end(); +}); + +test('@@toStringTag', { skip: !hasSymbols || !Symbol.toStringTag }, function (t) { + var fakeFunction = { + toString: function () { return String(return3); }, + valueOf: return3 + }; + fakeFunction[Symbol.toStringTag] = 'Function'; + t.equal(String(fakeFunction), String(return3)); + t.equal(Number(fakeFunction), return3()); + t.notOk(isCallable(fakeFunction), 'fake Function with @@toStringTag "Function" is not callable'); + t.end(); +}); + +var typedArrayNames = [ + 'Int8Array', + 'Uint8Array', + 'Uint8ClampedArray', + 'Int16Array', + 'Uint16Array', + 'Int32Array', + 'Uint32Array', + 'Float32Array', + 'Float64Array' +]; + +test('Functions', function (t) { + t.ok(isCallable(noop), 'function is callable'); + t.ok(isCallable(classFake), 'function with name containing "class" is callable'); + t.ok(isCallable(returnClass), 'function with string " class " is callable'); + t.ok(isCallable(isCallable), 'isCallable is callable'); + t.end(); +}); + +test('Typed Arrays', function (st) { + forEach(typedArrayNames, function (typedArray) { + /* istanbul ignore if : covered in node 0.6 */ + if (typeof global[typedArray] === 'undefined') { + st.comment('# SKIP typed array "' + typedArray + '" not 
supported'); + } else { + st.ok(isCallable(global[typedArray]), typedArray + ' is callable'); + } + }); + st.end(); +}); + +test('Generators', { skip: !genFn }, function (t) { + t.ok(isCallable(genFn), 'generator function is callable'); + t.end(); +}); + +test('Arrow functions', { skip: !arrowFn }, function (t) { + t.ok(isCallable(arrowFn), 'arrow function is callable'); + t.ok(isCallable(weirdlyCommentedArrowFn), 'weirdly commented arrow functions are callable'); + t.end(); +}); + +test('"Class" constructors', { skip: !classConstructor || !commentedClass || !commentedClassOneLine || !classAnonymous }, function (t) { + t.notOk(isCallable(classConstructor), 'class constructors are not callable'); + t.notOk(isCallable(commentedClass), 'class constructors with comments in the signature are not callable'); + t.notOk(isCallable(commentedClassOneLine), 'one-line class constructors with comments in the signature are not callable'); + t.notOk(isCallable(classAnonymous), 'anonymous class constructors are not callable'); + t.notOk(isCallable(classAnonymousCommentedOneLine), 'anonymous one-line class constructors with comments in the signature are not callable'); + t.end(); +}); + +test('`async function`s', { skip: !asyncFn }, function (t) { + t.ok(isCallable(asyncFn), '`async function`s are callable'); + t.ok(isCallable(asyncArrowFn), '`async` arrow functions are callable'); + t.end(); +}); diff --git a/node_modules/is-ci/node_modules/ci-info/CHANGELOG.md b/node_modules/is-ci/node_modules/ci-info/CHANGELOG.md new file mode 100644 index 0000000000000..859a0ad12a53b --- /dev/null +++ b/node_modules/is-ci/node_modules/ci-info/CHANGELOG.md @@ -0,0 +1,62 @@ +# Changelog + +## v1.6.0 + +* feat: add Sail CI support +* feat: add Buddy support +* feat: add Bitrise support +* feat: detect Jenkins PRs +* feat: detect Drone PRs + +## v1.5.1 + +* fix: use full path to vendors.json + +## v1.5.0 + +* feat: add dsari detection ([#15](https://github.com/watson/ci-info/pull/15)) +* feat: add ci.isPR ([#16](https://github.com/watson/ci-info/pull/16)) + +## v1.4.0 + +* feat: add Cirrus CI detection ([#13](https://github.com/watson/ci-info/pull/13)) +* feat: add Shippable CI detection ([#14](https://github.com/watson/ci-info/pull/14)) + +## v1.3.1 + +* chore: reduce npm package size by not including `.github` folder content ([#11](https://github.com/watson/ci-info/pull/11)) + +## v1.3.0 + +* feat: add support for Strider CD +* chore: deprecate vendor constant `TDDIUM` in favor of `SOLANO` +* docs: add missing vendor constant to docs + +## v1.2.0 + +* feat: detect solano-ci ([#9](https://github.com/watson/ci-info/pull/9)) + +## v1.1.3 + +* fix: fix spelling of Hunson in `ci.name` + +## v1.1.2 + +* fix: no more false positive matches for Jenkins + +## v1.1.1 + +* docs: sort lists of CI servers in README.md +* docs: add missing AWS CodeBuild to the docs + +## v1.1.0 + +* feat: add AWS CodeBuild to CI detection ([#2](https://github.com/watson/ci-info/pull/2)) + +## v1.0.1 + +* chore: reduce npm package size by using an `.npmignore` file ([#3](https://github.com/watson/ci-info/pull/3)) + +## v1.0.0 + +* Initial release diff --git a/node_modules/is-ci/node_modules/ci-info/LICENSE b/node_modules/is-ci/node_modules/ci-info/LICENSE new file mode 100644 index 0000000000000..67846832ecc30 --- /dev/null +++ b/node_modules/is-ci/node_modules/ci-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016-2018 Thomas Watson Steen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this 
software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/is-ci/node_modules/ci-info/README.md b/node_modules/is-ci/node_modules/ci-info/README.md new file mode 100644 index 0000000000000..c88be8f82d5df --- /dev/null +++ b/node_modules/is-ci/node_modules/ci-info/README.md @@ -0,0 +1,107 @@ +# ci-info + +Get details about the current Continuous Integration environment. + +Please [open an +issue](https://github.com/watson/ci-info/issues/new?template=ci-server-not-detected.md) +if your CI server isn't properly detected :) + +[![npm](https://img.shields.io/npm/v/ci-info.svg)](https://www.npmjs.com/package/ci-info) +[![Build status](https://travis-ci.org/watson/ci-info.svg?branch=master)](https://travis-ci.org/watson/ci-info) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://github.com/feross/standard) + +## Installation + +```bash +npm install ci-info --save +``` + +## Usage + +```js +var ci = require('ci-info') + +if (ci.isCI) { + console.log('The name of the CI server is:', ci.name) +} else { + console.log('This program is not running on a CI server') +} +``` + +## Supported CI tools + +Officially supported CI servers: + +| Name | Constant | +|------|----------| +| [AWS CodeBuild](https://aws.amazon.com/codebuild/) | `ci.CODEBUILD` | +| [AppVeyor](http://www.appveyor.com) | `ci.APPVEYOR` | +| [Bamboo](https://www.atlassian.com/software/bamboo) by Atlassian | `ci.BAMBOO` | +| [Bitbucket Pipelines](https://bitbucket.org/product/features/pipelines) | `ci.BITBUCKET` | +| [Bitrise](https://www.bitrise.io/) | `ci.BITRISE` | +| [Buddy](https://buddy.works/) | `ci.BUDDY` | +| [Buildkite](https://buildkite.com) | `ci.BUILDKITE` | +| [CircleCI](http://circleci.com) | `ci.CIRCLE` | +| [Cirrus CI](https://cirrus-ci.org) | `ci.CIRRUS` | +| [Codeship](https://codeship.com) | `ci.CODESHIP` | +| [Drone](https://drone.io) | `ci.DRONE` | +| [dsari](https://github.com/rfinnie/dsari) | `ci.DSARI` | +| [GitLab CI](https://about.gitlab.com/gitlab-ci/) | `ci.GITLAB` | +| [GoCD](https://www.go.cd/) | `ci.GOCD` | +| [Hudson](http://hudson-ci.org) | `ci.HUDSON` | +| [Jenkins CI](https://jenkins-ci.org) | `ci.JENKINS` | +| [Magnum CI](https://magnum-ci.com) | `ci.MAGNUM` | +| [Sail CI](https://sail.ci/) | `ci.SAIL` | +| [Semaphore](https://semaphoreci.com) | `ci.SEMAPHORE` | +| [Shippable](https://www.shippable.com/) | `ci.SHIPPABLE` | +| [Solano CI](https://www.solanolabs.com/) | `ci.SOLANO` | +| [Strider CD](https://strider-cd.github.io/) | `ci.STRIDER` | +| [TaskCluster](http://docs.taskcluster.net) | `ci.TASKCLUSTER` | +| [Team Foundation 
Server](https://www.visualstudio.com/en-us/products/tfs-overview-vs.aspx) by Microsoft | `ci.TFS` | +| [TeamCity](https://www.jetbrains.com/teamcity/) by JetBrains | `ci.TEAMCITY` | +| [Travis CI](http://travis-ci.org) | `ci.TRAVIS` | + +## API + +### `ci.name` + +A string. Will contain the name of the CI server the code is running on. +If no CI server is detected, it will be `null`. + +Don't depend on the value of this string staying the same for a specific +vendor. If you find yourself writing `ci.name === 'Travis CI'`, you +most likely want to use `ci.TRAVIS` instead. + +### `ci.isCI` + +A boolean. Will be `true` if the code is running on a CI server. +Otherwise `false`. + +Some CI servers not listed here might still trigger the `ci.isCI` +boolean to be set to `true` if they use certain vendor neutral +environment variables. In those cases `ci.name` will be `null` and no +vendor specific boolean will be set to `true`. + +### `ci.isPR` + +A boolean if PR detection is supported for the current CI server. Will +be `true` if a PR is being tested. Otherwise `false`. If PR detection is +not supported for the current CI server, the value will be `null`. + +### `ci.<VENDOR-CONSTANT>` + +A vendor specific boolean constant is exposed for each supported CI +vendor. A constant will be `true` if the code is determined to run on +the given CI server. Otherwise `false`. + +Examples of vendor constants are `ci.TRAVIS` or `ci.APPVEYOR`. For a +complete list, see the support table above. + +Deprecated vendor constants that will be removed in the next major +release: + +- `ci.TDDIUM` (Solano CI) This has been renamed `ci.SOLANO` + +## License + +[MIT](LICENSE) diff --git a/node_modules/is-ci/node_modules/ci-info/index.js b/node_modules/is-ci/node_modules/ci-info/index.js new file mode 100644 index 0000000000000..27794d49b3f21 --- /dev/null +++ b/node_modules/is-ci/node_modules/ci-info/index.js @@ -0,0 +1,66 @@ +'use strict' + +var vendors = require('./vendors.json') + +var env = process.env + +// Used for testing only +Object.defineProperty(exports, '_vendors', { + value: vendors.map(function (v) { return v.constant }) +}) + +exports.name = null +exports.isPR = null + +vendors.forEach(function (vendor) { + var envs = Array.isArray(vendor.env) ?
vendor.env : [vendor.env] + var isCI = envs.every(function (obj) { + return checkEnv(obj) + }) + + exports[vendor.constant] = isCI + + if (isCI) { + exports.name = vendor.name + + switch (typeof vendor.pr) { + case 'string': + // "pr": "CIRRUS_PR" + exports.isPR = !!env[vendor.pr] + break + case 'object': + if ('env' in vendor.pr) { + // "pr": { "env": "BUILDKITE_PULL_REQUEST", "ne": "false" } + exports.isPR = vendor.pr.env in env && env[vendor.pr.env] !== vendor.pr.ne + } else if ('any' in vendor.pr) { + // "pr": { "any": ["ghprbPullId", "CHANGE_ID"] } + exports.isPR = vendor.pr.any.some(function (key) { + return !!env[key] + }) + } else { + // "pr": { "DRONE_BUILD_EVENT": "pull_request" } + exports.isPR = checkEnv(vendor.pr) + } + break + default: + // PR detection not supported for this vendor + exports.isPR = null + } + } +}) + +exports.isCI = !!( + env.CI || // Travis CI, CircleCI, Cirrus CI, Gitlab CI, Appveyor, CodeShip, dsari + env.CONTINUOUS_INTEGRATION || // Travis CI, Cirrus CI + env.BUILD_NUMBER || // Jenkins, TeamCity + env.RUN_ID || // TaskCluster, dsari + exports.name || + false +) + +function checkEnv (obj) { + if (typeof obj === 'string') return !!env[obj] + return Object.keys(obj).every(function (k) { + return env[k] === obj[k] + }) +} diff --git a/node_modules/is-ci/node_modules/ci-info/package.json b/node_modules/is-ci/node_modules/ci-info/package.json new file mode 100644 index 0000000000000..3542df9d4100c --- /dev/null +++ b/node_modules/is-ci/node_modules/ci-info/package.json @@ -0,0 +1,65 @@ +{ + "_from": "ci-info@^1.0.0", + "_id": "ci-info@1.6.0", + "_inBundle": false, + "_integrity": "sha512-vsGdkwSCDpWmP80ncATX7iea5DWQemg1UgCW5J8tqjU3lYw4FBYuj89J0CTVomA7BEfvSZd84GmHko+MxFQU2A==", + "_location": "/is-ci/ci-info", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "ci-info@^1.0.0", + "name": "ci-info", + "escapedName": "ci-info", + "rawSpec": "^1.0.0", + "saveSpec": null, + "fetchSpec": "^1.0.0" + }, + "_requiredBy": [ + "/is-ci" + ], + "_resolved": "https://registry.npmjs.org/ci-info/-/ci-info-1.6.0.tgz", + "_shasum": "2ca20dbb9ceb32d4524a683303313f0304b1e497", + "_spec": "ci-info@^1.0.0", + "_where": "/Users/aeschright/code/cli/node_modules/is-ci", + "author": { + "name": "Thomas Watson Steen", + "email": "w@tson.dk", + "url": "https://twitter.com/wa7son" + }, + "bugs": { + "url": "https://github.com/watson/ci-info/issues" + }, + "bundleDependencies": false, + "coordinates": [ + 55.778271, + 12.593091 + ], + "dependencies": {}, + "deprecated": false, + "description": "Get details about the current Continuous Integration environment", + "devDependencies": { + "clear-require": "^1.0.1", + "standard": "^12.0.1", + "tape": "^4.9.1" + }, + "homepage": "https://github.com/watson/ci-info", + "keywords": [ + "ci", + "continuous", + "integration", + "test", + "detect" + ], + "license": "MIT", + "main": "index.js", + "name": "ci-info", + "repository": { + "type": "git", + "url": "git+https://github.com/watson/ci-info.git" + }, + "scripts": { + "test": "standard && node test.js" + }, + "version": "1.6.0" +} diff --git a/node_modules/is-ci/node_modules/ci-info/vendors.json b/node_modules/is-ci/node_modules/ci-info/vendors.json new file mode 100644 index 0000000000000..a157b78cea4af --- /dev/null +++ b/node_modules/is-ci/node_modules/ci-info/vendors.json @@ -0,0 +1,152 @@ +[ + { + "name": "AppVeyor", + "constant": "APPVEYOR", + "env": "APPVEYOR", + "pr": "APPVEYOR_PULL_REQUEST_NUMBER" + }, + { + "name": "Bamboo", + "constant": 
"BAMBOO", + "env": "bamboo_planKey" + }, + { + "name": "Bitbucket Pipelines", + "constant": "BITBUCKET", + "env": "BITBUCKET_COMMIT" + }, + { + "name": "Bitrise", + "constant": "BITRISE", + "env": "BITRISE_IO", + "pr": "BITRISE_PULL_REQUEST" + }, + { + "name": "Buddy", + "constant": "BUDDY", + "env": "BUDDY_WORKSPACE_ID", + "pr": "BUDDY_EXECUTION_PULL_REQUEST_ID" + }, + { + "name": "Buildkite", + "constant": "BUILDKITE", + "env": "BUILDKITE", + "pr": { "env": "BUILDKITE_PULL_REQUEST", "ne": "false" } + }, + { + "name": "CircleCI", + "constant": "CIRCLE", + "env": "CIRCLECI", + "pr": "CIRCLE_PULL_REQUEST" + }, + { + "name": "Cirrus CI", + "constant": "CIRRUS", + "env": "CIRRUS_CI", + "pr": "CIRRUS_PR" + }, + { + "name": "AWS CodeBuild", + "constant": "CODEBUILD", + "env": "CODEBUILD_BUILD_ARN" + }, + { + "name": "Codeship", + "constant": "CODESHIP", + "env": { "CI_NAME": "codeship" } + }, + { + "name": "Drone", + "constant": "DRONE", + "env": "DRONE", + "pr": { "DRONE_BUILD_EVENT": "pull_request" } + }, + { + "name": "dsari", + "constant": "DSARI", + "env": "DSARI" + }, + { + "name": "GitLab CI", + "constant": "GITLAB", + "env": "GITLAB_CI" + }, + { + "name": "GoCD", + "constant": "GOCD", + "env": "GO_PIPELINE_LABEL" + }, + { + "name": "Hudson", + "constant": "HUDSON", + "env": "HUDSON_URL" + }, + { + "name": "Jenkins", + "constant": "JENKINS", + "env": ["JENKINS_URL", "BUILD_ID"], + "pr": { "any": ["ghprbPullId", "CHANGE_ID"] } + }, + { + "name": "Magnum CI", + "constant": "MAGNUM", + "env": "MAGNUM" + }, + { + "name": "Sail CI", + "constant": "SAIL", + "env": "SAILCI", + "pr": "SAIL_PULL_REQUEST_NUMBER" + }, + { + "name": "Semaphore", + "constant": "SEMAPHORE", + "env": "SEMAPHORE", + "pr": "PULL_REQUEST_NUMBER" + }, + { + "name": "Shippable", + "constant": "SHIPPABLE", + "env": "SHIPPABLE", + "pr": { "IS_PULL_REQUEST": "true" } + }, + { + "name": "Solano CI", + "constant": "SOLANO", + "env": "TDDIUM", + "pr": "TDDIUM_PR_ID" + }, + { + "name": "Strider CD", + "constant": "STRIDER", + "env": "STRIDER" + }, + { + "name": "TaskCluster", + "constant": "TASKCLUSTER", + "env": ["TASK_ID", "RUN_ID"] + }, + { + "name": "Solano CI", + "constant": "TDDIUM", + "env": "TDDIUM", + "pr": "TDDIUM_PR_ID", + "deprecated": true + }, + { + "name": "TeamCity", + "constant": "TEAMCITY", + "env": "TEAMCITY_VERSION" + }, + { + "name": "Team Foundation Server", + "constant": "TFS", + "env": "TF_BUILD" + }, + { + "name": "Travis CI", + "constant": "TRAVIS", + "env": "TRAVIS", + "pr": { "env": "TRAVIS_PULL_REQUEST", "ne": "false" } + } +] diff --git a/node_modules/is-cidr/README.md b/node_modules/is-cidr/README.md index cd7c8c9686ee2..1fa3ee9ede782 100644 --- a/node_modules/is-cidr/README.md +++ b/node_modules/is-cidr/README.md @@ -7,7 +7,7 @@ ## Install ``` -$ npm install --save is-cidr +npm i is-cidr ``` @@ -16,14 +16,10 @@ $ npm install --save is-cidr ```js const isCidr = require('is-cidr'); -isCidr('192.168.0.1/24'); -//=> true - -isCidr('1:2:3:4:5:6:7:8/64'); -//=> true - -isCidr.v4('1:2:3:4:5:6:7:8/64'); -//=> false +isCidr('192.168.0.1/24'); //=> 4 +isCidr('1:2:3:4:5:6:7:8/64'); //=> 6 +isCidr('10.0.0.0'); //=> 0 +isCidr.v6('10.0.0.0/24'); //=> false ``` @@ -31,15 +27,15 @@ isCidr.v4('1:2:3:4:5:6:7:8/64'); ### isCidr(input) -Check if `input` is a IPv4 or IPv6 CIDR address. +Check if `input` is a IPv4 or IPv6 CIDR address. Returns either `4`, `6` (indicating the IP version) or `0` if the string is not a CIDR. ### isCidr.v4(input) -Check if `input` is a IPv4 CIDR address. 
+Check if `input` is a IPv4 CIDR address. Returns a boolean. ### isCidr.v6(input) -Check if `input` is a IPv6 CIDR address. +Check if `input` is a IPv6 CIDR address. Returns a boolean. ## Related diff --git a/node_modules/is-cidr/index.js b/node_modules/is-cidr/index.js index b5a5026439913..3eaf906c3542e 100644 --- a/node_modules/is-cidr/index.js +++ b/node_modules/is-cidr/index.js @@ -1,6 +1,13 @@ "use strict"; const cidrRegex = require("cidr-regex"); +const re4 = cidrRegex.v4({exact: true}); +const re6 = cidrRegex.v6({exact: true}); -const isCidr = module.exports = string => cidrRegex({exact: true}).test(string); -isCidr.v4 = string => cidrRegex.v4({exact: true}).test(string); -isCidr.v6 = string => cidrRegex.v6({exact: true}).test(string); +const isCidr = module.exports = str => { + if (re4.test(str)) return 4; + if (re6.test(str)) return 6; + return 0; +}; + +isCidr.v4 = str => re4.test(str); +isCidr.v6 = str => re6.test(str); diff --git a/node_modules/is-cidr/package.json b/node_modules/is-cidr/package.json index 6f735158abcb9..5737794e8c680 100644 --- a/node_modules/is-cidr/package.json +++ b/node_modules/is-cidr/package.json @@ -1,28 +1,28 @@ { - "_from": "is-cidr@2.0.6", - "_id": "is-cidr@2.0.6", + "_from": "is-cidr@3.0.0", + "_id": "is-cidr@3.0.0", "_inBundle": false, - "_integrity": "sha512-A578p1dV22TgPXn6NCaDAPj6vJvYsBgAzUrAd28a4oldeXJjWqEUuSZOLIW3im51mazOKsoyVp8NU/OItlWacw==", + "_integrity": "sha512-8Xnnbjsb0x462VoYiGlhEi+drY8SFwrHiSYuzc/CEwco55vkehTaxAyIjEdpi3EMvLPPJAJi9FlzP+h+03gp0Q==", "_location": "/is-cidr", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "is-cidr@2.0.6", + "raw": "is-cidr@3.0.0", "name": "is-cidr", "escapedName": "is-cidr", - "rawSpec": "2.0.6", + "rawSpec": "3.0.0", "saveSpec": null, - "fetchSpec": "2.0.6" + "fetchSpec": "3.0.0" }, "_requiredBy": [ "#USER", "/" ], - "_resolved": "https://registry.npmjs.org/is-cidr/-/is-cidr-2.0.6.tgz", - "_shasum": "4b01c9693d8e18399dacd18a4f3d60ea5871ac60", - "_spec": "is-cidr@2.0.6", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/is-cidr/-/is-cidr-3.0.0.tgz", + "_shasum": "1acf35c9e881063cd5f696d48959b30fed3eed56", + "_spec": "is-cidr@3.0.0", + "_where": "/Users/aeschright/code/cli", "author": { "name": "silverwind", "email": "me@silverwind.io" @@ -39,18 +39,18 @@ } ], "dependencies": { - "cidr-regex": "^2.0.8" + "cidr-regex": "^2.0.10" }, "deprecated": false, "description": "Check if a string is an IP address in CIDR notation", "devDependencies": { - "ava": "^0.25.0", - "eslint": "^4.19.1", - "eslint-config-silverwind": "^1.0.42", - "updates": "^3.0.0" + "eslint": "^5.7.0", + "eslint-config-silverwind": "^2.0.9", + "updates": "^4.5.2", + "ver": "^3.0.0" }, "engines": { - "node": ">=4" + "node": ">=6" }, "files": [ "index.js" @@ -64,7 +64,8 @@ "prefix", "prefixes", "ip", - "ip address" + "ip address", + "network" ], "license": "BSD-2-Clause", "name": "is-cidr", @@ -75,5 +76,5 @@ "scripts": { "test": "make test" }, - "version": "2.0.6" + "version": "3.0.0" } diff --git a/node_modules/is-date-object/.jscs.json b/node_modules/is-date-object/.jscs.json new file mode 100644 index 0000000000000..040bb6806a566 --- /dev/null +++ b/node_modules/is-date-object/.jscs.json @@ -0,0 +1,122 @@ +{ + "es3": true, + + "additionalRules": [], + + "requireSemicolons": true, + + "disallowMultipleSpaces": true, + + "disallowIdentifierNames": [], + + "requireCurlyBraces": ["if", "else", "for", "while", "do", "try", "catch"], + + "requireSpaceAfterKeywords": ["if", 
"else", "for", "while", "do", "switch", "return", "try", "catch", "function"], + + "disallowSpaceAfterKeywords": [], + + "disallowSpaceBeforeComma": true, + "disallowSpaceBeforeSemicolon": true, + + "disallowNodeTypes": [ + "DebuggerStatement", + "ForInStatement", + "LabeledStatement", + "SwitchCase", + "SwitchStatement", + "WithStatement" + ], + + "requireSpacesInAnonymousFunctionExpression": { "beforeOpeningRoundBrace": true, "beforeOpeningCurlyBrace": true }, + "requireSpacesInNamedFunctionExpression": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInNamedFunctionExpression": { "beforeOpeningRoundBrace": true }, + "requireSpacesInFunctionDeclaration": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInFunctionDeclaration": { "beforeOpeningRoundBrace": true }, + + "requireSpaceBetweenArguments": true, + + "disallowSpacesInsideParentheses": true, + + "disallowSpacesInsideArrayBrackets": true, + + "disallowQuotedKeysInObjects": "allButReserved", + + "disallowSpaceAfterObjectKeys": true, + + "requireCommaBeforeLineBreak": true, + + "disallowSpaceAfterPrefixUnaryOperators": ["++", "--", "+", "-", "~", "!"], + "requireSpaceAfterPrefixUnaryOperators": [], + + "disallowSpaceBeforePostfixUnaryOperators": ["++", "--"], + "requireSpaceBeforePostfixUnaryOperators": [], + + "disallowSpaceBeforeBinaryOperators": [], + "requireSpaceBeforeBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + + "requireSpaceAfterBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + "disallowSpaceAfterBinaryOperators": [], + + "disallowImplicitTypeConversion": ["binary", "string"], + + "disallowKeywords": ["with", "eval"], + + "requireKeywordsOnNewLine": [], + "disallowKeywordsOnNewLine": ["else"], + + "requireLineFeedAtFileEnd": true, + + "disallowTrailingWhitespace": true, + + "disallowTrailingComma": true, + + "excludeFiles": ["node_modules/**", "vendor/**"], + + "disallowMultipleLineStrings": true, + + "requireDotNotation": true, + + "requireParenthesesAroundIIFE": true, + + "validateLineBreaks": "LF", + + "validateQuoteMarks": { + "escape": true, + "mark": "'" + }, + + "disallowOperatorBeforeLineBreak": [], + + "requireSpaceBeforeKeywords": [ + "do", + "for", + "if", + "else", + "switch", + "case", + "try", + "catch", + "finally", + "while", + "with", + "return" + ], + + "validateAlignedFunctionParameters": { + "lineBreakAfterOpeningBraces": true, + "lineBreakBeforeClosingBraces": true + }, + + "requirePaddingNewLinesBeforeExport": true, + + "validateNewlineAfterArrayElements": { + "maximum": 1 + }, + + "requirePaddingNewLinesAfterUseStrict": true, + + "disallowArrowFunctions": true, + + "validateOrderInObjectKeys": "asc-insensitive" +} + diff --git a/node_modules/is-date-object/.npmignore b/node_modules/is-date-object/.npmignore new file mode 100644 index 0000000000000..59d842baa84c8 --- /dev/null +++ b/node_modules/is-date-object/.npmignore @@ -0,0 +1,28 @@ +# Logs +logs +*.log + +# Runtime data +pids +*.pid +*.seed + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Compiled binary addons (http://nodejs.org/api/addons.html) +build/Release + +# Dependency directory +# Commenting this out is preferred by some people, see +# https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git- +node_modules + +# Users Environment Variables +.lock-wscript diff 
--git a/node_modules/is-date-object/.travis.yml b/node_modules/is-date-object/.travis.yml new file mode 100644 index 0000000000000..4c29ed58ba7fb --- /dev/null +++ b/node_modules/is-date-object/.travis.yml @@ -0,0 +1,58 @@ +language: node_js +node_js: + - "4.1" + - "4.0" + - "iojs-v3.3" + - "iojs-v3.2" + - "iojs-v3.1" + - "iojs-v3.0" + - "iojs-v2.5" + - "iojs-v2.4" + - "iojs-v2.3" + - "iojs-v2.2" + - "iojs-v2.1" + - "iojs-v2.0" + - "iojs-v1.8" + - "iojs-v1.7" + - "iojs-v1.6" + - "iojs-v1.5" + - "iojs-v1.4" + - "iojs-v1.3" + - "iojs-v1.2" + - "iojs-v1.1" + - "iojs-v1.0" + - "0.12" + - "0.11" + - "0.10" + - "0.9" + - "0.8" + - "0.6" + - "0.4" +before_install: + - '[ "${TRAVIS_NODE_VERSION}" = "0.6" ] || npm install -g npm@1.4.28 && npm install -g npm' +sudo: false +matrix: + fast_finish: true + allow_failures: + - node_js: "4.0" + - node_js: "iojs-v3.2" + - node_js: "iojs-v3.1" + - node_js: "iojs-v3.0" + - node_js: "iojs-v2.4" + - node_js: "iojs-v2.3" + - node_js: "iojs-v2.2" + - node_js: "iojs-v2.1" + - node_js: "iojs-v2.0" + - node_js: "iojs-v1.7" + - node_js: "iojs-v1.6" + - node_js: "iojs-v1.5" + - node_js: "iojs-v1.4" + - node_js: "iojs-v1.3" + - node_js: "iojs-v1.2" + - node_js: "iojs-v1.1" + - node_js: "iojs-v1.0" + - node_js: "0.11" + - node_js: "0.9" + - node_js: "0.8" + - node_js: "0.6" + - node_js: "0.4" diff --git a/node_modules/is-date-object/CHANGELOG.md b/node_modules/is-date-object/CHANGELOG.md new file mode 100644 index 0000000000000..4a7eab61bb1b2 --- /dev/null +++ b/node_modules/is-date-object/CHANGELOG.md @@ -0,0 +1,10 @@ +1.0.1 / 2015-09-27 +================= + * [Fix] If `@@toStringTag` is not present, use the old-school `Object#toString` test + * [Docs] Switch from vb.teelaun.ch to versionbadg.es for the npm version badge SVG + * [Dev Deps] update `is`, `eslint`, `@ljharb/eslint-config`, `semver`, `tape`, `jscs`, `nsp`, `covert` + * [Tests] up to `io.js` `v3.3`, `node` `v4.1` + +1.0.0 / 2015-01-28 +================= + * Initial release. diff --git a/node_modules/is-date-object/LICENSE b/node_modules/is-date-object/LICENSE new file mode 100644 index 0000000000000..b43df444e5182 --- /dev/null +++ b/node_modules/is-date-object/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Jordan Harband + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
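The is-date-object changelog above mentions the `Object#toString` fallback used when `@@toStringTag` is unavailable; when symbols are available, detection instead relies on a `Date` internal-slot check (`Date.prototype.getDay`), so duck-typed or date-like values don't pass. A minimal usage sketch (editorial, assuming the vendored `is-date-object@1.0.1` resolves via `require`) follows:

```js
// Usage sketch for the vendored is-date-object@1.0.1; only genuine Date
// instances (including ones created in other realms) pass the check.
var isDate = require('is-date-object');

console.log(isDate(new Date()));    // true
console.log(isDate(Date.now()));    // false -- a timestamp is just a number
console.log(isDate('2015-01-28'));  // false -- date-like strings don't count
console.log(isDate({ getTime: function () { return 0; } })); // false -- duck typing is not enough
```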
+ diff --git a/node_modules/is-date-object/Makefile b/node_modules/is-date-object/Makefile new file mode 100644 index 0000000000000..b9e4fe1aab3dd --- /dev/null +++ b/node_modules/is-date-object/Makefile @@ -0,0 +1,61 @@ +# Since we rely on paths relative to the makefile location, abort if make isn't being run from there. +$(if $(findstring /,$(MAKEFILE_LIST)),$(error Please only invoke this makefile from the directory it resides in)) + + # The files that need updating when incrementing the version number. +VERSIONED_FILES := *.js *.json README* + + +# Add the local npm packages' bin folder to the PATH, so that `make` can find them, when invoked directly. +# Note that rather than using `$(npm bin)` the 'node_modules/.bin' path component is hard-coded, so that invocation works even from an environment +# where npm is (temporarily) unavailable due to having deactivated an nvm instance loaded into the calling shell in order to avoid interference with tests. +export PATH := $(shell printf '%s' "$$PWD/node_modules/.bin:$$PATH") +UTILS := semver +# Make sure that all required utilities can be located. +UTIL_CHECK := $(or $(shell PATH="$(PATH)" which $(UTILS) >/dev/null && echo 'ok'),$(error Did you forget to run `npm install` after cloning the repo? At least one of the required supporting utilities not found: $(UTILS))) + +# Default target (by virtue of being the first non '.'-prefixed in the file). +.PHONY: _no-target-specified +_no-target-specified: + $(error Please specify the target to make - `make list` shows targets. Alternatively, use `npm test` to run the default tests; `npm run` shows all tests) + +# Lists all targets defined in this makefile. +.PHONY: list +list: + @$(MAKE) -pRrn : -f $(MAKEFILE_LIST) 2>/dev/null | awk -v RS= -F: '/^# File/,/^# Finished Make data base/ {if ($$1 !~ "^[#.]") {print $$1}}' | command grep -v -e '^[^[:alnum:]]' -e '^$@$$command ' | sort + +# All-tests target: invokes the specified test suites for ALL shells defined in $(SHELLS). +.PHONY: test +test: + @npm test + +.PHONY: _ensure-tag +_ensure-tag: +ifndef TAG + $(error Please invoke with `make TAG= release`, where is either an increment specifier (patch, minor, major, prepatch, preminor, premajor, prerelease), or an explicit major.minor.patch version number) +endif + +CHANGELOG_ERROR = $(error No CHANGELOG specified) +.PHONY: _ensure-changelog +_ensure-changelog: + @ (git status -sb --porcelain | command grep -E '^( M|[MA] ) CHANGELOG.md' > /dev/null) || (echo no CHANGELOG.md specified && exit 2) + +# Ensures that the git workspace is clean. +.PHONY: _ensure-clean +_ensure-clean: + @[ -z "$$((git status --porcelain --untracked-files=no || echo err) | command grep -v 'CHANGELOG.md')" ] || { echo "Workspace is not clean; please commit changes first." >&2; exit 2; } + +# Makes a release; invoke with `make TAG= release`. +.PHONY: release +release: _ensure-tag _ensure-changelog _ensure-clean + @old_ver=`git describe --abbrev=0 --tags --match 'v[0-9]*.[0-9]*.[0-9]*'` || { echo "Failed to determine current version." >&2; exit 1; }; old_ver=$${old_ver#v}; \ + new_ver=`echo "$(TAG)" | sed 's/^v//'`; new_ver=$${new_ver:-patch}; \ + if printf "$$new_ver" | command grep -q '^[0-9]'; then \ + semver "$$new_ver" >/dev/null || { echo 'Invalid version number specified: $(TAG) - must be major.minor.patch' >&2; exit 2; }; \ + semver -r "> $$old_ver" "$$new_ver" >/dev/null || { echo 'Invalid version number specified: $(TAG) - must be HIGHER than current one.' 
>&2; exit 2; } \ + else \ + new_ver=`semver -i "$$new_ver" "$$old_ver"` || { echo 'Invalid version-increment specifier: $(TAG)' >&2; exit 2; } \ + fi; \ + printf "=== Bumping version **$$old_ver** to **$$new_ver** before committing and tagging:\n=== TYPE 'proceed' TO PROCEED, anything else to abort: " && read response && [ "$$response" = 'proceed' ] || { echo 'Aborted.' >&2; exit 2; }; \ + replace "$$old_ver" "$$new_ver" -- $(VERSIONED_FILES) && \ + git commit -m "v$$new_ver" $(VERSIONED_FILES) CHANGELOG.md && \ + git tag -a -m "v$$new_ver" "v$$new_ver" diff --git a/node_modules/is-date-object/README.md b/node_modules/is-date-object/README.md new file mode 100644 index 0000000000000..55b0c59673e60 --- /dev/null +++ b/node_modules/is-date-object/README.md @@ -0,0 +1,53 @@ +# is-date-object [![Version Badge][2]][1] + +[![Build Status][3]][4] +[![dependency status][5]][6] +[![dev dependency status][7]][8] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][11]][1] + +[![browser support][9]][10] + +Is this value a JS Date object? This module works cross-realm/iframe, and despite ES6 @@toStringTag. + +## Example + +```js +var isDate = require('is-date-object'); +var assert = require('assert'); + +assert.notOk(isDate(undefined)); +assert.notOk(isDate(null)); +assert.notOk(isDate(false)); +assert.notOk(isDate(true)); +assert.notOk(isDate(42)); +assert.notOk(isDate('foo')); +assert.notOk(isDate(function () {})); +assert.notOk(isDate([])); +assert.notOk(isDate({})); +assert.notOk(isDate(/a/g)); +assert.notOk(isDate(new RegExp('a', 'g'))); + +assert.ok(isDate(new Date())); +``` + +## Tests +Simply clone the repo, `npm install`, and run `npm test` + +[1]: https://npmjs.org/package/is-date-object +[2]: http://versionbadg.es/ljharb/is-date-object.svg +[3]: https://travis-ci.org/ljharb/is-date-object.svg +[4]: https://travis-ci.org/ljharb/is-date-object +[5]: https://david-dm.org/ljharb/is-date-object.svg +[6]: https://david-dm.org/ljharb/is-date-object +[7]: https://david-dm.org/ljharb/is-date-object/dev-status.svg +[8]: https://david-dm.org/ljharb/is-date-object#info=devDependencies +[9]: https://ci.testling.com/ljharb/is-date-object.png +[10]: https://ci.testling.com/ljharb/is-date-object +[11]: https://nodei.co/npm/is-date-object.png?downloads=true&stars=true +[license-image]: http://img.shields.io/npm/l/is-date-object.svg +[license-url]: LICENSE +[downloads-image]: http://img.shields.io/npm/dm/is-date-object.svg +[downloads-url]: http://npm-stat.com/charts.html?package=is-date-object diff --git a/node_modules/is-date-object/index.js b/node_modules/is-date-object/index.js new file mode 100644 index 0000000000000..fe0d7ecd7c145 --- /dev/null +++ b/node_modules/is-date-object/index.js @@ -0,0 +1,20 @@ +'use strict'; + +var getDay = Date.prototype.getDay; +var tryDateObject = function tryDateObject(value) { + try { + getDay.call(value); + return true; + } catch (e) { + return false; + } +}; + +var toStr = Object.prototype.toString; +var dateClass = '[object Date]'; +var hasToStringTag = typeof Symbol === 'function' && typeof Symbol.toStringTag === 'symbol'; + +module.exports = function isDateObject(value) { + if (typeof value !== 'object' || value === null) { return false; } + return hasToStringTag ? 
tryDateObject(value) : toStr.call(value) === dateClass; +}; diff --git a/node_modules/is-date-object/package.json b/node_modules/is-date-object/package.json new file mode 100644 index 0000000000000..20c0b95086f58 --- /dev/null +++ b/node_modules/is-date-object/package.json @@ -0,0 +1,66 @@ +{ + "name": "is-date-object", + "version": "1.0.1", + "author": "Jordan Harband", + "description": "Is this value a JS Date object? This module works cross-realm/iframe, and despite ES6 @@toStringTag.", + "license": "MIT", + "main": "index.js", + "scripts": { + "test": "npm run lint && node --harmony --es-staging test.js && npm run security", + "coverage": "covert test.js", + "coverage-quiet": "covert test.js --quiet", + "lint": "npm run jscs && npm run eslint", + "jscs": "jscs test.js *.js", + "eslint": "eslint test.js *.js", + "security": "nsp package" + }, + "repository": { + "type": "git", + "url": "git://github.com/ljharb/is-date-object.git" + }, + "keywords": [ + "Date", + "ES6", + "toStringTag", + "@@toStringTag", + "Date object" + ], + "dependencies": {}, + "devDependencies": { + "foreach": "^2.0.5", + "is": "^3.1.0", + "tape": "^4.2.0", + "indexof": "^0.0.1", + "covert": "^1.1.0", + "jscs": "^2.1.1", + "nsp": "^1.1.0", + "eslint": "^1.5.1", + "@ljharb/eslint-config": "^1.2.0", + "semver": "^5.0.3" + }, + "testling": { + "files": "test.js", + "browsers": [ + "iexplore/6.0..latest", + "firefox/3.0..6.0", + "firefox/15.0..latest", + "firefox/nightly", + "chrome/4.0..10.0", + "chrome/20.0..latest", + "chrome/canary", + "opera/10.0..latest", + "opera/next", + "safari/4.0..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2" + ] + }, + "engines": { + "node": ">= 0.4" + } + +,"_resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.1.tgz" +,"_integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY=" +,"_from": "is-date-object@1.0.1" +} \ No newline at end of file diff --git a/node_modules/is-date-object/test.js b/node_modules/is-date-object/test.js new file mode 100644 index 0000000000000..29f0917bc4a33 --- /dev/null +++ b/node_modules/is-date-object/test.js @@ -0,0 +1,33 @@ +'use strict'; + +var test = require('tape'); +var isDate = require('./'); +var hasSymbols = typeof Symbol === 'function' && typeof Symbol() === 'symbol'; + +test('not Dates', function (t) { + t.notOk(isDate(), 'undefined is not Date'); + t.notOk(isDate(null), 'null is not Date'); + t.notOk(isDate(false), 'false is not Date'); + t.notOk(isDate(true), 'true is not Date'); + t.notOk(isDate(42), 'number is not Date'); + t.notOk(isDate('foo'), 'string is not Date'); + t.notOk(isDate([]), 'array is not Date'); + t.notOk(isDate({}), 'object is not Date'); + t.notOk(isDate(function () {}), 'function is not Date'); + t.notOk(isDate(/a/g), 'regex literal is not Date'); + t.notOk(isDate(new RegExp('a', 'g')), 'regex object is not Date'); + t.end(); +}); + +test('@@toStringTag', { skip: !hasSymbols || !Symbol.toStringTag }, function (t) { + var realDate = new Date(); + var fakeDate = { toString: function () { return String(realDate); }, valueOf: function () { return realDate.getTime(); } }; + fakeDate[Symbol.toStringTag] = 'Date'; + t.notOk(isDate(fakeDate), 'fake Date with @@toStringTag "Date" is not Date'); + t.end(); +}); + +test('Dates', function (t) { + t.ok(isDate(new Date()), 'new Date() is Date'); + t.end(); +}); diff --git a/node_modules/is-regex/.jscs.json b/node_modules/is-regex/.jscs.json new file mode 100644 index 0000000000000..3d099c4b1192c --- /dev/null +++ 
b/node_modules/is-regex/.jscs.json @@ -0,0 +1,176 @@ +{ + "es3": true, + + "additionalRules": [], + + "requireSemicolons": true, + + "disallowMultipleSpaces": true, + + "disallowIdentifierNames": [], + + "requireCurlyBraces": { + "allExcept": [], + "keywords": ["if", "else", "for", "while", "do", "try", "catch"] + }, + + "requireSpaceAfterKeywords": ["if", "else", "for", "while", "do", "switch", "return", "try", "catch", "function"], + + "disallowSpaceAfterKeywords": [], + + "disallowSpaceBeforeComma": true, + "disallowSpaceAfterComma": false, + "disallowSpaceBeforeSemicolon": true, + + "disallowNodeTypes": [ + "DebuggerStatement", + "ForInStatement", + "LabeledStatement", + "SwitchCase", + "SwitchStatement", + "WithStatement" + ], + + "requireObjectKeysOnNewLine": { "allExcept": ["sameLine"] }, + + "requireSpacesInAnonymousFunctionExpression": { "beforeOpeningRoundBrace": true, "beforeOpeningCurlyBrace": true }, + "requireSpacesInNamedFunctionExpression": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInNamedFunctionExpression": { "beforeOpeningRoundBrace": true }, + "requireSpacesInFunctionDeclaration": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInFunctionDeclaration": { "beforeOpeningRoundBrace": true }, + + "requireSpaceBetweenArguments": true, + + "disallowSpacesInsideParentheses": true, + + "disallowSpacesInsideArrayBrackets": true, + + "disallowQuotedKeysInObjects": { "allExcept": ["reserved"] }, + + "disallowSpaceAfterObjectKeys": true, + + "requireCommaBeforeLineBreak": true, + + "disallowSpaceAfterPrefixUnaryOperators": ["++", "--", "+", "-", "~", "!"], + "requireSpaceAfterPrefixUnaryOperators": [], + + "disallowSpaceBeforePostfixUnaryOperators": ["++", "--"], + "requireSpaceBeforePostfixUnaryOperators": [], + + "disallowSpaceBeforeBinaryOperators": [], + "requireSpaceBeforeBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + + "requireSpaceAfterBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + "disallowSpaceAfterBinaryOperators": [], + + "disallowImplicitTypeConversion": ["binary", "string"], + + "disallowKeywords": ["with", "eval"], + + "requireKeywordsOnNewLine": [], + "disallowKeywordsOnNewLine": ["else"], + + "requireLineFeedAtFileEnd": true, + + "disallowTrailingWhitespace": true, + + "disallowTrailingComma": true, + + "excludeFiles": ["node_modules/**", "vendor/**"], + + "disallowMultipleLineStrings": true, + + "requireDotNotation": { "allExcept": ["keywords"] }, + + "requireParenthesesAroundIIFE": true, + + "validateLineBreaks": "LF", + + "validateQuoteMarks": { + "escape": true, + "mark": "'" + }, + + "disallowOperatorBeforeLineBreak": [], + + "requireSpaceBeforeKeywords": [ + "do", + "for", + "if", + "else", + "switch", + "case", + "try", + "catch", + "finally", + "while", + "with", + "return" + ], + + "validateAlignedFunctionParameters": { + "lineBreakAfterOpeningBraces": true, + "lineBreakBeforeClosingBraces": true + }, + + "requirePaddingNewLinesBeforeExport": true, + + "validateNewlineAfterArrayElements": { + "maximum": 1 + }, + + "requirePaddingNewLinesAfterUseStrict": true, + + "disallowArrowFunctions": true, + + "disallowMultiLineTernary": true, + + "validateOrderInObjectKeys": "asc-insensitive", + + "disallowIdenticalDestructuringNames": true, + + "disallowNestedTernaries": { "maxLevel": 1 }, + + "requireSpaceAfterComma": { "allExcept": ["trailing"] }, + "requireAlignedMultilineParams": false, + + "requireSpacesInGenerator": { + "afterStar": true + }, + + "disallowSpacesInGenerator": { + "beforeStar": 
true + }, + + "disallowVar": false, + + "requireArrayDestructuring": false, + + "requireEnhancedObjectLiterals": false, + + "requireObjectDestructuring": false, + + "requireEarlyReturn": false, + + "requireCapitalizedConstructorsNew": { + "allExcept": ["Function", "String", "Object", "Symbol", "Number", "Date", "RegExp", "Error", "Boolean", "Array"] + }, + + "requireImportAlphabetized": false, + + "requireSpaceBeforeObjectValues": true, + "requireSpaceBeforeDestructuredValues": true, + + "disallowSpacesInsideTemplateStringPlaceholders": true, + + "disallowArrayDestructuringReturn": false, + + "requireNewlineBeforeSingleStatementsInIf": false, + + "disallowUnusedVariables": true, + + "requireSpacesInsideImportedObjectBraces": true, + + "requireUseStrict": true +} + diff --git a/node_modules/is-regex/.npmignore b/node_modules/is-regex/.npmignore new file mode 100644 index 0000000000000..a72b52ebe8977 --- /dev/null +++ b/node_modules/is-regex/.npmignore @@ -0,0 +1,15 @@ +lib-cov +*.seed +*.log +*.csv +*.dat +*.out +*.pid +*.gz + +pids +logs +results + +npm-debug.log +node_modules diff --git a/node_modules/is-regex/.travis.yml b/node_modules/is-regex/.travis.yml new file mode 100644 index 0000000000000..41137a89a5919 --- /dev/null +++ b/node_modules/is-regex/.travis.yml @@ -0,0 +1,165 @@ +language: node_js +os: + - linux +node_js: + - "7.5" + - "6.9" + - "5.12" + - "4.7" + - "iojs-v3.3" + - "iojs-v2.5" + - "iojs-v1.8" + - "0.12" + - "0.10" + - "0.8" +before_install: + - 'if [ "${TRAVIS_NODE_VERSION}" = "0.6" ]; then npm install -g npm@1.3 ; elif [ "${TRAVIS_NODE_VERSION}" != "0.9" ]; then case "$(npm --version)" in 1.*) npm install -g npm@1.4.28 ;; 2.*) npm install -g npm@2 ;; esac ; fi' + - 'if [ "${TRAVIS_NODE_VERSION}" != "0.6" ] && [ "${TRAVIS_NODE_VERSION}" != "0.9" ]; then npm install -g npm; fi' +script: + - 'if [ -n "${PRETEST-}" ]; then npm run pretest ; fi' + - 'if [ -n "${POSTTEST-}" ]; then npm run posttest ; fi' + - 'if [ -n "${COVERAGE-}" ]; then npm run coverage ; fi' + - 'if [ -n "${TEST-}" ]; then npm run tests-only ; fi' +sudo: false +env: + - TEST=true +matrix: + fast_finish: true + include: + - node_js: "node" + env: PRETEST=true + - node_js: "node" + env: POSTTEST=true + - node_js: "7.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.2" + env: TEST=true ALLOW_FAILURE=true + - 
node_js: "5.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7" + env: TEST=true + os: osx + - node_js: "6" + env: TEST=true + os: osx + - node_js: "5" + env: TEST=true + os: osx + - node_js: "4" + env: TEST=true + os: osx + - node_js: "iojs" + env: TEST=true + os: osx + - node_js: "0.12" + env: TEST=true + os: osx + - node_js: "0.10" + env: TEST=true + os: osx + - node_js: "0.8" + env: TEST=true + os: osx + allow_failures: + - os: osx + - env: TEST=true ALLOW_FAILURE=true diff --git a/node_modules/is-regex/CHANGELOG.md b/node_modules/is-regex/CHANGELOG.md new file mode 100644 index 0000000000000..6d73800022da3 --- /dev/null +++ b/node_modules/is-regex/CHANGELOG.md @@ -0,0 +1,27 @@ +1.0.4 / 2016-02-18 +================= + * [Fix] ensure that `lastIndex` is not mutated (#3) + * [Refactor] when try/catch is needed, bail early if the value lacks an own `lastIndex` data property + * [Refactor] use an early return instead of a ternary + * [Refactor] bail earlier when the value is falsy + * Switch from vb.teelaun.ch to versionbadg.es for the npm version badge SVG + * [Dev Deps] update `tape`, `jscs`, `editorconfig-tools`, `eslint`, `semver`, `replace`, `nsp`, `covert`, `@ljharb/eslint-config` + * [Tests] on all the node and io.js versions; improve test matri + * [Tests] Fix tests for faked @@toStringTag + +1.0.3 / 2015-01-29 +================= + * If @@toStringTag is not present, use the old-school Object#toString test. + +1.0.2 / 2015-01-29 +================= + * Improve optimization by separating the try/catch, and bailing out early when not typeof "object". + +1.0.1 / 2015-01-28 +================= + * Update `jscs`, `tape`, `covert` + * Use RegExp#exec to test if something is a regex, which works even with ES6 @@toStringTag. + +1.0.0 / 2014-05-19 +================= + * Initial release. 
diff --git a/node_modules/is-regex/LICENSE b/node_modules/is-regex/LICENSE new file mode 100644 index 0000000000000..47b7b5078fce3 --- /dev/null +++ b/node_modules/is-regex/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2014 Jordan Harband + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/is-regex/Makefile b/node_modules/is-regex/Makefile new file mode 100644 index 0000000000000..b9e4fe1aab3dd --- /dev/null +++ b/node_modules/is-regex/Makefile @@ -0,0 +1,61 @@ +# Since we rely on paths relative to the makefile location, abort if make isn't being run from there. +$(if $(findstring /,$(MAKEFILE_LIST)),$(error Please only invoke this makefile from the directory it resides in)) + + # The files that need updating when incrementing the version number. +VERSIONED_FILES := *.js *.json README* + + +# Add the local npm packages' bin folder to the PATH, so that `make` can find them, when invoked directly. +# Note that rather than using `$(npm bin)` the 'node_modules/.bin' path component is hard-coded, so that invocation works even from an environment +# where npm is (temporarily) unavailable due to having deactivated an nvm instance loaded into the calling shell in order to avoid interference with tests. +export PATH := $(shell printf '%s' "$$PWD/node_modules/.bin:$$PATH") +UTILS := semver +# Make sure that all required utilities can be located. +UTIL_CHECK := $(or $(shell PATH="$(PATH)" which $(UTILS) >/dev/null && echo 'ok'),$(error Did you forget to run `npm install` after cloning the repo? At least one of the required supporting utilities not found: $(UTILS))) + +# Default target (by virtue of being the first non '.'-prefixed in the file). +.PHONY: _no-target-specified +_no-target-specified: + $(error Please specify the target to make - `make list` shows targets. Alternatively, use `npm test` to run the default tests; `npm run` shows all tests) + +# Lists all targets defined in this makefile. +.PHONY: list +list: + @$(MAKE) -pRrn : -f $(MAKEFILE_LIST) 2>/dev/null | awk -v RS= -F: '/^# File/,/^# Finished Make data base/ {if ($$1 !~ "^[#.]") {print $$1}}' | command grep -v -e '^[^[:alnum:]]' -e '^$@$$command ' | sort + +# All-tests target: invokes the specified test suites for ALL shells defined in $(SHELLS). 
+.PHONY: test +test: + @npm test + +.PHONY: _ensure-tag +_ensure-tag: +ifndef TAG + $(error Please invoke with `make TAG= release`, where is either an increment specifier (patch, minor, major, prepatch, preminor, premajor, prerelease), or an explicit major.minor.patch version number) +endif + +CHANGELOG_ERROR = $(error No CHANGELOG specified) +.PHONY: _ensure-changelog +_ensure-changelog: + @ (git status -sb --porcelain | command grep -E '^( M|[MA] ) CHANGELOG.md' > /dev/null) || (echo no CHANGELOG.md specified && exit 2) + +# Ensures that the git workspace is clean. +.PHONY: _ensure-clean +_ensure-clean: + @[ -z "$$((git status --porcelain --untracked-files=no || echo err) | command grep -v 'CHANGELOG.md')" ] || { echo "Workspace is not clean; please commit changes first." >&2; exit 2; } + +# Makes a release; invoke with `make TAG= release`. +.PHONY: release +release: _ensure-tag _ensure-changelog _ensure-clean + @old_ver=`git describe --abbrev=0 --tags --match 'v[0-9]*.[0-9]*.[0-9]*'` || { echo "Failed to determine current version." >&2; exit 1; }; old_ver=$${old_ver#v}; \ + new_ver=`echo "$(TAG)" | sed 's/^v//'`; new_ver=$${new_ver:-patch}; \ + if printf "$$new_ver" | command grep -q '^[0-9]'; then \ + semver "$$new_ver" >/dev/null || { echo 'Invalid version number specified: $(TAG) - must be major.minor.patch' >&2; exit 2; }; \ + semver -r "> $$old_ver" "$$new_ver" >/dev/null || { echo 'Invalid version number specified: $(TAG) - must be HIGHER than current one.' >&2; exit 2; } \ + else \ + new_ver=`semver -i "$$new_ver" "$$old_ver"` || { echo 'Invalid version-increment specifier: $(TAG)' >&2; exit 2; } \ + fi; \ + printf "=== Bumping version **$$old_ver** to **$$new_ver** before committing and tagging:\n=== TYPE 'proceed' TO PROCEED, anything else to abort: " && read response && [ "$$response" = 'proceed' ] || { echo 'Aborted.' >&2; exit 2; }; \ + replace "$$old_ver" "$$new_ver" -- $(VERSIONED_FILES) && \ + git commit -m "v$$new_ver" $(VERSIONED_FILES) CHANGELOG.md && \ + git tag -a -m "v$$new_ver" "v$$new_ver" diff --git a/node_modules/is-regex/README.md b/node_modules/is-regex/README.md new file mode 100644 index 0000000000000..05baa0ebca339 --- /dev/null +++ b/node_modules/is-regex/README.md @@ -0,0 +1,54 @@ +#is-regex [![Version Badge][2]][1] + +[![Build Status][3]][4] +[![dependency status][5]][6] +[![dev dependency status][7]][8] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][11]][1] + +[![browser support][9]][10] + +Is this value a JS regex? +This module works cross-realm/iframe, and despite ES6 @@toStringTag. 
+ +## Example + +```js +var isRegex = require('is-regex'); +var assert = require('assert'); + +assert.notOk(isRegex(undefined)); +assert.notOk(isRegex(null)); +assert.notOk(isRegex(false)); +assert.notOk(isRegex(true)); +assert.notOk(isRegex(42)); +assert.notOk(isRegex('foo')); +assert.notOk(isRegex(function () {})); +assert.notOk(isRegex([])); +assert.notOk(isRegex({})); + +assert.ok(isRegex(/a/g)); +assert.ok(isRegex(new RegExp('a', 'g'))); +``` + +## Tests +Simply clone the repo, `npm install`, and run `npm test` + +[1]: https://npmjs.org/package/is-regex +[2]: http://versionbadg.es/ljharb/is-regex.svg +[3]: https://travis-ci.org/ljharb/is-regex.svg +[4]: https://travis-ci.org/ljharb/is-regex +[5]: https://david-dm.org/ljharb/is-regex.svg +[6]: https://david-dm.org/ljharb/is-regex +[7]: https://david-dm.org/ljharb/is-regex/dev-status.svg +[8]: https://david-dm.org/ljharb/is-regex#info=devDependencies +[9]: https://ci.testling.com/ljharb/is-regex.png +[10]: https://ci.testling.com/ljharb/is-regex +[11]: https://nodei.co/npm/is-regex.png?downloads=true&stars=true +[license-image]: http://img.shields.io/npm/l/is-regex.svg +[license-url]: LICENSE +[downloads-image]: http://img.shields.io/npm/dm/is-regex.svg +[downloads-url]: http://npm-stat.com/charts.html?package=is-regex + diff --git a/node_modules/is-regex/index.js b/node_modules/is-regex/index.js new file mode 100644 index 0000000000000..be6513390f7d3 --- /dev/null +++ b/node_modules/is-regex/index.js @@ -0,0 +1,39 @@ +'use strict'; + +var has = require('has'); +var regexExec = RegExp.prototype.exec; +var gOPD = Object.getOwnPropertyDescriptor; + +var tryRegexExecCall = function tryRegexExec(value) { + try { + var lastIndex = value.lastIndex; + value.lastIndex = 0; + + regexExec.call(value); + return true; + } catch (e) { + return false; + } finally { + value.lastIndex = lastIndex; + } +}; +var toStr = Object.prototype.toString; +var regexClass = '[object RegExp]'; +var hasToStringTag = typeof Symbol === 'function' && typeof Symbol.toStringTag === 'symbol'; + +module.exports = function isRegex(value) { + if (!value || typeof value !== 'object') { + return false; + } + if (!hasToStringTag) { + return toStr.call(value) === regexClass; + } + + var descriptor = gOPD(value, 'lastIndex'); + var hasLastIndexDataProperty = descriptor && has(descriptor, 'value'); + if (!hasLastIndexDataProperty) { + return false; + } + + return tryRegexExecCall(value); +}; diff --git a/node_modules/is-regex/package.json b/node_modules/is-regex/package.json new file mode 100644 index 0000000000000..697123805ac3e --- /dev/null +++ b/node_modules/is-regex/package.json @@ -0,0 +1,77 @@ +{ + "name": "is-regex", + "version": "1.0.4", + "description": "Is this value a JS regex? 
Works cross-realm/iframe, and despite ES6 @@toStringTag", + "author": "Jordan Harband", + "license": "MIT", + "main": "index.js", + "scripts": { + "pretest": "npm run lint", + "test": "npm run tests-only", + "tests-only": "node --harmony --es-staging test.js", + "posttest": "npm run security", + "coverage": "covert test.js", + "coverage-quiet": "covert test.js --quiet", + "lint": "npm run jscs && npm run eslint", + "jscs": "jscs *.js", + "eslint": "eslint test.js *.js", + "eccheck": "editorconfig-tools check *.js **/*.js > /dev/null", + "security": "nsp check" + }, + "repository": { + "type": "git", + "url": "git://github.com/ljharb/is-regex.git" + }, + "bugs": { + "url": "https://github.com/ljharb/is-regex/issues" + }, + "homepage": "https://github.com/ljharb/is-regex", + "keywords": [ + "regex", + "regexp", + "is", + "regular expression", + "regular", + "expression" + ], + "dependencies": { + "has": "^1.0.1" + }, + "devDependencies": { + "tape": "^4.6.3", + "covert": "^1.1.0", + "jscs": "^3.0.7", + "editorconfig-tools": "^0.1.1", + "nsp": "^2.6.2", + "eslint": "^3.15.0", + "@ljharb/eslint-config": "^11.0.0", + "semver": "^5.3.0", + "replace": "^0.3.0" + }, + "testling": { + "files": "test.js", + "browsers": [ + "iexplore/6.0..latest", + "firefox/3.0..6.0", + "firefox/15.0..latest", + "firefox/nightly", + "chrome/4.0..10.0", + "chrome/20.0..latest", + "chrome/canary", + "opera/10.0..12.0", + "opera/15.0..latest", + "opera/next", + "safari/4.0..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2" + ] + }, + "engines": { + "node": ">= 0.4" + } + +,"_resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.4.tgz" +,"_integrity": "sha1-VRdIm1RwkbCTDglWVM7SXul+lJE=" +,"_from": "is-regex@1.0.4" +} \ No newline at end of file diff --git a/node_modules/is-regex/test.js b/node_modules/is-regex/test.js new file mode 100644 index 0000000000000..8d390038dae33 --- /dev/null +++ b/node_modules/is-regex/test.js @@ -0,0 +1,58 @@ +'use strict'; + +var test = require('tape'); +var isRegex = require('./'); +var hasToStringTag = typeof Symbol === 'function' && typeof Symbol.toStringTag === 'symbol'; + +test('not regexes', function (t) { + t.notOk(isRegex(), 'undefined is not regex'); + t.notOk(isRegex(null), 'null is not regex'); + t.notOk(isRegex(false), 'false is not regex'); + t.notOk(isRegex(true), 'true is not regex'); + t.notOk(isRegex(42), 'number is not regex'); + t.notOk(isRegex('foo'), 'string is not regex'); + t.notOk(isRegex([]), 'array is not regex'); + t.notOk(isRegex({}), 'object is not regex'); + t.notOk(isRegex(function () {}), 'function is not regex'); + t.end(); +}); + +test('@@toStringTag', { skip: !hasToStringTag }, function (t) { + var regex = /a/g; + var fakeRegex = { + toString: function () { return String(regex); }, + valueOf: function () { return regex; } + }; + fakeRegex[Symbol.toStringTag] = 'RegExp'; + t.notOk(isRegex(fakeRegex), 'fake RegExp with @@toStringTag "RegExp" is not regex'); + t.end(); +}); + +test('regexes', function (t) { + t.ok(isRegex(/a/g), 'regex literal is regex'); + t.ok(isRegex(new RegExp('a', 'g')), 'regex object is regex'); + t.end(); +}); + +test('does not mutate regexes', function (t) { + t.test('lastIndex is a marker object', function (st) { + var regex = /a/; + var marker = {}; + regex.lastIndex = marker; + st.equal(regex.lastIndex, marker, 'lastIndex is the marker object'); + st.ok(isRegex(regex), 'is regex'); + st.equal(regex.lastIndex, marker, 'lastIndex is the marker object after isRegex'); + st.end(); + }); + + 
t.test('lastIndex is nonzero', function (st) { + var regex = /a/; + regex.lastIndex = 3; + st.equal(regex.lastIndex, 3, 'lastIndex is 3'); + st.ok(isRegex(regex), 'is regex'); + st.equal(regex.lastIndex, 3, 'lastIndex is 3 after isRegex'); + st.end(); + }); + + t.end(); +}); diff --git a/node_modules/is-symbol/.editorconfig b/node_modules/is-symbol/.editorconfig new file mode 100644 index 0000000000000..eaa214161f5cd --- /dev/null +++ b/node_modules/is-symbol/.editorconfig @@ -0,0 +1,13 @@ +root = true + +[*] +indent_style = tab; +insert_final_newline = true; +quote_type = auto; +space_after_anonymous_functions = true; +space_after_control_statements = true; +spaces_around_operators = true; +trim_trailing_whitespace = true; +spaces_in_brackets = false; +end_of_line = lf; + diff --git a/node_modules/is-symbol/.jscs.json b/node_modules/is-symbol/.jscs.json new file mode 100644 index 0000000000000..b4d9b8b40aebf --- /dev/null +++ b/node_modules/is-symbol/.jscs.json @@ -0,0 +1,176 @@ +{ + "es3": true, + + "additionalRules": [], + + "requireSemicolons": true, + + "disallowMultipleSpaces": true, + + "disallowIdentifierNames": [], + + "requireCurlyBraces": { + "allExcept": [], + "keywords": ["if", "else", "for", "while", "do", "try", "catch"] + }, + + "requireSpaceAfterKeywords": ["if", "else", "for", "while", "do", "switch", "return", "try", "catch", "function"], + + "disallowSpaceAfterKeywords": [], + + "disallowSpaceBeforeComma": true, + "disallowSpaceAfterComma": false, + "disallowSpaceBeforeSemicolon": true, + + "disallowNodeTypes": [ + "DebuggerStatement", + "ForInStatement", + "LabeledStatement", + "SwitchCase", + "SwitchStatement", + "WithStatement" + ], + + "requireObjectKeysOnNewLine": { "allExcept": ["sameLine"] }, + + "requireSpacesInAnonymousFunctionExpression": { "beforeOpeningRoundBrace": true, "beforeOpeningCurlyBrace": true }, + "requireSpacesInNamedFunctionExpression": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInNamedFunctionExpression": { "beforeOpeningRoundBrace": true }, + "requireSpacesInFunctionDeclaration": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInFunctionDeclaration": { "beforeOpeningRoundBrace": true }, + + "requireSpaceBetweenArguments": true, + + "disallowSpacesInsideParentheses": true, + + "disallowSpacesInsideArrayBrackets": true, + + "disallowQuotedKeysInObjects": { "allExcept": ["reserved"] }, + + "disallowSpaceAfterObjectKeys": true, + + "requireCommaBeforeLineBreak": true, + + "disallowSpaceAfterPrefixUnaryOperators": ["++", "--", "+", "-", "~", "!"], + "requireSpaceAfterPrefixUnaryOperators": [], + + "disallowSpaceBeforePostfixUnaryOperators": ["++", "--"], + "requireSpaceBeforePostfixUnaryOperators": [], + + "disallowSpaceBeforeBinaryOperators": [], + "requireSpaceBeforeBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + + "requireSpaceAfterBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + "disallowSpaceAfterBinaryOperators": [], + + "disallowImplicitTypeConversion": ["binary", "string"], + + "disallowKeywords": ["with", "eval"], + + "requireKeywordsOnNewLine": [], + "disallowKeywordsOnNewLine": ["else"], + + "requireLineFeedAtFileEnd": true, + + "disallowTrailingWhitespace": true, + + "disallowTrailingComma": true, + + "excludeFiles": ["node_modules/**", "vendor/**"], + + "disallowMultipleLineStrings": true, + + "requireDotNotation": { "allExcept": ["keywords"] }, + + "requireParenthesesAroundIIFE": true, + + "validateLineBreaks": "LF", + + "validateQuoteMarks": { + "escape": true, + 
"mark": "'" + }, + + "disallowOperatorBeforeLineBreak": [], + + "requireSpaceBeforeKeywords": [ + "do", + "for", + "if", + "else", + "switch", + "case", + "try", + "catch", + "finally", + "while", + "with", + "return" + ], + + "validateAlignedFunctionParameters": { + "lineBreakAfterOpeningBraces": true, + "lineBreakBeforeClosingBraces": true + }, + + "requirePaddingNewLinesBeforeExport": true, + + "validateNewlineAfterArrayElements": { + "maximum": 1 + }, + + "requirePaddingNewLinesAfterUseStrict": true, + + "disallowArrowFunctions": true, + + "disallowMultiLineTernary": true, + + "validateOrderInObjectKeys": "asc-insensitive", + + "disallowIdenticalDestructuringNames": true, + + "disallowNestedTernaries": { "maxLevel": 1 }, + + "requireSpaceAfterComma": { "allExcept": ["trailing"] }, + "requireAlignedMultilineParams": false, + + "requireSpacesInGenerator": { + "afterStar": true + }, + + "disallowSpacesInGenerator": { + "beforeStar": true + }, + + "disallowVar": false, + + "requireArrayDestructuring": false, + + "requireEnhancedObjectLiterals": false, + + "requireObjectDestructuring": false, + + "requireEarlyReturn": false, + + "requireCapitalizedConstructorsNew": { + "allExcept": ["Function", "String", "Object", "Symbol", "Number", "Date", "RegExp", "Error", "Boolean", "Array"] + }, + + "requireImportAlphabetized": false, + + "requireSpaceBeforeObjectValues": true, + "requireSpaceBeforeDestructuredValues": true, + + "disallowSpacesInsideTemplateStringPlaceholders": true, + + "disallowArrayDestructuringReturn": false, + + "requireNewlineBeforeSingleStatementsInIf": false, + + "disallowUnusedVariables": true, + + "requireSpacesInsideImportedObjectBraces": true, + + "requireUseStrict": true +} + diff --git a/node_modules/is-symbol/.nvmrc b/node_modules/is-symbol/.nvmrc new file mode 100644 index 0000000000000..64f5a0a6813a4 --- /dev/null +++ b/node_modules/is-symbol/.nvmrc @@ -0,0 +1 @@ +node diff --git a/node_modules/is-symbol/.travis.yml b/node_modules/is-symbol/.travis.yml new file mode 100644 index 0000000000000..c671d5ea89c49 --- /dev/null +++ b/node_modules/is-symbol/.travis.yml @@ -0,0 +1,241 @@ +language: node_js +os: + - linux +node_js: + - "10.11" + - "9.11" + - "8.12" + - "7.10" + - "6.14" + - "5.12" + - "4.9" + - "iojs-v3.3" + - "iojs-v2.5" + - "iojs-v1.8" + - "0.12" + - "0.10" + - "0.8" +before_install: + - 'case "${TRAVIS_NODE_VERSION}" in 0.*) export NPM_CONFIG_STRICT_SSL=false ;; esac' + - 'nvm install-latest-npm' +install: + - 'if [ "${TRAVIS_NODE_VERSION}" = "0.6" ] || [ "${TRAVIS_NODE_VERSION}" = "0.9" ]; then nvm install --latest-npm 0.8 && npm install && nvm use "${TRAVIS_NODE_VERSION}"; else npm install; fi;' +script: + - 'if [ -n "${PRETEST-}" ]; then npm run pretest ; fi' + - 'if [ -n "${POSTTEST-}" ]; then npm run posttest ; fi' + - 'if [ -n "${COVERAGE-}" ]; then npm run coverage ; fi' + - 'if [ -n "${TEST-}" ]; then npm run tests-only ; fi' +sudo: false +env: + - TEST=true +matrix: + fast_finish: true + include: + - node_js: "lts/*" + env: PRETEST=true + - node_js: "lts/*" + env: POSTTEST=true + - node_js: "4" + env: COVERAGE=true + - node_js: "10.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.3" + env: TEST=true 
ALLOW_FAILURE=true + - node_js: "10.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.13" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.12" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.7" + env: TEST=true ALLOW_FAILURE=true + - 
node_js: "4.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.4" + env: TEST=true ALLOW_FAILURE=true + allow_failures: + - os: osx + - env: TEST=true ALLOW_FAILURE=true + - env: COVERAGE=true diff --git a/node_modules/is-symbol/CHANGELOG.md b/node_modules/is-symbol/CHANGELOG.md new file mode 100644 index 0000000000000..a7b8baf8db733 --- /dev/null +++ b/node_modules/is-symbol/CHANGELOG.md @@ -0,0 +1,12 @@ +1.0.2 / 2018-09-20 +================= + * [Refactor] use `has-symbols` and `object-inspect` + * [Tests] test on all the node minor versions + +1.0.1 / 2015-01-26 +================= + * Corrected description + +1.0.0 / 2015-01-24 +================= + * Initial release diff --git a/node_modules/is-symbol/LICENSE b/node_modules/is-symbol/LICENSE new file mode 100644 index 0000000000000..b43df444e5182 --- /dev/null +++ b/node_modules/is-symbol/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Jordan Harband + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ diff --git a/node_modules/is-symbol/Makefile b/node_modules/is-symbol/Makefile new file mode 100644 index 0000000000000..b9e4fe1aab3dd --- /dev/null +++ b/node_modules/is-symbol/Makefile @@ -0,0 +1,61 @@ +# Since we rely on paths relative to the makefile location, abort if make isn't being run from there. +$(if $(findstring /,$(MAKEFILE_LIST)),$(error Please only invoke this makefile from the directory it resides in)) + + # The files that need updating when incrementing the version number. +VERSIONED_FILES := *.js *.json README* + + +# Add the local npm packages' bin folder to the PATH, so that `make` can find them, when invoked directly. +# Note that rather than using `$(npm bin)` the 'node_modules/.bin' path component is hard-coded, so that invocation works even from an environment +# where npm is (temporarily) unavailable due to having deactivated an nvm instance loaded into the calling shell in order to avoid interference with tests. +export PATH := $(shell printf '%s' "$$PWD/node_modules/.bin:$$PATH") +UTILS := semver +# Make sure that all required utilities can be located. +UTIL_CHECK := $(or $(shell PATH="$(PATH)" which $(UTILS) >/dev/null && echo 'ok'),$(error Did you forget to run `npm install` after cloning the repo? At least one of the required supporting utilities not found: $(UTILS))) + +# Default target (by virtue of being the first non '.'-prefixed in the file). +.PHONY: _no-target-specified +_no-target-specified: + $(error Please specify the target to make - `make list` shows targets. Alternatively, use `npm test` to run the default tests; `npm run` shows all tests) + +# Lists all targets defined in this makefile. +.PHONY: list +list: + @$(MAKE) -pRrn : -f $(MAKEFILE_LIST) 2>/dev/null | awk -v RS= -F: '/^# File/,/^# Finished Make data base/ {if ($$1 !~ "^[#.]") {print $$1}}' | command grep -v -e '^[^[:alnum:]]' -e '^$@$$command ' | sort + +# All-tests target: invokes the specified test suites for ALL shells defined in $(SHELLS). +.PHONY: test +test: + @npm test + +.PHONY: _ensure-tag +_ensure-tag: +ifndef TAG + $(error Please invoke with `make TAG= release`, where is either an increment specifier (patch, minor, major, prepatch, preminor, premajor, prerelease), or an explicit major.minor.patch version number) +endif + +CHANGELOG_ERROR = $(error No CHANGELOG specified) +.PHONY: _ensure-changelog +_ensure-changelog: + @ (git status -sb --porcelain | command grep -E '^( M|[MA] ) CHANGELOG.md' > /dev/null) || (echo no CHANGELOG.md specified && exit 2) + +# Ensures that the git workspace is clean. +.PHONY: _ensure-clean +_ensure-clean: + @[ -z "$$((git status --porcelain --untracked-files=no || echo err) | command grep -v 'CHANGELOG.md')" ] || { echo "Workspace is not clean; please commit changes first." >&2; exit 2; } + +# Makes a release; invoke with `make TAG= release`. +.PHONY: release +release: _ensure-tag _ensure-changelog _ensure-clean + @old_ver=`git describe --abbrev=0 --tags --match 'v[0-9]*.[0-9]*.[0-9]*'` || { echo "Failed to determine current version." >&2; exit 1; }; old_ver=$${old_ver#v}; \ + new_ver=`echo "$(TAG)" | sed 's/^v//'`; new_ver=$${new_ver:-patch}; \ + if printf "$$new_ver" | command grep -q '^[0-9]'; then \ + semver "$$new_ver" >/dev/null || { echo 'Invalid version number specified: $(TAG) - must be major.minor.patch' >&2; exit 2; }; \ + semver -r "> $$old_ver" "$$new_ver" >/dev/null || { echo 'Invalid version number specified: $(TAG) - must be HIGHER than current one.' 
>&2; exit 2; } \ + else \ + new_ver=`semver -i "$$new_ver" "$$old_ver"` || { echo 'Invalid version-increment specifier: $(TAG)' >&2; exit 2; } \ + fi; \ + printf "=== Bumping version **$$old_ver** to **$$new_ver** before committing and tagging:\n=== TYPE 'proceed' TO PROCEED, anything else to abort: " && read response && [ "$$response" = 'proceed' ] || { echo 'Aborted.' >&2; exit 2; }; \ + replace "$$old_ver" "$$new_ver" -- $(VERSIONED_FILES) && \ + git commit -m "v$$new_ver" $(VERSIONED_FILES) CHANGELOG.md && \ + git tag -a -m "v$$new_ver" "v$$new_ver" diff --git a/node_modules/is-symbol/README.md b/node_modules/is-symbol/README.md new file mode 100644 index 0000000000000..8544c8c0937c1 --- /dev/null +++ b/node_modules/is-symbol/README.md @@ -0,0 +1,46 @@ +#is-symbol [![Version Badge][2]][1] + +[![Build Status][3]][4] +[![dependency status][5]][6] +[![dev dependency status][7]][8] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][11]][1] + +[![browser support][9]][10] + +Is this an ES6 Symbol value? + +## Example + +```js +var isSymbol = require('is-symbol'); +assert(!isSymbol(function () {})); +assert(!isSymbol(null)); +assert(!isSymbol(function* () { yield 42; return Infinity; }); + +assert(isSymbol(Symbol.iterator)); +assert(isSymbol(Symbol('foo'))); +assert(isSymbol(Symbol.for('foo'))); +assert(isSymbol(Object(Symbol('foo')))); +``` + +## Tests +Simply clone the repo, `npm install`, and run `npm test` + +[1]: https://npmjs.org/package/is-symbol +[2]: http://versionbadg.es/ljharb/is-symbol.svg +[3]: https://travis-ci.org/ljharb/is-symbol.svg +[4]: https://travis-ci.org/ljharb/is-symbol +[5]: https://david-dm.org/ljharb/is-symbol.svg +[6]: https://david-dm.org/ljharb/is-symbol +[7]: https://david-dm.org/ljharb/is-symbol/dev-status.svg +[8]: https://david-dm.org/ljharb/is-symbol#info=devDependencies +[9]: https://ci.testling.com/ljharb/is-symbol.png +[10]: https://ci.testling.com/ljharb/is-symbol +[11]: https://nodei.co/npm/is-symbol.png?downloads=true&stars=true +[license-image]: http://img.shields.io/npm/l/is-symbol.svg +[license-url]: LICENSE +[downloads-image]: http://img.shields.io/npm/dm/is-symbol.svg +[downloads-url]: http://npm-stat.com/charts.html?package=is-symbol diff --git a/node_modules/is-symbol/index.js b/node_modules/is-symbol/index.js new file mode 100644 index 0000000000000..3d653e27f5fc6 --- /dev/null +++ b/node_modules/is-symbol/index.js @@ -0,0 +1,35 @@ +'use strict'; + +var toStr = Object.prototype.toString; +var hasSymbols = require('has-symbols')(); + +if (hasSymbols) { + var symToStr = Symbol.prototype.toString; + var symStringRegex = /^Symbol\(.*\)$/; + var isSymbolObject = function isRealSymbolObject(value) { + if (typeof value.valueOf() !== 'symbol') { + return false; + } + return symStringRegex.test(symToStr.call(value)); + }; + + module.exports = function isSymbol(value) { + if (typeof value === 'symbol') { + return true; + } + if (toStr.call(value) !== '[object Symbol]') { + return false; + } + try { + return isSymbolObject(value); + } catch (e) { + return false; + } + }; +} else { + + module.exports = function isSymbol(value) { + // this environment does not support Symbols. 
+ return false && value; + }; +} diff --git a/node_modules/is-symbol/package.json b/node_modules/is-symbol/package.json new file mode 100644 index 0000000000000..5e124e2194a73 --- /dev/null +++ b/node_modules/is-symbol/package.json @@ -0,0 +1,72 @@ +{ + "name": "is-symbol", + "version": "1.0.2", + "description": "Determine if a value is an ES6 Symbol or not.", + "main": "index.js", + "scripts": { + "prepublish": "safe-publish-latest", + "pretest": "npm run lint", + "tests-only": "node --es-staging --harmony test", + "test": "npm run tests-only", + "posttest": "npm run security", + "coverage": "covert test", + "lint": "npm run jscs && npm run eslint", + "jscs": "jscs *.js */*.js", + "eslint": "eslint *.js */*.js", + "security": "nsp check" + }, + "repository": { + "type": "git", + "url": "git://github.com/ljharb/is-symbol.git" + }, + "keywords": [ + "symbol", + "es6", + "is", + "Symbol" + ], + "author": "Jordan Harband", + "license": "MIT", + "bugs": { + "url": "https://github.com/ljharb/is-symbol/issues" + }, + "dependencies": { + "has-symbols": "^1.0.0" + }, + "devDependencies": { + "@ljharb/eslint-config": "^12.2.1", + "covert": "^1.1.0", + "eslint": "^4.19.1", + "jscs": "^3.0.7", + "nsp": "^3.2.1", + "object-inspect": "^1.6.0", + "safe-publish-latest": "^1.1.2", + "semver": "^5.5.0", + "tape": "^4.9.0" + }, + "testling": { + "files": "test/index.js", + "browsers": [ + "iexplore/6.0..latest", + "firefox/3.0..6.0", + "firefox/15.0..latest", + "firefox/nightly", + "chrome/4.0..10.0", + "chrome/20.0..latest", + "chrome/canary", + "opera/10.0..latest", + "opera/next", + "safari/4.0..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2" + ] + }, + "engines": { + "node": ">= 0.4" + } + +,"_resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.2.tgz" +,"_integrity": "sha512-HS8bZ9ox60yCJLH9snBpIwv9pYUAkcuLhSA1oero1UB5y9aiQpRA8y2ex945AOtCZL1lJDeIk3G5LthswI46Lw==" +,"_from": "is-symbol@1.0.2" +} \ No newline at end of file diff --git a/node_modules/is-symbol/test/index.js b/node_modules/is-symbol/test/index.js new file mode 100644 index 0000000000000..e01f035c8ca3a --- /dev/null +++ b/node_modules/is-symbol/test/index.js @@ -0,0 +1,92 @@ +'use strict'; + +var test = require('tape'); +var isSymbol = require('../index'); + +var forEach = function (arr, func) { + var i; + for (i = 0; i < arr.length; ++i) { + func(arr[i], i, arr); + } +}; + +var hasSymbols = require('has-symbols')(); +var inspect = require('object-inspect'); +var debug = function (v, m) { return inspect(v) + ' ' + m; }; + +test('non-symbol values', function (t) { + var nonSymbols = [ + true, + false, + Object(true), + Object(false), + null, + undefined, + {}, + [], + /a/g, + 'string', + 42, + new Date(), + function () {}, + NaN + ]; + t.plan(nonSymbols.length); + forEach(nonSymbols, function (nonSymbol) { + t.equal(false, isSymbol(nonSymbol), debug(nonSymbol, 'is not a symbol')); + }); + t.end(); +}); + +test('faked symbol values', function (t) { + t.test('real symbol valueOf', { skip: !hasSymbols }, function (st) { + var fakeSymbol = { valueOf: function () { return Symbol('foo'); } }; + st.equal(false, isSymbol(fakeSymbol), 'object with valueOf returning a symbol is not a symbol'); + st.end(); + }); + + t.test('faked @@toStringTag', { skip: !hasSymbols || !Symbol.toStringTag }, function (st) { + var fakeSymbol = { valueOf: function () { return Symbol('foo'); } }; + fakeSymbol[Symbol.toStringTag] = 'Symbol'; + st.equal(false, isSymbol(fakeSymbol), 'object with fake Symbol @@toStringTag and valueOf 
returning a symbol is not a symbol'); + var notSoFakeSymbol = { valueOf: function () { return 42; } }; + notSoFakeSymbol[Symbol.toStringTag] = 'Symbol'; + st.equal(false, isSymbol(notSoFakeSymbol), 'object with fake Symbol @@toStringTag and valueOf not returning a symbol is not a symbol'); + st.end(); + }); + + var fakeSymbolString = { toString: function () { return 'Symbol(foo)'; } }; + t.equal(false, isSymbol(fakeSymbolString), 'object with toString returning Symbol(foo) is not a symbol'); + + t.end(); +}); + +test('Symbol support', { skip: !hasSymbols }, function (t) { + t.test('well-known Symbols', function (st) { + var isWellKnown = function filterer(name) { + return name !== 'for' && name !== 'keyFor' && !(name in filterer); + }; + var wellKnownSymbols = Object.getOwnPropertyNames(Symbol).filter(isWellKnown); + wellKnownSymbols.forEach(function (name) { + var sym = Symbol[name]; + st.equal(true, isSymbol(sym), debug(sym, ' is a symbol')); + }); + st.end(); + }); + + t.test('user-created symbols', function (st) { + var symbols = [ + Symbol(), + Symbol('foo'), + Symbol['for']('foo'), + Object(Symbol('object')) + ]; + symbols.forEach(function (sym) { + st.equal(true, isSymbol(sym), debug(sym, ' is a symbol')); + }); + st.end(); + }); + + t.end(); +}); + diff --git a/node_modules/lcid/index.js b/node_modules/lcid/index.js index 69bd3d231e059..b666ef00a4504 100644 --- a/node_modules/lcid/index.js +++ b/node_modules/lcid/index.js @@ -1,9 +1,10 @@ 'use strict'; -var invertKv = require('invert-kv'); -var all = require('./lcid.json'); -var inverted = invertKv(all); +const invertKv = require('invert-kv'); +const all = require('./lcid.json'); -exports.from = function (lcidCode) { +const inverted = invertKv(all); + +exports.from = lcidCode => { if (typeof lcidCode !== 'number') { throw new TypeError('Expected a number'); } @@ -11,7 +12,7 @@ exports.from = function (lcidCode) { return inverted[lcidCode]; }; -exports.to = function (localeId) { +exports.to = localeId => { if (typeof localeId !== 'string') { throw new TypeError('Expected a string'); } diff --git a/node_modules/lcid/license b/node_modules/lcid/license index 654d0bfe94343..e7af2f77107d7 100644 --- a/node_modules/lcid/license +++ b/node_modules/lcid/license @@ -1,21 +1,9 @@ -The MIT License (MIT) +MIT License Copyright (c) Sindre Sorhus (sindresorhus.com) -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/lcid/package.json b/node_modules/lcid/package.json index 6ab09a0881309..3de023722441e 100644 --- a/node_modules/lcid/package.json +++ b/node_modules/lcid/package.json @@ -1,28 +1,27 @@ { - "_from": "lcid@^1.0.0", - "_id": "lcid@1.0.0", + "_from": "lcid@^2.0.0", + "_id": "lcid@2.0.0", "_inBundle": false, - "_integrity": "sha1-MIrMr6C8SDo4Z7S28rlQYlHRuDU=", + "_integrity": "sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA==", "_location": "/lcid", "_phantomChildren": {}, "_requested": { "type": "range", "registry": true, - "raw": "lcid@^1.0.0", + "raw": "lcid@^2.0.0", "name": "lcid", "escapedName": "lcid", - "rawSpec": "^1.0.0", + "rawSpec": "^2.0.0", "saveSpec": null, - "fetchSpec": "^1.0.0" + "fetchSpec": "^2.0.0" }, "_requiredBy": [ - "/os-locale", - "/tacks/os-locale" + "/os-locale" ], - "_resolved": "https://registry.npmjs.org/lcid/-/lcid-1.0.0.tgz", - "_shasum": "308accafa0bc483a3867b4b6f2b9506251d1b835", - "_spec": "lcid@^1.0.0", - "_where": "/Users/rebecca/code/npm/node_modules/os-locale", + "_resolved": "https://registry.npmjs.org/lcid/-/lcid-2.0.0.tgz", + "_shasum": "6ef5d2df60e52f82eb228a4c373e8d1f397253cf", + "_spec": "lcid@^2.0.0", + "_where": "/Users/mperrotte/npminc/cli/node_modules/os-locale", "author": { "name": "Sindre Sorhus", "email": "sindresorhus@gmail.com", @@ -33,15 +32,16 @@ }, "bundleDependencies": false, "dependencies": { - "invert-kv": "^1.0.0" + "invert-kv": "^2.0.0" }, "deprecated": false, "description": "Mapping between standard locale identifiers and Windows locale identifiers (LCID)", "devDependencies": { - "ava": "0.0.4" + "ava": "*", + "xo": "*" }, "engines": { - "node": ">=0.10.0" + "node": ">=6" }, "files": [ "index.js", @@ -73,7 +73,7 @@ "url": "git+https://github.com/sindresorhus/lcid.git" }, "scripts": { - "test": "node test.js" + "test": "xo && ava" }, - "version": "1.0.0" + "version": "2.0.0" } diff --git a/node_modules/lcid/readme.md b/node_modules/lcid/readme.md index bee4a70166978..2b1aa798b6930 100644 --- a/node_modules/lcid/readme.md +++ b/node_modules/lcid/readme.md @@ -1,8 +1,8 @@ # lcid [![Build Status](https://travis-ci.org/sindresorhus/lcid.svg?branch=master)](https://travis-ci.org/sindresorhus/lcid) -> Mapping between [standard locale identifiers](http://en.wikipedia.org/wiki/Locale) and [Windows locale identifiers (LCID)](http://en.wikipedia.org/wiki/Locale#Specifics_for_Microsoft_platforms) +> Mapping between [standard locale identifiers](https://en.wikipedia.org/wiki/Locale_(computer_software)) and [Windows locale identifiers 
(LCID)](http://en.wikipedia.org/wiki/Locale#Specifics_for_Microsoft_platforms) -Based on the [mapping](https://github.com/python/cpython/blob/be2a1a76fa43bb1ea1b3577bb5bdd506a2e90e37/Lib/locale.py#L1395-L1604) used in the Python standard library. +Based on the [mapping](https://github.com/python/cpython/blob/8f7bb100d0fa7fb2714f3953b5b627878277c7c6/Lib/locale.py#L1465-L1674) used in the Python standard library. The mapping itself is just a [JSON file](lcid.json) and can be used wherever. @@ -10,14 +10,14 @@ The mapping itself is just a [JSON file](lcid.json) and can be used wherever. ## Install ``` -$ npm install --save lcid +$ npm install lcid ``` ## Usage ```js -var lcid = require('lcid'); +const lcid = require('lcid'); lcid.from(1044); //=> 'nb_NO' @@ -32,4 +32,4 @@ lcid.all; ## License -MIT © [Sindre Sorhus](http://sindresorhus.com) +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/libcipm/CHANGELOG.md b/node_modules/libcipm/CHANGELOG.md index a1949b8565916..80411f300698c 100644 --- a/node_modules/libcipm/CHANGELOG.md +++ b/node_modules/libcipm/CHANGELOG.md @@ -2,33 +2,139 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [4.0.7](https://github.com/npm/libcipm/compare/v4.0.4...v4.0.7) (2019-10-09) + + +### Bug Fixes + +* delete node_modules contents but keep the dir itself ([f668181](https://github.com/npm/libcipm/commit/f668181)), closes [#3](https://github.com/npm/libcipm/issues/3) + + +## [4.0.4](https://github.com/npm/libcipm/compare/v4.0.3...v4.0.4) (2019-09-24) + + +### Bug Fixes + +* pack git directories properly ([576ab36](https://github.com/npm/libcipm/commit/576ab36)), closes [#4](https://github.com/npm/libcipm/issues/4) + + + + +## [4.0.3](https://github.com/npm/libcipm/compare/v4.0.2...v4.0.3) (2019-08-12) + + +### Bug Fixes + +* do not pass opts.log to lifecycle ([46b2101](https://github.com/npm/libcipm/commit/46b2101)) + + + + +## [4.0.2](https://github.com/npm/libcipm/compare/v4.0.1...v4.0.2) (2019-08-12) + + + + +## [4.0.1](https://github.com/npm/libcipm/compare/v4.0.0...v4.0.1) (2019-08-12) + + +### Bug Fixes + +* respect and retain all configs passed in ([20b7372](https://github.com/npm/libcipm/commit/20b7372)) + + + + +# [4.0.0](https://github.com/npm/libcipm/compare/v3.0.3...v4.0.0) (2019-07-10) + + +* npm-lifecycle@3.0.0 ([84b8d7e](https://github.com/npm/libcipm/commit/84b8d7e)) + + +### Bug Fixes + +* **lifecycle:** remove warning from bluebird ([#59](https://github.com/npm/libcipm/issues/59)) ([7af39e6](https://github.com/npm/libcipm/commit/7af39e6)), closes [#58](https://github.com/npm/libcipm/issues/58) + + +### BREAKING CHANGES + +* requires updating node-gyp in npm/cli + + + + +## [3.0.3](https://github.com/npm/libcipm/compare/v3.0.2...v3.0.3) (2019-01-22) + + +### Bug Fixes + +* **scripts:** pass in opts.dir directly ([018df27](https://github.com/npm/libcipm/commit/018df27)) + + + + +## [3.0.2](https://github.com/npm/libcipm/compare/v3.0.1...v3.0.2) (2018-08-31) + + +### Bug Fixes + +* **worker:** missed a spot ([4371558](https://github.com/npm/libcipm/commit/4371558)) + + + + +## [3.0.1](https://github.com/npm/libcipm/compare/v3.0.0...v3.0.1) (2018-08-31) + + +### Bug Fixes + +* **workers:** disable workers for now ([64db490](https://github.com/npm/libcipm/commit/64db490)) + + + + +# [3.0.0](https://github.com/npm/libcipm/compare/v2.0.2...v3.0.0) (2018-08-31) + + +### Features + +* **config:** switch 
to modern, figgy-pudding configuration ([#57](https://github.com/npm/libcipm/issues/57)) ([161f6b2](https://github.com/npm/libcipm/commit/161f6b2)) + + +### BREAKING CHANGES + +* **config:** this updates cipm to use pacote@9, which consumes npm-style config objects, not pacoteOpts()-style objects. + + + -## [2.0.2](https://github.com/zkat/cipm/compare/v2.0.1...v2.0.2) (2018-08-10) +## [2.0.2](https://github.com/npm/libcipm/compare/v2.0.1...v2.0.2) (2018-08-10) ### Bug Fixes -* **child:** only override dirPacker if opts.dirPacker is defined ([#55](https://github.com/zkat/cipm/issues/55)) ([13ab2f0](https://github.com/zkat/cipm/commit/13ab2f0)) +* **child:** only override dirPacker if opts.dirPacker is defined ([#55](https://github.com/npm/libcipm/issues/55)) ([13ab2f0](https://github.com/npm/libcipm/commit/13ab2f0)) -## [2.0.1](https://github.com/zkat/cipm/compare/v2.0.0...v2.0.1) (2018-07-27) +## [2.0.1](https://github.com/npm/libcipm/compare/v2.0.0...v2.0.1) (2018-07-27) ### Bug Fixes -* **deps:** move mkdirp to prod deps ([6878f39](https://github.com/zkat/cipm/commit/6878f39)) +* **deps:** move mkdirp to prod deps ([6878f39](https://github.com/npm/libcipm/commit/6878f39)) -# [2.0.0](https://github.com/zkat/cipm/compare/v1.6.3...v2.0.0) (2018-05-24) +# [2.0.0](https://github.com/npm/libcipm/compare/v1.6.3...v2.0.0) (2018-05-24) ### meta -* update node version support ([694b4d3](https://github.com/zkat/cipm/commit/694b4d3)) +* update node version support ([694b4d3](https://github.com/npm/libcipm/commit/694b4d3)) ### BREAKING CHANGES @@ -38,173 +144,173 @@ All notable changes to this project will be documented in this file. See [standa -## [1.6.3](https://github.com/zkat/cipm/compare/v1.6.2...v1.6.3) (2018-05-24) +## [1.6.3](https://github.com/npm/libcipm/compare/v1.6.2...v1.6.3) (2018-05-24) -## [1.6.2](https://github.com/zkat/cipm/compare/v1.6.1...v1.6.2) (2018-04-08) +## [1.6.2](https://github.com/npm/libcipm/compare/v1.6.1...v1.6.2) (2018-04-08) ### Bug Fixes -* **lifecycle:** detect binding.gyp for default install lifecycle ([#46](https://github.com/zkat/cipm/issues/46)) ([9149631](https://github.com/zkat/cipm/commit/9149631)), closes [#45](https://github.com/zkat/cipm/issues/45) +* **lifecycle:** detect binding.gyp for default install lifecycle ([#46](https://github.com/npm/libcipm/issues/46)) ([9149631](https://github.com/npm/libcipm/commit/9149631)), closes [#45](https://github.com/npm/libcipm/issues/45) -## [1.6.1](https://github.com/zkat/cipm/compare/v1.6.0...v1.6.1) (2018-03-13) +## [1.6.1](https://github.com/npm/libcipm/compare/v1.6.0...v1.6.1) (2018-03-13) ### Bug Fixes -* **bin:** Set non-zero exit code on error ([#41](https://github.com/zkat/cipm/issues/41)) ([54d0106](https://github.com/zkat/cipm/commit/54d0106)) -* **lifecycle:** defer to lifecycle’s internal logic as to whether or not to execute a run-script ([#42](https://github.com/zkat/cipm/issues/42)) ([7f27a52](https://github.com/zkat/cipm/commit/7f27a52)), closes [npm/npm#19258](https://github.com/npm/npm/issues/19258) -* **prefix:** don't reference prefix before computing it ([#40](https://github.com/zkat/cipm/issues/40)) ([08ed1cc](https://github.com/zkat/cipm/commit/08ed1cc)) -* **prefix:** Resolve to promise when passing --prefix to npm ci ([#43](https://github.com/zkat/cipm/issues/43)) ([401d466](https://github.com/zkat/cipm/commit/401d466)) +* **bin:** Set non-zero exit code on error ([#41](https://github.com/npm/libcipm/issues/41)) ([54d0106](https://github.com/npm/libcipm/commit/54d0106)) +* **lifecycle:** 
defer to lifecycle’s internal logic as to whether or not to execute a run-script ([#42](https://github.com/npm/libcipm/issues/42)) ([7f27a52](https://github.com/npm/libcipm/commit/7f27a52)), closes [npm/npm#19258](https://github.com/npm/npm/issues/19258) +* **prefix:** don't reference prefix before computing it ([#40](https://github.com/npm/libcipm/issues/40)) ([08ed1cc](https://github.com/npm/libcipm/commit/08ed1cc)) +* **prefix:** Resolve to promise when passing --prefix to npm ci ([#43](https://github.com/npm/libcipm/issues/43)) ([401d466](https://github.com/npm/libcipm/commit/401d466)) -# [1.6.0](https://github.com/zkat/cipm/compare/v1.5.1...v1.6.0) (2018-03-01) +# [1.6.0](https://github.com/npm/libcipm/compare/v1.5.1...v1.6.0) (2018-03-01) ### Bug Fixes -* **bin:** cli.js was being excluded ([d62668e](https://github.com/zkat/cipm/commit/d62668e)) +* **bin:** cli.js was being excluded ([d62668e](https://github.com/npm/libcipm/commit/d62668e)) ### Features -* **libcipm:** working standalone cipm release! ([a3383fd](https://github.com/zkat/cipm/commit/a3383fd)) +* **libcipm:** working standalone cipm release! ([a3383fd](https://github.com/npm/libcipm/commit/a3383fd)) -## [1.5.1](https://github.com/zkat/cipm/compare/v1.5.0...v1.5.1) (2018-03-01) +## [1.5.1](https://github.com/npm/libcipm/compare/v1.5.0...v1.5.1) (2018-03-01) ### Bug Fixes -* **_from:** do not add _from to directory deps ([7405360](https://github.com/zkat/cipm/commit/7405360)) +* **_from:** do not add _from to directory deps ([7405360](https://github.com/npm/libcipm/commit/7405360)) -# [1.5.0](https://github.com/zkat/cipm/compare/v1.4.1...v1.5.0) (2018-03-01) +# [1.5.0](https://github.com/npm/libcipm/compare/v1.4.1...v1.5.0) (2018-03-01) ### Bug Fixes -* **errors:** handle aggregate errors better ([6239499](https://github.com/zkat/cipm/commit/6239499)) +* **errors:** handle aggregate errors better ([6239499](https://github.com/npm/libcipm/commit/6239499)) ### Features -* **logger:** rudimentary progress bar update ([c5d9dc7](https://github.com/zkat/cipm/commit/c5d9dc7)) +* **logger:** rudimentary progress bar update ([c5d9dc7](https://github.com/npm/libcipm/commit/c5d9dc7)) -## [1.4.1](https://github.com/zkat/cipm/compare/v1.4.0...v1.4.1) (2018-02-27) +## [1.4.1](https://github.com/npm/libcipm/compare/v1.4.0...v1.4.1) (2018-02-27) ### Bug Fixes -* **buildTree:** linking in parallel causes hoist-clobbering ([5ffbc0e](https://github.com/zkat/cipm/commit/5ffbc0e)), closes [#39](https://github.com/zkat/cipm/issues/39) -* **buildTree:** use checkDepEnv here too ([41a4634](https://github.com/zkat/cipm/commit/41a4634)) -* **perf:** split up updateJson and buildTree ([df5aba0](https://github.com/zkat/cipm/commit/df5aba0)) -* **perf:** stop using the readPackageJson version to update packages ([8da3d5a](https://github.com/zkat/cipm/commit/8da3d5a)) +* **buildTree:** linking in parallel causes hoist-clobbering ([5ffbc0e](https://github.com/npm/libcipm/commit/5ffbc0e)), closes [#39](https://github.com/npm/libcipm/issues/39) +* **buildTree:** use checkDepEnv here too ([41a4634](https://github.com/npm/libcipm/commit/41a4634)) +* **perf:** split up updateJson and buildTree ([df5aba0](https://github.com/npm/libcipm/commit/df5aba0)) +* **perf:** stop using the readPackageJson version to update packages ([8da3d5a](https://github.com/npm/libcipm/commit/8da3d5a)) -# [1.4.0](https://github.com/zkat/cipm/compare/v1.3.3...v1.4.0) (2018-02-21) +# [1.4.0](https://github.com/npm/libcipm/compare/v1.3.3...v1.4.0) (2018-02-21) ### Features -* 
**extract:** add support for --only and --also ([ad143ae](https://github.com/zkat/cipm/commit/ad143ae)) +* **extract:** add support for --only and --also ([ad143ae](https://github.com/npm/libcipm/commit/ad143ae)) -## [1.3.3](https://github.com/zkat/cipm/compare/v1.3.2...v1.3.3) (2018-02-21) +## [1.3.3](https://github.com/npm/libcipm/compare/v1.3.2...v1.3.3) (2018-02-21) ### Bug Fixes -* **extract:** stop extracting deps before parent :\ ([c6847dc](https://github.com/zkat/cipm/commit/c6847dc)) +* **extract:** stop extracting deps before parent :\ ([c6847dc](https://github.com/npm/libcipm/commit/c6847dc)) -## [1.3.2](https://github.com/zkat/cipm/compare/v1.3.1...v1.3.2) (2018-02-15) +## [1.3.2](https://github.com/npm/libcipm/compare/v1.3.1...v1.3.2) (2018-02-15) -## [1.3.1](https://github.com/zkat/cipm/compare/v1.3.0...v1.3.1) (2018-02-15) +## [1.3.1](https://github.com/npm/libcipm/compare/v1.3.0...v1.3.1) (2018-02-15) -# [1.3.0](https://github.com/zkat/cipm/compare/v1.2.0...v1.3.0) (2018-02-13) +# [1.3.0](https://github.com/npm/libcipm/compare/v1.2.0...v1.3.0) (2018-02-13) ### Features -* **extract:** link directory deps and install missing bundle deps ([8334e9e](https://github.com/zkat/cipm/commit/8334e9e)) +* **extract:** link directory deps and install missing bundle deps ([8334e9e](https://github.com/npm/libcipm/commit/8334e9e)) -# [1.2.0](https://github.com/zkat/cipm/compare/v1.1.2...v1.2.0) (2018-02-07) +# [1.2.0](https://github.com/npm/libcipm/compare/v1.1.2...v1.2.0) (2018-02-07) ### Features -* **metadata:** add _resolved, _integrity, and _from on install ([36642dc](https://github.com/zkat/cipm/commit/36642dc)) +* **metadata:** add _resolved, _integrity, and _from on install ([36642dc](https://github.com/npm/libcipm/commit/36642dc)) -## [1.1.2](https://github.com/zkat/cipm/compare/v1.1.1...v1.1.2) (2018-01-19) +## [1.1.2](https://github.com/npm/libcipm/compare/v1.1.1...v1.1.2) (2018-01-19) -## [1.1.1](https://github.com/zkat/cipm/compare/v1.1.0...v1.1.1) (2018-01-19) +## [1.1.1](https://github.com/npm/libcipm/compare/v1.1.0...v1.1.1) (2018-01-19) -# [1.1.0](https://github.com/zkat/cipm/compare/v1.0.1...v1.1.0) (2018-01-07) +# [1.1.0](https://github.com/npm/libcipm/compare/v1.0.1...v1.1.0) (2018-01-07) ### Features -* **log:** add some helpful log output ([f443f03](https://github.com/zkat/cipm/commit/f443f03)) +* **log:** add some helpful log output ([f443f03](https://github.com/npm/libcipm/commit/f443f03)) -## [1.0.1](https://github.com/zkat/cipm/compare/v1.0.0...v1.0.1) (2018-01-07) +## [1.0.1](https://github.com/npm/libcipm/compare/v1.0.0...v1.0.1) (2018-01-07) ### Bug Fixes -* **deps:** added protoduck to pkgjson ([ecbe719](https://github.com/zkat/cipm/commit/ecbe719)) +* **deps:** added protoduck to pkgjson ([ecbe719](https://github.com/npm/libcipm/commit/ecbe719)) -# [1.0.0](https://github.com/zkat/cipm/compare/v0.9.1...v1.0.0) (2018-01-07) +# [1.0.0](https://github.com/npm/libcipm/compare/v0.9.1...v1.0.0) (2018-01-07) ### Features -* **cli:** splitting off CLI into a separate tool ([cff65c1](https://github.com/zkat/cipm/commit/cff65c1)) +* **cli:** splitting off CLI into a separate tool ([cff65c1](https://github.com/npm/libcipm/commit/cff65c1)) ### BREAKING CHANGES @@ -214,173 +320,173 @@ All notable changes to this project will be documented in this file. 
See [standa -## [0.9.1](https://github.com/zkat/cipm/compare/v0.9.0...v0.9.1) (2018-01-07) +## [0.9.1](https://github.com/npm/libcipm/compare/v0.9.0...v0.9.1) (2018-01-07) ### Bug Fixes -* **prefix:** oops @ prefix ([cc5adac](https://github.com/zkat/cipm/commit/cc5adac)) +* **prefix:** oops @ prefix ([cc5adac](https://github.com/npm/libcipm/commit/cc5adac)) -# [0.9.0](https://github.com/zkat/cipm/compare/v0.8.0...v0.9.0) (2018-01-07) +# [0.9.0](https://github.com/npm/libcipm/compare/v0.8.0...v0.9.0) (2018-01-07) ### Bug Fixes -* **package:** add pacote to bundleDependencies ([#36](https://github.com/zkat/cipm/issues/36)) ([a69742e](https://github.com/zkat/cipm/commit/a69742e)) +* **package:** add pacote to bundleDependencies ([#36](https://github.com/npm/libcipm/issues/36)) ([a69742e](https://github.com/npm/libcipm/commit/a69742e)) ### Features -* **config:** allow injection of npm configs ([#35](https://github.com/zkat/cipm/issues/35)) ([1f5694b](https://github.com/zkat/cipm/commit/1f5694b)) +* **config:** allow injection of npm configs ([#35](https://github.com/npm/libcipm/issues/35)) ([1f5694b](https://github.com/npm/libcipm/commit/1f5694b)) -# [0.8.0](https://github.com/zkat/cipm/compare/v0.7.2...v0.8.0) (2017-11-28) +# [0.8.0](https://github.com/npm/libcipm/compare/v0.7.2...v0.8.0) (2017-11-28) ### Features -* **gyp:** new npm-lifecycle[@2](https://github.com/2) with included node-gyp ([a4ed938](https://github.com/zkat/cipm/commit/a4ed938)) +* **gyp:** new npm-lifecycle[@2](https://github.com/2) with included node-gyp ([a4ed938](https://github.com/npm/libcipm/commit/a4ed938)) -## [0.7.2](https://github.com/zkat/cipm/compare/v0.7.1...v0.7.2) (2017-10-13) +## [0.7.2](https://github.com/npm/libcipm/compare/v0.7.1...v0.7.2) (2017-10-13) ### Bug Fixes -* **extract:** idk why this was breaking. Seriously. ([433a2be](https://github.com/zkat/cipm/commit/433a2be)) -* **tree:** pass through a custom Promise to logiTree ([2d29efb](https://github.com/zkat/cipm/commit/2d29efb)) +* **extract:** idk why this was breaking. Seriously. ([433a2be](https://github.com/npm/libcipm/commit/433a2be)) +* **tree:** pass through a custom Promise to logiTree ([2d29efb](https://github.com/npm/libcipm/commit/2d29efb)) ### Performance Improvements -* zoomzoom. Even more concurrency! ([db9c2e0](https://github.com/zkat/cipm/commit/db9c2e0)) +* zoomzoom. Even more concurrency! 
([db9c2e0](https://github.com/npm/libcipm/commit/db9c2e0)) -## [0.7.1](https://github.com/zkat/cipm/compare/v0.7.0...v0.7.1) (2017-10-13) +## [0.7.1](https://github.com/npm/libcipm/compare/v0.7.0...v0.7.1) (2017-10-13) ### Bug Fixes -* **scripts:** separate extract and build and fix ordering ([eb072a5](https://github.com/zkat/cipm/commit/eb072a5)) +* **scripts:** separate extract and build and fix ordering ([eb072a5](https://github.com/npm/libcipm/commit/eb072a5)) -# [0.7.0](https://github.com/zkat/cipm/compare/v0.6.0...v0.7.0) (2017-10-12) +# [0.7.0](https://github.com/npm/libcipm/compare/v0.6.0...v0.7.0) (2017-10-12) ### Bug Fixes -* **lockfile:** npm-shrinkwrap takes precedence over package-lock (#28) ([3b98fb3](https://github.com/zkat/cipm/commit/3b98fb3)) +* **lockfile:** npm-shrinkwrap takes precedence over package-lock (#28) ([3b98fb3](https://github.com/npm/libcipm/commit/3b98fb3)) ### Features -* **optional:** ignore failed optional deps (#27) ([a654629](https://github.com/zkat/cipm/commit/a654629)) +* **optional:** ignore failed optional deps (#27) ([a654629](https://github.com/npm/libcipm/commit/a654629)) -# [0.6.0](https://github.com/zkat/cipm/compare/v0.5.1...v0.6.0) (2017-10-09) +# [0.6.0](https://github.com/npm/libcipm/compare/v0.5.1...v0.6.0) (2017-10-09) ### Features -* **scripts:** run prepare and prepublish scripts in the root (#26) ([e0e35a3](https://github.com/zkat/cipm/commit/e0e35a3)) +* **scripts:** run prepare and prepublish scripts in the root (#26) ([e0e35a3](https://github.com/npm/libcipm/commit/e0e35a3)) -## [0.5.1](https://github.com/zkat/cipm/compare/v0.5.0...v0.5.1) (2017-10-09) +## [0.5.1](https://github.com/npm/libcipm/compare/v0.5.0...v0.5.1) (2017-10-09) -# [0.5.0](https://github.com/zkat/cipm/compare/v0.4.0...v0.5.0) (2017-10-09) +# [0.5.0](https://github.com/npm/libcipm/compare/v0.4.0...v0.5.0) (2017-10-09) ### Bug Fixes -* **output:** npm does not punctuate this ([e7ba976](https://github.com/zkat/cipm/commit/e7ba976)) -* **shutdown:** make sure workers close ([7ab57d0](https://github.com/zkat/cipm/commit/7ab57d0)) +* **output:** npm does not punctuate this ([e7ba976](https://github.com/npm/libcipm/commit/e7ba976)) +* **shutdown:** make sure workers close ([7ab57d0](https://github.com/npm/libcipm/commit/7ab57d0)) ### Features -* **bin:** link bins and run scripts (#25) ([fab74bf](https://github.com/zkat/cipm/commit/fab74bf)) -* **lifecycle:** run scripts in dep order (#23) ([68ecfac](https://github.com/zkat/cipm/commit/68ecfac)) +* **bin:** link bins and run scripts (#25) ([fab74bf](https://github.com/npm/libcipm/commit/fab74bf)) +* **lifecycle:** run scripts in dep order (#23) ([68ecfac](https://github.com/npm/libcipm/commit/68ecfac)) -# [0.4.0](https://github.com/zkat/cipm/compare/v0.3.2...v0.4.0) (2017-10-04) +# [0.4.0](https://github.com/npm/libcipm/compare/v0.3.2...v0.4.0) (2017-10-04) ### Features -* **opts:** support full range of relevant CLI opts (#19) ([6f2bd51](https://github.com/zkat/cipm/commit/6f2bd51)) +* **opts:** support full range of relevant CLI opts (#19) ([6f2bd51](https://github.com/npm/libcipm/commit/6f2bd51)) -## [0.3.2](https://github.com/zkat/cipm/compare/v0.3.1...v0.3.2) (2017-09-06) +## [0.3.2](https://github.com/npm/libcipm/compare/v0.3.1...v0.3.2) (2017-09-06) ### Bug Fixes -* **bin:** make cli executable by default (#13) ([14a9a5f](https://github.com/zkat/cipm/commit/14a9a5f)) -* **config:** use npm.cmd on win32 and fix tests (#12) ([d912d16](https://github.com/zkat/cipm/commit/d912d16)), closes 
[#12](https://github.com/zkat/cipm/issues/12) -* **json:** strip BOM when reading JSON files (#8) ([2529149](https://github.com/zkat/cipm/commit/2529149)) +* **bin:** make cli executable by default (#13) ([14a9a5f](https://github.com/npm/libcipm/commit/14a9a5f)) +* **config:** use npm.cmd on win32 and fix tests (#12) ([d912d16](https://github.com/npm/libcipm/commit/d912d16)), closes [#12](https://github.com/npm/libcipm/issues/12) +* **json:** strip BOM when reading JSON files (#8) ([2529149](https://github.com/npm/libcipm/commit/2529149)) -## [0.3.1](https://github.com/zkat/cipm/compare/v0.3.0...v0.3.1) (2017-09-05) +## [0.3.1](https://github.com/npm/libcipm/compare/v0.3.0...v0.3.1) (2017-09-05) -# [0.3.0](https://github.com/zkat/cipm/compare/v0.2.0...v0.3.0) (2017-09-05) +# [0.3.0](https://github.com/npm/libcipm/compare/v0.2.0...v0.3.0) (2017-09-05) ### Features -* **lockfile:** verify that lockfile matches package.json (#5) ([f631203](https://github.com/zkat/cipm/commit/f631203)) -* **scripts:** support --ignore-scripts option (#9) ([213ca02](https://github.com/zkat/cipm/commit/213ca02)) +* **lockfile:** verify that lockfile matches package.json (#5) ([f631203](https://github.com/npm/libcipm/commit/f631203)) +* **scripts:** support --ignore-scripts option (#9) ([213ca02](https://github.com/npm/libcipm/commit/213ca02)) -# [0.2.0](https://github.com/zkat/cipm/compare/v0.1.1...v0.2.0) (2017-09-01) +# [0.2.0](https://github.com/npm/libcipm/compare/v0.1.1...v0.2.0) (2017-09-01) ### Bug Fixes -* **main:** default --prefix ([ff06a31](https://github.com/zkat/cipm/commit/ff06a31)) +* **main:** default --prefix ([ff06a31](https://github.com/npm/libcipm/commit/ff06a31)) ### Features -* **lifecycle:** actually run lifecycle scripts correctly ([7f8933e](https://github.com/zkat/cipm/commit/7f8933e)) +* **lifecycle:** actually run lifecycle scripts correctly ([7f8933e](https://github.com/npm/libcipm/commit/7f8933e)) -## [0.1.1](https://github.com/zkat/cipm/compare/v0.1.0...v0.1.1) (2017-08-30) +## [0.1.1](https://github.com/npm/libcipm/compare/v0.1.0...v0.1.1) (2017-08-30) ### Bug Fixes -* **files:** oops. forgot to include new files in tarball ([1ee85c9](https://github.com/zkat/cipm/commit/1ee85c9)) +* **files:** oops. forgot to include new files in tarball ([1ee85c9](https://github.com/npm/libcipm/commit/1ee85c9)) @@ -390,13 +496,13 @@ All notable changes to this project will be documented in this file. 
See [standa ### Bug Fixes -* **config:** pipe stdout ([08e6af8](https://github.com/zkat/cipm/commit/08e6af8)) -* **extract:** make sure to extract properly ([9643583](https://github.com/zkat/cipm/commit/9643583)) -* **license:** switch to MIT ([0d10d0d](https://github.com/zkat/cipm/commit/0d10d0d)) +* **config:** pipe stdout ([08e6af8](https://github.com/npm/libcipm/commit/08e6af8)) +* **extract:** make sure to extract properly ([9643583](https://github.com/npm/libcipm/commit/9643583)) +* **license:** switch to MIT ([0d10d0d](https://github.com/npm/libcipm/commit/0d10d0d)) ### Features -* **impl:** rough prototype ([2970e43](https://github.com/zkat/cipm/commit/2970e43)) -* **lifecycle:** Run lifecycle events, implement prefix option, add unit tests (#1) ([d6629be](https://github.com/zkat/cipm/commit/d6629be)), closes [#1](https://github.com/zkat/cipm/issues/1) -* **opts:** add usage string and --help ([efcc48d](https://github.com/zkat/cipm/commit/efcc48d)) +* **impl:** rough prototype ([2970e43](https://github.com/npm/libcipm/commit/2970e43)) +* **lifecycle:** Run lifecycle events, implement prefix option, add unit tests (#1) ([d6629be](https://github.com/npm/libcipm/commit/d6629be)), closes [#1](https://github.com/npm/libcipm/issues/1) +* **opts:** add usage string and --help ([efcc48d](https://github.com/npm/libcipm/commit/efcc48d)) diff --git a/node_modules/libcipm/LICENSE.md b/node_modules/libcipm/LICENSE.md index 409d7d5a9c911..2ed9c0311c997 100644 --- a/node_modules/libcipm/LICENSE.md +++ b/node_modules/libcipm/LICENSE.md @@ -1,7 +1,19 @@ -Copyright 2017 Kat Marchán and Contributors +Copyright npm, Inc., Kat Marchán, and Contributors -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/node_modules/libcipm/README.md b/node_modules/libcipm/README.md index 00f9b81619bdb..3dd9cbe149220 100644 --- a/node_modules/libcipm/README.md +++ b/node_modules/libcipm/README.md @@ -1,6 +1,6 @@ -[![npm](https://img.shields.io/npm/v/libcipm.svg)](https://npm.im/libcipm) [![license](https://img.shields.io/npm/l/libcipm.svg)](https://npm.im/libcipm) [![Travis](https://img.shields.io/travis/zkat/cipm.svg)](https://travis-ci.org/zkat/cipm) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/cipm?svg=true)](https://ci.appveyor.com/project/zkat/cipm) [![Coverage Status](https://coveralls.io/repos/github/zkat/cipm/badge.svg?branch=latest)](https://coveralls.io/github/zkat/cipm?branch=latest) +[![npm](https://img.shields.io/npm/v/libcipm.svg)](https://npm.im/libcipm) [![license](https://img.shields.io/npm/l/libcipm.svg)](https://npm.im/libcipm) [![Travis](https://img.shields.io/travis/npm/libcipm.svg)](https://travis-ci.org/npm/libcipm) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/libcipm?svg=true)](https://ci.appveyor.com/project/npm/libcipm) [![Coverage Status](https://coveralls.io/repos/github/npm/libcipm/badge.svg?branch=latest)](https://coveralls.io/github/npm/libcipm?branch=latest) -[`libcipm`](https://github.com/zkat/cipm) installs npm projects in a way that's +[`libcipm`](https://github.com/npm/libcipm) installs npm projects in a way that's optimized for continuous integration/deployment/etc scenarios. It gives up the ability to build its own trees or install packages individually, as well as other user-oriented features, in exchange for speed, and being more strict diff --git a/node_modules/libcipm/index.js b/node_modules/libcipm/index.js index 9061ec4b18f02..42d05e1f8931c 100644 --- a/node_modules/libcipm/index.js +++ b/node_modules/libcipm/index.js @@ -5,6 +5,7 @@ const BB = require('bluebird') const binLink = require('bin-links') const buildLogicalTree = require('npm-logical-tree') const extract = require('./lib/extract.js') +const figgyPudding = require('figgy-pudding') const fs = require('graceful-fs') const getPrefix = require('find-npm-prefix') const lifecycle = require('npm-lifecycle') @@ -20,10 +21,26 @@ const statAsync = BB.promisify(fs.stat) const symlinkAsync = BB.promisify(fs.symlink) const writeFileAsync = BB.promisify(fs.writeFile) +const LifecycleOpts = figgyPudding({ + config: {}, + 'script-shell': {}, + scriptShell: 'script-shell', + 'ignore-scripts': {}, + ignoreScripts: 'ignore-scripts', + 'ignore-prepublish': {}, + ignorePrepublish: 'ignore-prepublish', + 'scripts-prepend-node-path': {}, + scriptsPrependNodePath: 'scripts-prepend-node-path', + 'unsafe-perm': {}, + unsafePerm: 'unsafe-perm', + prefix: {}, + dir: 'prefix', + failOk: { default: false } +}, { other () { return true } }) + class Installer { constructor (opts) { this.opts = opts - this.config = opts.config // Stats this.startTime = Date.now() @@ -80,17 +97,17 @@ class Installer { prepare () { this.log.info('prepare', 'initializing installer') - this.log.level = this.config.get('loglevel') + this.log.level = this.opts.loglevel this.log.verbose('prepare', 'starting workers') extract.startWorkers() return ( - this.config.get('prefix') && this.config.get('global') - ? 
BB.resolve(this.config.get('prefix')) + this.opts.prefix && this.opts.global + ? BB.resolve(this.opts.prefix) // There's some Special™ logic around the `--prefix` config when it // comes from a config file or env vs when it comes from the CLI : process.argv.some(arg => arg.match(/^\s*--prefix\s*/i)) - ? BB.resolve(this.config.get('prefix')) + ? BB.resolve(this.opts.prefix) : getPrefix(process.cwd()) ) .then(prefix => { @@ -120,7 +137,7 @@ class Installer { ) return BB.join( this.checkLock(), - stat && rimraf(path.join(this.prefix, 'node_modules')) + stat && rimraf(path.join(this.prefix, 'node_modules/*')) ) }).then(() => { // This needs to happen -after- we've done checkLock() @@ -203,7 +220,7 @@ class Installer { return next() } else { return BB.resolve(extract.child( - dep.name, dep, depPath, this.config, this.opts + dep.name, dep, depPath, this.opts )) .then(() => cg.completeWork(1)) .then(() => { this.pkgCount++ }) @@ -218,16 +235,19 @@ class Installer { checkDepEnv (dep) { const includeDev = ( // Covers --dev and --development (from npm config itself) - this.config.get('dev') || + this.opts.dev || ( - !/^prod(uction)?$/.test(this.config.get('only')) && - !this.config.get('production') + !/^prod(uction)?$/.test(this.opts.only) && + !this.opts.production ) || - /^dev(elopment)?$/.test(this.config.get('only')) || - /^dev(elopment)?$/.test(this.config.get('also')) + /^dev(elopment)?$/.test(this.opts.only) || + /^dev(elopment)?$/.test(this.opts.also) ) - const includeProd = !/^dev(elopment)?$/.test(this.config.get('only')) - return (dep.dev && includeDev) || (!dep.dev && includeProd) + const includeProd = !/^dev(elopment)?$/.test(this.opts.only) + const includeOptional = includeProd && this.opts.optional + return (dep.dev && includeDev) || + (dep.optional && includeOptional) || + (!dep.dev && !dep.optional && includeProd) } updateJson (tree) { @@ -274,14 +294,14 @@ class Installer { } return readPkgJson(path.join(depPath, 'package.json')) .then(pkg => binLink(pkg, depPath, false, { - force: this.config.get('force'), - ignoreScripts: this.config.get('ignore-scripts'), + force: this.opts.force, + ignoreScripts: this.opts['ignore-scripts'], log: Object.assign({}, this.log, { info: () => {} }), name: pkg.name, pkgId: pkg.name + '@' + pkg.version, prefix: this.prefix, prefixes: [this.prefix], - umask: this.config.get('umask') + umask: this.opts.umask }), e => { this.log.verbose('buildTree', `error linking ${spec}: ${e.message} ${e.stack}`) }) @@ -312,7 +332,7 @@ class Installer { .then(from => npa.resolve(dep.name, from)) .then(from => { pkg._from = from.toString() }) .then(() => writeFileAsync(depPkgPath, JSON.stringify(pkg, null, 2))) - .then(pkg) + .then(() => pkg) } updateInstallScript (dep, pkg) { @@ -327,7 +347,7 @@ class Installer { pkg.scripts.install = 'node-gyp rebuild' } }) - .then(pkg) + .then(() => pkg) } // A cute little mark-and-sweep collector! @@ -346,18 +366,25 @@ class Installer { runScript (stage, pkg, pkgPath) { const start = Date.now() - if (!this.config.get('ignore-scripts')) { + if (!this.opts['ignore-scripts']) { // TODO(mikesherov): remove pkg._id when npm-lifecycle no longer relies on it pkg._id = pkg.name + '@' + pkg.version - const opts = this.config.toLifecycle() - return BB.resolve(lifecycle(pkg, stage, pkgPath, opts)) - .tap(() => { this.timings.scripts += Date.now() - start }) + return BB.resolve(lifecycle( + pkg, stage, pkgPath, LifecycleOpts(this.opts).concat({ + // TODO: can be removed once npm-lifecycle is updated to modern + // config practices. 
+ config: Object.assign({}, this.opts, { + log: null, + dirPacker: null + }), + dir: this.prefix + })) + ).tap(() => { this.timings.scripts += Date.now() - start }) } return BB.resolve() } } module.exports = Installer -module.exports.CipmConfig = require('./lib/config/npm-config.js').CipmConfig function mark (tree, failed) { const liveDeps = new Set() diff --git a/node_modules/libcipm/lib/config/lifecycle-opts.js b/node_modules/libcipm/lib/config/lifecycle-opts.js deleted file mode 100644 index 7d574597798e1..0000000000000 --- a/node_modules/libcipm/lib/config/lifecycle-opts.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -const log = require('npmlog') - -module.exports = lifecycleOpts -function lifecycleOpts (opts) { - const objConfig = {} - for (const key of opts.keys()) { - const val = opts.get(key) - if (val != null) { - objConfig[key] = val - } - } - return { - config: objConfig, - scriptShell: opts.get('script-shell'), - force: opts.get('force'), - user: opts.get('user'), - group: opts.get('group'), - ignoreScripts: opts.get('ignore-scripts'), - ignorePrepublish: opts.get('ignore-prepublish'), - scriptsPrependNodePath: opts.get('scripts-prepend-node-path'), - unsafePerm: opts.get('unsafe-perm'), - log, - dir: opts.get('prefix'), - failOk: false, - production: opts.get('production') - } -} diff --git a/node_modules/libcipm/lib/config/npm-config.js b/node_modules/libcipm/lib/config/npm-config.js index 76b4054eef3c4..a051190619925 100644 --- a/node_modules/libcipm/lib/config/npm-config.js +++ b/node_modules/libcipm/lib/config/npm-config.js @@ -1,40 +1,22 @@ 'use strict' const BB = require('bluebird') -const lifecycleOpts = require('./lifecycle-opts.js') -const pacoteOpts = require('./pacote-opts.js') -const protoduck = require('protoduck') -const spawn = require('child_process').spawn -class NpmConfig extends Map {} +const fs = require('fs') +const figgyPudding = require('figgy-pudding') +const ini = require('ini') +const path = require('path') +const spawn = require('child_process').spawn -const CipmConfig = protoduck.define({ - get: [], - set: [], - toPacote: [], - toLifecycle: [] -}, { - name: 'CipmConfig' -}) -module.exports.CipmConfig = CipmConfig +const readFileAsync = BB.promisify(fs.readFile) -CipmConfig.impl(NpmConfig, { - get: Map.prototype.get, - set: Map.prototype.set, - toPacote (opts) { - return pacoteOpts(this, opts) - }, - toLifecycle () { - return lifecycleOpts(this) - } +const NpmConfig = figgyPudding({ + cache: { default: '' }, + then: {}, + userconfig: {} }) -module.exports.fromObject = fromObj -function fromObj (obj) { - const map = new NpmConfig() - Object.keys(obj).forEach(k => map.set(k, obj[k])) - return map -} +module.exports = NpmConfig module.exports.fromNpm = getNpmConfig function getNpmConfig (argv) { @@ -62,11 +44,41 @@ function getNpmConfig (argv) { reject(new Error('`npm` command not found. Please ensure you have npm@5.4.0 or later installed.')) } else { try { - resolve(fromObj(JSON.parse(stdout))) + resolve(JSON.parse(stdout)) } catch (e) { reject(new Error('`npm config ls --json` failed to output json. Please ensure you have npm@5.4.0 or later installed.')) } } }) + }).then(opts => { + return BB.all( + process.cwd().split(path.sep).reduce((acc, next) => { + acc.path = path.join(acc.path, next) + acc.promises.push(maybeReadIni(path.join(acc.path, '.npmrc'))) + acc.promises.push(maybeReadIni(path.join(acc.path, 'npmrc'))) + return acc + }, { + path: '', + promises: [] + }).promises.concat( + opts.userconfig ? 
maybeReadIni(opts.userconfig) : {} + ) + ).then(configs => NpmConfig(...configs, opts)) + }).then(opts => { + if (opts.cache) { + return opts.concat({ cache: path.join(opts.cache, '_cacache') }) + } else { + return opts + } }) } + +function maybeReadIni (f) { + return readFileAsync(f, 'utf8').catch(err => { + if (err.code === 'ENOENT') { + return '' + } else { + throw err + } + }).then(ini.parse) +} diff --git a/node_modules/libcipm/lib/config/pacote-opts.js b/node_modules/libcipm/lib/config/pacote-opts.js deleted file mode 100644 index 234ed1352a353..0000000000000 --- a/node_modules/libcipm/lib/config/pacote-opts.js +++ /dev/null @@ -1,135 +0,0 @@ -'use strict' - -const Buffer = require('safe-buffer').Buffer - -const crypto = require('crypto') -const path = require('path') - -let effectiveOwner - -const npmSession = crypto.randomBytes(8).toString('hex') - -module.exports = pacoteOpts -function pacoteOpts (npmOpts, moreOpts) { - const ownerStats = calculateOwner() - const opts = { - cache: path.join(npmOpts.get('cache'), '_cacache'), - ca: npmOpts.get('ca'), - cert: npmOpts.get('cert'), - git: npmOpts.get('git'), - key: npmOpts.get('key'), - localAddress: npmOpts.get('local-address'), - loglevel: npmOpts.get('loglevel'), - maxSockets: +(npmOpts.get('maxsockets') || 15), - npmSession: npmSession, - offline: npmOpts.get('offline'), - projectScope: moreOpts.rootPkg && getProjectScope(moreOpts.rootPkg.name), - proxy: npmOpts.get('https-proxy') || npmOpts.get('proxy'), - refer: 'cipm', - registry: npmOpts.get('registry'), - retry: { - retries: npmOpts.get('fetch-retries'), - factor: npmOpts.get('fetch-retry-factor'), - minTimeout: npmOpts.get('fetch-retry-mintimeout'), - maxTimeout: npmOpts.get('fetch-retry-maxtimeout') - }, - strictSSL: npmOpts.get('strict-ssl'), - userAgent: npmOpts.get('user-agent'), - - dmode: parseInt('0777', 8) & (~npmOpts.get('umask')), - fmode: parseInt('0666', 8) & (~npmOpts.get('umask')), - umask: npmOpts.get('umask') - } - - if (ownerStats.uid != null || ownerStats.gid != null) { - Object.assign(opts, ownerStats) - } - - (npmOpts.forEach ? Array.from(npmOpts.keys()) : npmOpts.keys).forEach(k => { - const authMatchGlobal = k.match( - /^(_authToken|username|_password|password|email|always-auth|_auth)$/ - ) - const authMatchScoped = k[0] === '/' && k.match( - /(.*):(_authToken|username|_password|password|email|always-auth|_auth)$/ - ) - - // if it matches scoped it will also match global - if (authMatchGlobal || authMatchScoped) { - let nerfDart = null - let key = null - let val = null - - if (!opts.auth) { opts.auth = {} } - - if (authMatchScoped) { - nerfDart = authMatchScoped[1] - key = authMatchScoped[2] - val = npmOpts.get(k) - if (!opts.auth[nerfDart]) { - opts.auth[nerfDart] = { - alwaysAuth: !!npmOpts.get('always-auth') - } - } - } else { - key = authMatchGlobal[1] - val = npmOpts.get(k) - opts.auth.alwaysAuth = !!npmOpts.get('always-auth') - } - - const auth = authMatchScoped ? opts.auth[nerfDart] : opts.auth - if (key === '_authToken') { - auth.token = val - } else if (key.match(/password$/i)) { - auth.password = - // the config file stores password auth already-encoded. pacote expects - // the actual username/password pair. - Buffer.from(val, 'base64').toString('utf8') - } else if (key === 'always-auth') { - auth.alwaysAuth = val === 'false' ? 
false : !!val - } else { - auth[key] = val - } - } - - if (k[0] === '@') { - if (!opts.scopeTargets) { opts.scopeTargets = {} } - opts.scopeTargets[k.replace(/:registry$/, '')] = npmOpts.get(k) - } - }) - - Object.keys(moreOpts || {}).forEach((k) => { - opts[k] = moreOpts[k] - }) - - return opts -} - -function calculateOwner () { - if (!effectiveOwner) { - effectiveOwner = { uid: 0, gid: 0 } - - // Pretty much only on windows - if (!process.getuid) { - return effectiveOwner - } - - effectiveOwner.uid = +process.getuid() - effectiveOwner.gid = +process.getgid() - - if (effectiveOwner.uid === 0) { - if (process.env.SUDO_UID) effectiveOwner.uid = +process.env.SUDO_UID - if (process.env.SUDO_GID) effectiveOwner.gid = +process.env.SUDO_GID - } - } - - return effectiveOwner -} - -function getProjectScope (pkgName) { - const sep = pkgName.indexOf('/') - if (sep === -1) { - return '' - } else { - return pkgName.slice(0, sep) - } -} diff --git a/node_modules/libcipm/lib/extract.js b/node_modules/libcipm/lib/extract.js index 9166ebc058d01..f87d2c791d40e 100644 --- a/node_modules/libcipm/lib/extract.js +++ b/node_modules/libcipm/lib/extract.js @@ -2,45 +2,57 @@ const BB = require('bluebird') -const npa = require('npm-package-arg') -const workerFarm = require('worker-farm') - const extractionWorker = require('./worker.js') +const figgyPudding = require('figgy-pudding') +const npa = require('npm-package-arg') const WORKER_PATH = require.resolve('./worker.js') +let workerFarm + +// Broken for now, cause too many issues on some systems. +const ENABLE_WORKERS = false + +const ExtractOpts = figgyPudding({ + log: {}, + dirPacker: {} +}) module.exports = { startWorkers () { - this._workers = workerFarm({ - maxConcurrentCallsPerWorker: 20, - maxRetries: 1 - }, WORKER_PATH) + if (ENABLE_WORKERS) { + if (!workerFarm) { workerFarm = require('worker-farm') } + this._workers = workerFarm({ + maxConcurrentCallsPerWorker: 20, + maxRetries: 1 + }, WORKER_PATH) + } }, stopWorkers () { - workerFarm.end(this._workers) + if (ENABLE_WORKERS) { + if (!workerFarm) { workerFarm = require('worker-farm') } + workerFarm.end(this._workers) + } }, - child (name, child, childPath, config, opts) { + child (name, child, childPath, opts) { + opts = ExtractOpts(opts) const spec = npa.resolve(name, child.version) - const additionalToPacoteOpts = {} - if (typeof opts.dirPacker !== 'undefined') { - additionalToPacoteOpts.dirPacker = opts.dirPacker - } - const childOpts = config.toPacote(Object.assign({ + let childOpts = opts.concat({ integrity: child.integrity, resolved: child.resolved - }, additionalToPacoteOpts)) + }) const args = [spec, childPath, childOpts] return BB.fromNode((cb) => { let launcher = extractionWorker let msg = args const spec = typeof args[0] === 'string' ? npa(args[0]) : args[0] - childOpts.loglevel = opts.log.level - if (spec.registry || spec.type === 'remote') { + if (ENABLE_WORKERS && (spec.registry || spec.type === 'remote')) { + if (!workerFarm) { workerFarm = require('worker-farm') } // We can't serialize these options - childOpts.config = null - childOpts.log = null - childOpts.dirPacker = null + childOpts = childOpts.concat({ + log: null, + dirPacker: null + }) // workers will run things in parallel! 
launcher = this._workers try { diff --git a/node_modules/libcipm/lib/worker.js b/node_modules/libcipm/lib/worker.js index ac61b06236b63..bab607e527879 100644 --- a/node_modules/libcipm/lib/worker.js +++ b/node_modules/libcipm/lib/worker.js @@ -2,7 +2,7 @@ const BB = require('bluebird') -const log = require('npmlog') +// const log = require('npmlog') const pacote = require('pacote') module.exports = (args, cb) => { @@ -10,7 +10,7 @@ module.exports = (args, cb) => { const spec = parsed[0] const extractTo = parsed[1] const opts = parsed[2] - opts.log = log - log.level = opts.loglevel + // opts.log = log + // log.level = opts.loglevel return BB.resolve(pacote.extract(spec, extractTo, opts)).nodeify(cb) } diff --git a/node_modules/libcipm/package.json b/node_modules/libcipm/package.json index a934f18df5d35..1411b40950004 100644 --- a/node_modules/libcipm/package.json +++ b/node_modules/libcipm/package.json @@ -1,34 +1,34 @@ { - "_from": "libcipm@2.0.2", - "_id": "libcipm@2.0.2", + "_from": "libcipm@4.0.7", + "_id": "libcipm@4.0.7", "_inBundle": false, - "_integrity": "sha512-9uZ6/LAflVEijksTRq/RX0e+pGA4mr8tND9Cmk2JMg7j2fFUBrs8PpFX2DOAJR/XoxPzz+5h8bkWmtIYLunKAg==", + "_integrity": "sha512-fTq33otU3PNXxxCTCYCYe7V96o59v/o7bvtspmbORXpgFk+wcWrGf5x6tBgui5gCed/45/wtPomBsZBYm5KbIw==", "_location": "/libcipm", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "libcipm@2.0.2", + "raw": "libcipm@4.0.7", "name": "libcipm", "escapedName": "libcipm", - "rawSpec": "2.0.2", + "rawSpec": "4.0.7", "saveSpec": null, - "fetchSpec": "2.0.2" + "fetchSpec": "4.0.7" }, "_requiredBy": [ "#USER", "/" ], - "_resolved": "https://registry.npmjs.org/libcipm/-/libcipm-2.0.2.tgz", - "_shasum": "4f38c2b37acf2ec156936cef1cbf74636568fc7b", - "_spec": "libcipm@2.0.2", - "_where": "/Users/zkat/Documents/code/work/npm", + "_resolved": "https://registry.npmjs.org/libcipm/-/libcipm-4.0.7.tgz", + "_shasum": "76cd675c98bdaae64db88b782b01b804b6d02c8a", + "_spec": "libcipm@4.0.7", + "_where": "/Users/mperrotte/npminc/cli", "author": { "name": "Kat Marchán", "email": "kzm@sykosomatic.org" }, "bugs": { - "url": "https://github.com/zkat/cipm/issues" + "url": "https://github.com/npm/libcipm/issues" }, "bundleDependencies": false, "config": { @@ -42,15 +42,16 @@ "dependencies": { "bin-links": "^1.1.2", "bluebird": "^3.5.1", + "figgy-pudding": "^3.5.1", "find-npm-prefix": "^1.0.2", "graceful-fs": "^4.1.11", + "ini": "^1.3.5", "lock-verify": "^2.0.2", "mkdirp": "^0.5.1", - "npm-lifecycle": "^2.0.3", + "npm-lifecycle": "^3.0.0", "npm-logical-tree": "^1.2.1", "npm-package-arg": "^6.1.0", - "pacote": "^8.1.6", - "protoduck": "^5.0.0", + "pacote": "^9.1.0", "read-package-json": "^2.0.13", "rimraf": "^2.6.2", "worker-farm": "^1.6.0" @@ -72,7 +73,7 @@ "*.js", "lib" ], - "homepage": "https://github.com/zkat/cipm#readme", + "homepage": "https://github.com/npm/libcipm#readme", "keywords": [ "npm", "package manager", @@ -84,7 +85,7 @@ "name": "libcipm", "repository": { "type": "git", - "url": "git+https://github.com/zkat/cipm.git" + "url": "git+https://github.com/npm/libcipm.git" }, "scripts": { "postrelease": "npm publish && git push --follow-tags", @@ -95,5 +96,5 @@ "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", "update-contrib": "weallcontribute -o . 
&& git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" }, - "version": "2.0.2" + "version": "4.0.7" } diff --git a/node_modules/libnpm/CHANGELOG.md b/node_modules/libnpm/CHANGELOG.md new file mode 100644 index 0000000000000..bb3a52a36f93d --- /dev/null +++ b/node_modules/libnpm/CHANGELOG.md @@ -0,0 +1,113 @@ +# Change Log + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + + +## [3.0.1](https://github.com/npm/libnpm/compare/v3.0.0...v3.0.1) (2019-07-16) + + + + +# [3.0.0](https://github.com/npm/libnpm/compare/v2.0.1...v3.0.0) (2019-07-10) + + +* npm-lifecycle@3.0.0 ([56cc8e5](https://github.com/npm/libnpm/commit/56cc8e5)) + + +### BREAKING CHANGES + +* requires updating node-gyp in npm/cli + + + + +## [2.0.1](https://github.com/npm/libnpm/compare/v2.0.0...v2.0.1) (2018-12-05) + + +### Bug Fixes + +* **read-json:** use bluebird for promisification ([8dddde6](https://github.com/npm/libnpm/commit/8dddde6)) + + + + +# [2.0.0](https://github.com/npm/libnpm/compare/v1.5.0...v2.0.0) (2018-11-27) + + +### deps + +* bump all libs ([83ae929](https://github.com/npm/libnpm/commit/83ae929)) + + +### BREAKING CHANGES + +* This includes a breaking libnpmaccess patch + + + + +# [1.5.0](https://github.com/npm/libnpm/compare/v1.4.0...v1.5.0) (2018-11-26) + + +### Features + +* **pacote:** minimal requires for pacote-related APIs ([e19ce11](https://github.com/npm/libnpm/commit/e19ce11)) + + + + +# [1.4.0](https://github.com/npm/libnpm/compare/v1.3.0...v1.4.0) (2018-11-13) + + +### Features + +* **libnpm:** add support for partial requires ([7ba10a7](https://github.com/npm/libnpm/commit/7ba10a7)) + + + + +# [1.3.0](https://github.com/npm/libnpm/compare/v1.2.0...v1.3.0) (2018-11-07) + + +### Features + +* **bin:** add binLinks lib ([2f4d551](https://github.com/npm/libnpm/commit/2f4d551)) + + + + +# [1.2.0](https://github.com/npm/libnpm/compare/v1.1.0...v1.2.0) (2018-11-07) + + +### Features + +* **log:** add npmlog to the bundle ([c20abd1](https://github.com/npm/libnpm/commit/c20abd1)) + + + + +# [1.1.0](https://github.com/npm/libnpm/compare/v1.0.0...v1.1.0) (2018-11-07) + + +### Features + +* **config:** add libnpmconfig ([6a44725](https://github.com/npm/libnpm/commit/6a44725)) +* **json+tree:** add read-package-json and npm-logical-tree ([0198a91](https://github.com/npm/libnpm/commit/0198a91)) +* **lock+prefix:** add lock-verify and find-npm-prefix ([00750c9](https://github.com/npm/libnpm/commit/00750c9)) +* **parseArg:** add npm-package-arg ([5712614](https://github.com/npm/libnpm/commit/5712614)) +* **stringify:** add stringify-package ([0ec5bba](https://github.com/npm/libnpm/commit/0ec5bba)) + + + + +# [1.0.0](https://github.com/npm/libnpm/compare/v0.0.1...v1.0.0) (2018-08-31) + + +### Features + +* **api:** document and export libnpm api ([f85f8f8](https://github.com/npm/libnpm/commit/f85f8f8)) + + + + +## 0.0.1 (2018-04-04) diff --git a/node_modules/libnpmhook/node_modules/npm-registry-fetch/LICENSE.md b/node_modules/libnpm/LICENSE.md similarity index 100% rename from node_modules/libnpmhook/node_modules/npm-registry-fetch/LICENSE.md rename to node_modules/libnpm/LICENSE.md diff --git a/node_modules/libnpm/README.md b/node_modules/libnpm/README.md new file mode 100644 index 0000000000000..ce35f5da1995d --- /dev/null +++ b/node_modules/libnpm/README.md @@ -0,0 +1,57 @@ +# libnpm + +[`libnpm`](https://github.com/npm/libnpm) is the programmatic 
API for npm. + +For bug reports and support, please head over to [npm.community](https://npm.community). + + +## Install + +`$ npm install libnpm` + +## Table of Contents + +* [Example](#example) +* [Features](#features) +* [API](#api) + * Fetching Packages and Their Info + * [`manifest`](https://www.npmjs.com/package/pacote#manifest) + * [`packument`](https://www.npmjs.com/package/pacote#packument) + * [`tarball`](https://www.npmjs.com/package/pacote#tarball) + * [`extract`](https://www.npmjs.com/package/pacote#extract) + * [`search`](https://npm.im/libnpmsearch) + * Package-related Registry APIs + * [`publish`]() + * [`unpublish`](#unpublish) + * [`access`](https://npm.im/libnpmaccess) + * Account-related Registry APIs + * [`login`](https://www.npmjs.com/package/npm-profile#login) + * [`adduser`](https://www.npmjs.com/package/npm-profile#adduser) + * [`profile`](https://npm.im/npm-profile) + * [`hook`](https://npm.im/libnpmhook) + * [`team`](https://npm.im/libnpmteam) + * [`org`](https://npm.im/libnpmorg) + * Miscellaneous + * [`parseArg`](https://npm.im/npm-package-arg) + * [`config`](https://npm.im/libnpmconfig) + * [`readJSON`](https://npm.im/read-package-json) + * [`verifyLock`](https://npm.im/lock-verify) + * [`getPrefix`](https://npm.im/find-npm-prefix) + * [`logicalTree`](https://npm.im/npm-logical-tree) + * [`stringifyPackage`](https://npm.im/stringify-package) + * [`runScript`](https://www.npmjs.com/package/npm-lifecycle) + * [`log`](https://npm.im/npmlog) + * [`fetch`](https://npm.im/npm-registry-fetch) (plain ol' client for registry interaction) + * [`linkBin`](https://npm.im/bin-links) + +### Example + +```javascript +await libnpm.manifest('libnpm') // => Manifest { name: 'libnpm', ... } +``` + +### API + +This package re-exports the APIs from other packages for convenience. Refer to +the [table of contents](#table-of-contents) for detailed documentation on each +individual exported API. 
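
The libnpm README above lists the re-exported APIs but does not show how they compose. Below is a minimal, hypothetical usage sketch (not part of this patch), assuming libnpm@3.x as vendored in this diff; the spec strings and the `example` function name are illustrative only. `parseArg` and `manifest` map to the `parse-arg.js` and `manifest.js` shims added later in this diff.

```javascript
'use strict'

// Hypothetical sketch, not part of the patch: combining two re-exported
// libnpm APIs. Assumes libnpm@3.x; the spec strings are illustrative.
const libnpm = require('libnpm')

async function example () {
  // `parseArg` re-exports npm-package-arg (see parse-arg.js below)
  const spec = libnpm.parseArg('libnpm@^3.0.0')
  console.log(spec.name, spec.fetchSpec) // => 'libnpm' '^3.0.0'

  // `manifest` re-exports pacote's manifest API (see manifest.js below)
  const manifest = await libnpm.manifest(spec)
  console.log(manifest.version)
}

example().catch(err => {
  console.error(err)
  process.exit(1)
})
```
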
diff --git a/node_modules/libnpm/access.js b/node_modules/libnpm/access.js new file mode 100644 index 0000000000000..4b164226a31f6 --- /dev/null +++ b/node_modules/libnpm/access.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('libnpmaccess') diff --git a/node_modules/libnpm/adduser.js b/node_modules/libnpm/adduser.js new file mode 100644 index 0000000000000..e57dbeaf9baf2 --- /dev/null +++ b/node_modules/libnpm/adduser.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('npm-profile').adduser diff --git a/node_modules/libnpm/config.js b/node_modules/libnpm/config.js new file mode 100644 index 0000000000000..51ff1edee8b7c --- /dev/null +++ b/node_modules/libnpm/config.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('libnpmconfig') diff --git a/node_modules/libnpm/extract.js b/node_modules/libnpm/extract.js new file mode 100644 index 0000000000000..4f3aa3d7065c4 --- /dev/null +++ b/node_modules/libnpm/extract.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('pacote/extract') diff --git a/node_modules/libnpm/fetch.js b/node_modules/libnpm/fetch.js new file mode 100644 index 0000000000000..0e5ccd8804297 --- /dev/null +++ b/node_modules/libnpm/fetch.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('npm-registry-fetch') diff --git a/node_modules/libnpm/get-prefix.js b/node_modules/libnpm/get-prefix.js new file mode 100644 index 0000000000000..86bf85862c734 --- /dev/null +++ b/node_modules/libnpm/get-prefix.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('find-npm-prefix') diff --git a/node_modules/libnpm/hook.js b/node_modules/libnpm/hook.js new file mode 100644 index 0000000000000..a45644beeaa6c --- /dev/null +++ b/node_modules/libnpm/hook.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('libnpmhook') diff --git a/node_modules/libnpm/index.js b/node_modules/libnpm/index.js new file mode 100644 index 0000000000000..19c0419e8917f --- /dev/null +++ b/node_modules/libnpm/index.js @@ -0,0 +1,29 @@ +'use strict' + +module.exports = { + config: require('./config.js'), + parseArg: require('./parse-arg.js'), + readJSON: require('./read-json.js'), + logicalTree: require('./logical-tree.js'), + getPrefix: require('./get-prefix.js'), + verifyLock: require('./verify-lock.js'), + stringifyPackage: require('./stringify-package.js'), + manifest: require('./manifest.js'), + tarball: require('./tarball.js'), + extract: require('./extract.js'), + packument: require('./packument.js'), + hook: require('./hook.js'), + access: require('./access.js'), + search: require('./search.js'), + team: require('./team.js'), + org: require('./org.js'), + fetch: require('./fetch.js'), + login: require('./login.js'), + adduser: require('./adduser.js'), + profile: require('./profile.js'), + publish: require('./publish.js'), + unpublish: require('./unpublish.js'), + runScript: require('./run-script.js'), + log: require('./log.js'), + linkBin: require('./link-bin.js') +} diff --git a/node_modules/libnpm/link-bin.js b/node_modules/libnpm/link-bin.js new file mode 100644 index 0000000000000..4d7d35c7315f8 --- /dev/null +++ b/node_modules/libnpm/link-bin.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('bin-links') diff --git a/node_modules/libnpm/log.js b/node_modules/libnpm/log.js new file mode 100644 index 0000000000000..f935c6242dd74 --- /dev/null +++ b/node_modules/libnpm/log.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('npmlog') diff --git a/node_modules/libnpm/logical-tree.js b/node_modules/libnpm/logical-tree.js new 
file mode 100644 index 0000000000000..a08e7737a62ce --- /dev/null +++ b/node_modules/libnpm/logical-tree.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('npm-logical-tree') diff --git a/node_modules/libnpm/login.js b/node_modules/libnpm/login.js new file mode 100644 index 0000000000000..fbd61e9a2f8da --- /dev/null +++ b/node_modules/libnpm/login.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('npm-profile').login diff --git a/node_modules/libnpm/manifest.js b/node_modules/libnpm/manifest.js new file mode 100644 index 0000000000000..863b004e7f27d --- /dev/null +++ b/node_modules/libnpm/manifest.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('pacote/manifest') diff --git a/node_modules/libnpm/org.js b/node_modules/libnpm/org.js new file mode 100644 index 0000000000000..96b15aac0d815 --- /dev/null +++ b/node_modules/libnpm/org.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('libnpmorg') diff --git a/node_modules/libnpm/package.json b/node_modules/libnpm/package.json new file mode 100644 index 0000000000000..9fb5641b2d9a1 --- /dev/null +++ b/node_modules/libnpm/package.json @@ -0,0 +1,94 @@ +{ + "_from": "libnpm@3.0.1", + "_id": "libnpm@3.0.1", + "_inBundle": false, + "_integrity": "sha512-d7jU5ZcMiTfBqTUJVZ3xid44fE5ERBm9vBnmhp2ECD2Ls+FNXWxHSkO7gtvrnbLO78gwPdNPz1HpsF3W4rjkBQ==", + "_location": "/libnpm", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "libnpm@3.0.1", + "name": "libnpm", + "escapedName": "libnpm", + "rawSpec": "3.0.1", + "saveSpec": null, + "fetchSpec": "3.0.1" + }, + "_requiredBy": [ + "#USER", + "/" + ], + "_resolved": "https://registry.npmjs.org/libnpm/-/libnpm-3.0.1.tgz", + "_shasum": "0be11b4c9dd4d1ffd7d95c786e92e55d65be77a2", + "_spec": "libnpm@3.0.1", + "_where": "/Users/isaacs/dev/npm/cli", + "author": { + "name": "Kat Marchán", + "email": "kzm@zkat.tech" + }, + "bugs": { + "url": "https://github.com/npm/libnpm/issues" + }, + "bundleDependencies": false, + "dependencies": { + "bin-links": "^1.1.2", + "bluebird": "^3.5.3", + "find-npm-prefix": "^1.0.2", + "libnpmaccess": "^3.0.2", + "libnpmconfig": "^1.2.1", + "libnpmhook": "^5.0.3", + "libnpmorg": "^1.0.1", + "libnpmpublish": "^1.1.2", + "libnpmsearch": "^2.0.2", + "libnpmteam": "^1.0.2", + "lock-verify": "^2.0.2", + "npm-lifecycle": "^3.0.0", + "npm-logical-tree": "^1.2.1", + "npm-package-arg": "^6.1.0", + "npm-profile": "^4.0.2", + "npm-registry-fetch": "^4.0.0", + "npmlog": "^4.1.2", + "pacote": "^9.5.3", + "read-package-json": "^2.0.13", + "stringify-package": "^1.0.0" + }, + "deprecated": false, + "description": "Collection of programmatic APIs for the npm CLI", + "devDependencies": { + "nock": "^9.2.3", + "standard": "^11.0.1", + "standard-version": "^4.3.0", + "tap": "^12.0.1", + "weallbehave": "^1.2.0", + "weallcontribute": "^1.0.8" + }, + "files": [ + "*.js", + "lib" + ], + "homepage": "https://github.com/npm/libnpm#readme", + "keywords": [ + "npm", + "api", + "package manager", + "lib" + ], + "license": "ISC", + "main": "index.js", + "name": "libnpm", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/libnpm.git" + }, + "scripts": { + "postrelease": "npm publish && git push --follow-tags", + "prerelease": "npm t", + "pretest": "standard", + "release": "standard-version -s", + "test": "tap -J --coverage test/*.js", + "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", + "update-contrib": "weallcontribute -o . 
&& git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" + }, + "version": "3.0.1" +} diff --git a/node_modules/libnpm/packument.js b/node_modules/libnpm/packument.js new file mode 100644 index 0000000000000..f852a3fe26b60 --- /dev/null +++ b/node_modules/libnpm/packument.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('pacote/packument') diff --git a/node_modules/libnpm/parse-arg.js b/node_modules/libnpm/parse-arg.js new file mode 100644 index 0000000000000..6db5201504f39 --- /dev/null +++ b/node_modules/libnpm/parse-arg.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('npm-package-arg') diff --git a/node_modules/libnpm/profile.js b/node_modules/libnpm/profile.js new file mode 100644 index 0000000000000..6df6b5e23abc9 --- /dev/null +++ b/node_modules/libnpm/profile.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('npm-profile') diff --git a/node_modules/libnpm/publish.js b/node_modules/libnpm/publish.js new file mode 100644 index 0000000000000..bcdbdeb2a8ebd --- /dev/null +++ b/node_modules/libnpm/publish.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('libnpmpublish').publish diff --git a/node_modules/libnpm/read-json.js b/node_modules/libnpm/read-json.js new file mode 100644 index 0000000000000..b59eb9d45a0be --- /dev/null +++ b/node_modules/libnpm/read-json.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('bluebird').promisify(require('read-package-json')) diff --git a/node_modules/libnpm/run-script.js b/node_modules/libnpm/run-script.js new file mode 100644 index 0000000000000..11765d40ae553 --- /dev/null +++ b/node_modules/libnpm/run-script.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('npm-lifecycle') diff --git a/node_modules/libnpm/search.js b/node_modules/libnpm/search.js new file mode 100644 index 0000000000000..172b10b7caf16 --- /dev/null +++ b/node_modules/libnpm/search.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('libnpmsearch') diff --git a/node_modules/libnpm/stringify-package.js b/node_modules/libnpm/stringify-package.js new file mode 100644 index 0000000000000..e7f3bfcbb719e --- /dev/null +++ b/node_modules/libnpm/stringify-package.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('stringify-package') diff --git a/node_modules/libnpm/tarball.js b/node_modules/libnpm/tarball.js new file mode 100644 index 0000000000000..cc1b2e54a49fb --- /dev/null +++ b/node_modules/libnpm/tarball.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('pacote/tarball') diff --git a/node_modules/libnpm/team.js b/node_modules/libnpm/team.js new file mode 100644 index 0000000000000..d407f796f4e44 --- /dev/null +++ b/node_modules/libnpm/team.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('libnpmteam') diff --git a/node_modules/libnpm/unpublish.js b/node_modules/libnpm/unpublish.js new file mode 100644 index 0000000000000..bc0d34c9a09cb --- /dev/null +++ b/node_modules/libnpm/unpublish.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('libnpmpublish').unpublish diff --git a/node_modules/libnpm/verify-lock.js b/node_modules/libnpm/verify-lock.js new file mode 100644 index 0000000000000..e396756419c7a --- /dev/null +++ b/node_modules/libnpm/verify-lock.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('lock-verify') diff --git a/node_modules/libnpmaccess/.travis.yml b/node_modules/libnpmaccess/.travis.yml new file mode 100644 index 0000000000000..db5ea8b018640 --- /dev/null +++ b/node_modules/libnpmaccess/.travis.yml @@ 
-0,0 +1,7 @@ +language: node_js +sudo: false +node_js: + - "10" + - "9" + - "8" + - "6" diff --git a/node_modules/libnpmaccess/CHANGELOG.md b/node_modules/libnpmaccess/CHANGELOG.md new file mode 100644 index 0000000000000..cbec879a7db2e --- /dev/null +++ b/node_modules/libnpmaccess/CHANGELOG.md @@ -0,0 +1,129 @@ +# Change Log + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + + +## [3.0.2](https://github.com/npm/libnpmaccess/compare/v3.0.1...v3.0.2) (2019-07-16) + + + + +## [3.0.1](https://github.com/npm/libnpmaccess/compare/v3.0.0...v3.0.1) (2018-11-12) + + +### Bug Fixes + +* **ls-packages:** fix confusing splitEntity arg check ([1769090](https://github.com/npm/libnpmaccess/commit/1769090)) + + + + +# [3.0.0](https://github.com/npm/libnpmaccess/compare/v2.0.1...v3.0.0) (2018-08-24) + + +### Features + +* **api:** overhaul API ergonomics ([1faf00a](https://github.com/npm/libnpmaccess/commit/1faf00a)) + + +### BREAKING CHANGES + +* **api:** all API calls where scope and team were separate, or +where team was an extra, optional argument should now use a +fully-qualified team name instead, in the `scope:team` format. + + + + +## [2.0.1](https://github.com/npm/libnpmaccess/compare/v2.0.0...v2.0.1) (2018-08-24) + + + + +# [2.0.0](https://github.com/npm/libnpmaccess/compare/v1.2.2...v2.0.0) (2018-08-21) + + +### Bug Fixes + +* **json:** stop trying to parse response JSON ([20fdd84](https://github.com/npm/libnpmaccess/commit/20fdd84)) +* **lsPackages:** team URL was wrong D: ([b52201c](https://github.com/npm/libnpmaccess/commit/b52201c)) + + +### BREAKING CHANGES + +* **json:** use cases where registries were returning JSON +strings in the response body will no longer have an effect. All +API functions except for lsPackages and lsCollaborators will return +`true` on completion. 
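A minimal sketch of what this return-value change looks like in practice. The package names and the `myregistrytoken` value are illustrative placeholders borrowed from the README examples further down; this is not part of the published changelog entry, just a hedged usage sketch assuming a registry token is configured:

```javascript
'use strict'

const access = require('libnpmaccess')

async function demo () {
  // Mutation-style calls (public, restricted, grant, revoke, tfa*) now
  // resolve to `true` on success instead of any parsed response JSON.
  const ok = await access.public('@foo/bar', { token: 'myregistrytoken' })
  console.log(ok) // true

  // The listing calls still resolve to data, keyed by package name,
  // e.g. { '@foo/bar': 'read-write', ... }.
  const pkgs = await access.lsPackages('zkat', { token: 'myregistrytoken' })
  console.log(Object.keys(pkgs))
}

demo().catch(console.error)
```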
+ + + + +## [1.2.2](https://github.com/npm/libnpmaccess/compare/v1.2.1...v1.2.2) (2018-08-20) + + +### Bug Fixes + +* **docs:** tiny doc hiccup fix ([106396f](https://github.com/npm/libnpmaccess/commit/106396f)) + + + + +## [1.2.1](https://github.com/npm/libnpmaccess/compare/v1.2.0...v1.2.1) (2018-08-20) + + +### Bug Fixes + +* **docs:** document the stream interfaces ([c435aa2](https://github.com/npm/libnpmaccess/commit/c435aa2)) + + + + +# [1.2.0](https://github.com/npm/libnpmaccess/compare/v1.1.0...v1.2.0) (2018-08-20) + + +### Bug Fixes + +* **readme:** fix up appveyor badge url ([42b45a1](https://github.com/npm/libnpmaccess/commit/42b45a1)) + + +### Features + +* **streams:** add streaming result support for lsPkg and lsCollab ([0f06f46](https://github.com/npm/libnpmaccess/commit/0f06f46)) + + + + +# [1.1.0](https://github.com/npm/libnpmaccess/compare/v1.0.0...v1.1.0) (2018-08-17) + + +### Bug Fixes + +* **2fa:** escape package names correctly ([f2d83fe](https://github.com/npm/libnpmaccess/commit/f2d83fe)) +* **grant:** fix permissions validation ([07f7435](https://github.com/npm/libnpmaccess/commit/07f7435)) +* **ls-collaborators:** fix package name escaping + query ([3c02858](https://github.com/npm/libnpmaccess/commit/3c02858)) +* **ls-packages:** add query + fix fallback request order ([bdc4791](https://github.com/npm/libnpmaccess/commit/bdc4791)) +* **node6:** stop using Object.entries() ([4fec03c](https://github.com/npm/libnpmaccess/commit/4fec03c)) +* **public/restricted:** body should be string, not bool ([cffc727](https://github.com/npm/libnpmaccess/commit/cffc727)) +* **readme:** fix up title and badges ([2bd6113](https://github.com/npm/libnpmaccess/commit/2bd6113)) +* **specs:** require specs to be registry specs ([7892891](https://github.com/npm/libnpmaccess/commit/7892891)) + + +### Features + +* **test:** add 100% coverage test suite ([22b5dec](https://github.com/npm/libnpmaccess/commit/22b5dec)) + + + + +# 1.0.0 (2018-08-17) + + +### Bug Fixes + +* **test:** -100 is apparently bad now ([a5ab879](https://github.com/npm/libnpmaccess/commit/a5ab879)) + + +### Features + +* **impl:** initial implementation of api ([7039390](https://github.com/npm/libnpmaccess/commit/7039390)) diff --git a/node_modules/libnpmaccess/CODE_OF_CONDUCT.md b/node_modules/libnpmaccess/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000000..aeb72f598dcb4 --- /dev/null +++ b/node_modules/libnpmaccess/CODE_OF_CONDUCT.md @@ -0,0 +1,151 @@ +# Code of Conduct + +## When Something Happens + +If you see a Code of Conduct violation, follow these steps: + +1. Let the person know that what they did is not appropriate and ask them to stop and/or edit their message(s) or commits. +2. That person should immediately stop the behavior and correct the issue. +3. If this doesn’t happen, or if you're uncomfortable speaking up, [contact the maintainers](#contacting-maintainers). +4. As soon as available, a maintainer will look into the issue, and take [further action (see below)](#further-enforcement), starting with a warning, then temporary block, then long-term repo or organization ban. + +When reporting, please include any relevant details, links, screenshots, context, or other information that may be used to better understand and resolve the situation. + +**The maintainer team will prioritize the well-being and comfort of the recipients of the violation over the comfort of the violator.** See [some examples below](#enforcement-examples). 
+ +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as contributors and maintainers of this project pledge to making participation in our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, technical preferences, nationality, personal appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment include: + + * Using welcoming and inclusive language. + * Being respectful of differing viewpoints and experiences. + * Gracefully accepting constructive feedback. + * Focusing on what is best for the community. + * Showing empathy and kindness towards other community members. + * Encouraging and raising up your peers in the project so you can all bask in hacks and glory. + +Examples of unacceptable behavior by participants include: + + * The use of sexualized language or imagery and unwelcome sexual attention or advances, including when simulated online. The only exception to sexual topics is channels/spaces specifically for topics of sexual identity. + * Casual mention of slavery or indentured servitude and/or false comparisons of one's occupation or situation to slavery. Please consider using or asking about alternate terminology when referring to such metaphors in technology. + * Making light of/making mocking comments about trigger warnings and content warnings. + * Trolling, insulting/derogatory comments, and personal or political attacks. + * Public or private harassment, deliberate intimidation, or threats. + * Publishing others' private information, such as a physical or electronic address, without explicit permission. This includes any sort of "outing" of any aspect of someone's identity without their consent. + * Publishing private screenshots or quotes of interactions in the context of this project without all quoted users' *explicit* consent. + * Publishing of private communication that doesn't have to do with reporting harrassment. + * Any of the above even when [presented as "ironic" or "joking"](https://en.wikipedia.org/wiki/Hipster_racism). + * Any attempt to present "reverse-ism" versions of the above as violations. Examples of reverse-isms are "reverse racism", "reverse sexism", "heterophobia", and "cisphobia". + * Unsolicited explanations under the assumption that someone doesn't already know it. Ask before you teach! Don't assume what people's knowledge gaps are. + * [Feigning or exaggerating surprise](https://www.recurse.com/manual#no-feigned-surprise) when someone admits to not knowing something. + * "[Well-actuallies](https://www.recurse.com/manual#no-well-actuallys)" + * Other conduct which could reasonably be considered inappropriate in a professional or community setting. + +## Scope + +This Code of Conduct applies both within spaces involving this project and in other spaces involving community members. This includes the repository, its Pull Requests and Issue tracker, its Twitter community, private email communications in the context of the project, and any events where members of the project are participating, as well as adjacent communities and venues affecting the project's members. 
+ +Depending on the violation, the maintainers may decide that violations of this code of conduct that have happened outside of the scope of the community may deem an individual unwelcome, and take appropriate action to maintain the comfort and safety of its members. + +### Other Community Standards + +As a project on GitHub, this project is additionally covered by the [GitHub Community Guidelines](https://help.github.com/articles/github-community-guidelines/). + +Additionally, as a project hosted on npm, is is covered by [npm, Inc's Code of Conduct](https://www.npmjs.com/policies/conduct). + +Enforcement of those guidelines after violations overlapping with the above are the responsibility of the entities, and enforcement may happen in any or all of the services/communities. + +## Maintainer Enforcement Process + +Once the maintainers get involved, they will follow a documented series of steps and do their best to preserve the well-being of project members. This section covers actual concrete steps. + +### Contacting Maintainers + +You may get in touch with the maintainer team through any of the following methods: + + * Through email: + * [kzm@zkat.tech](mailto:kzm@zkat.tech) (Kat Marchán) + + * Through Twitter: + * [@maybekatz](https://twitter.com/maybekatz) (Kat Marchán) + +### Further Enforcement + +If you've already followed the [initial enforcement steps](#enforcement), these are the steps maintainers will take for further enforcement, as needed: + + 1. Repeat the request to stop. + 2. If the person doubles down, they will have offending messages removed or edited by a maintainers given an official warning. The PR or Issue may be locked. + 3. If the behavior continues or is repeated later, the person will be blocked from participating for 24 hours. + 4. If the behavior continues or is repeated after the temporary block, a long-term (6-12mo) ban will be used. + +On top of this, maintainers may remove any offending messages, images, contributions, etc, as they deem necessary. + +Maintainers reserve full rights to skip any of these steps, at their discretion, if the violation is considered to be a serious and/or immediate threat to the health and well-being of members of the community. These include any threats, serious physical or verbal attacks, and other such behavior that would be completely unacceptable in any social setting that puts our members at risk. + +Members expelled from events or venues with any sort of paid attendance will not be refunded. + +### Who Watches the Watchers? + +Maintainers and other leaders who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. These may include anything from removal from the maintainer team to a permanent ban from the community. + +Additionally, as a project hosted on both GitHub and npm, [their own Codes of Conducts may be applied against maintainers of this project](#other-community-standards), externally of this project's procedures. + +### Enforcement Examples + +#### The Best Case + +The vast majority of situations work out like this. This interaction is common, and generally positive. + +> Alex: "Yeah I used X and it was really crazy!" + +> Patt (not a maintainer): "Hey, could you not use that word? What about 'ridiculous' instead?" + +> Alex: "oh sorry, sure." -> edits old comment to say "it was really confusing!" + +#### The Maintainer Case + +Sometimes, though, you need to get maintainers involved. 
Maintainers will do their best to resolve conflicts, but people who were harmed by something **will take priority**. + +> Patt: "Honestly, sometimes I just really hate using $library and anyone who uses it probably sucks at their job." + +> Alex: "Whoa there, could you dial it back a bit? There's a CoC thing about attacking folks' tech use like that." + +> Patt: "I'm not attacking anyone, what's your problem?" + +> Alex: "@maintainers hey uh. Can someone look at this issue? Patt is getting a bit aggro. I tried to nudge them about it, but nope." + +> KeeperOfCommitBits: (on issue) "Hey Patt, maintainer here. Could you tone it down? This sort of attack is really not okay in this space." + +> Patt: "Leave me alone I haven't said anything bad wtf is wrong with you." + +> KeeperOfCommitBits: (deletes user's comment), "@patt I mean it. Please refer to the CoC over at (URL to this CoC) if you have questions, but you can consider this an actual warning. I'd appreciate it if you reworded your messages in this thread, since they made folks there uncomfortable. Let's try and be kind, yeah?" + +> Patt: "@keeperofbits Okay sorry. I'm just frustrated and I'm kinda burnt out and I guess I got carried away. I'll DM Alex a note apologizing and edit my messages. Sorry for the trouble." + +> KeeperOfCommitBits: "@patt Thanks for that. I hear you on the stress. Burnout sucks :/. Have a good one!" + +#### The Nope Case + +> PepeTheFrog🐸: "Hi, I am a literal actual nazi and I think white supremacists are quite fashionable." + +> Patt: "NOOOOPE. OH NOPE NOPE." + +> Alex: "JFC NO. NOPE. @keeperofbits NOPE NOPE LOOK HERE" + +> KeeperOfCommitBits: "👀 Nope. NOPE NOPE NOPE. 🔥" + +> PepeTheFrog🐸 has been banned from all organization or user repositories belonging to KeeperOfCommitBits. + +## Attribution + +This Code of Conduct was generated using [WeAllJS Code of Conduct Generator](https://npm.im/weallbehave), which is based on the [WeAllJS Code of +Conduct](https://wealljs.org/code-of-conduct), which is itself based on +[Contributor Covenant](http://contributor-covenant.org), version 1.4, available +at +[http://contributor-covenant.org/version/1/4](http://contributor-covenant.org/version/1/4), +and the LGBTQ in Technology Slack [Code of +Conduct](http://lgbtq.technology/coc.html). diff --git a/node_modules/libnpmaccess/CONTRIBUTING.md b/node_modules/libnpmaccess/CONTRIBUTING.md new file mode 100644 index 0000000000000..e13356ea44497 --- /dev/null +++ b/node_modules/libnpmaccess/CONTRIBUTING.md @@ -0,0 +1,256 @@ +# Contributing + +## How do I... + +* [Use This Guide](#introduction)? +* Ask or Say Something? 🤔🐛😱 + * [Request Support](#request-support) + * [Report an Error or Bug](#report-an-error-or-bug) + * [Request a Feature](#request-a-feature) +* Make Something? 🤓👩🏽‍💻📜🍳 + * [Project Setup](#project-setup) + * [Contribute Documentation](#contribute-documentation) + * [Contribute Code](#contribute-code) +* Manage Something ✅🙆🏼💃👔 + * [Provide Support on Issues](#provide-support-on-issues) + * [Label Issues](#label-issues) + * [Clean Up Issues and PRs](#clean-up-issues-and-prs) + * [Review Pull Requests](#review-pull-requests) + * [Merge Pull Requests](#merge-pull-requests) + * [Tag a Release](#tag-a-release) + * [Join the Project Team](#join-the-project-team) +* Add a Guide Like This One [To My Project](#attribution)? 🤖😻👻 + +## Introduction + +Thank you so much for your interest in contributing!. All types of contributions are encouraged and valued. 
See the [table of contents](#toc) for different ways to help and details about how this project handles them!📝 + +Please make sure to read the relevant section before making your contribution! It will make it a lot easier for us maintainers to make the most of it and smooth out the experience for all involved. 💚 + +The [Project Team](#join-the-project-team) looks forward to your contributions. 🙌🏾✨ + +## Request Support + +If you have a question about this project, how to use it, or just need clarification about something: + +* Open an Issue at https://github.com/npm/libnpmaccess/issues +* Provide as much context as you can about what you're running into. +* Provide project and platform versions (nodejs, npm, etc), depending on what seems relevant. If not, please be ready to provide that information if maintainers ask for it. + +Once it's filed: + +* The project team will [label the issue](#label-issues). +* Someone will try to have a response soon. +* If you or the maintainers don't respond to an issue for 30 days, the [issue will be closed](#clean-up-issues-and-prs). If you want to come back to it, reply (once, please), and we'll reopen the existing issue. Please avoid filing new issues as extensions of one you already made. + +## Report an Error or Bug + +If you run into an error or bug with the project: + +* Open an Issue at https://github.com/npm/libnpmaccess/issues +* Include *reproduction steps* that someone else can follow to recreate the bug or error on their own. +* Provide project and platform versions (nodejs, npm, etc), depending on what seems relevant. If not, please be ready to provide that information if maintainers ask for it. + +Once it's filed: + +* The project team will [label the issue](#label-issues). +* A team member will try to reproduce the issue with your provided steps. If there are no repro steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as `needs-repro`. Bugs with the `needs-repro` tag will not be addressed until they are reproduced. +* If the team is able to reproduce the issue, it will be marked `needs-fix`, as well as possibly other tags (such as `critical`), and the issue will be left to be [implemented by someone](#contribute-code). +* If you or the maintainers don't respond to an issue for 30 days, the [issue will be closed](#clean-up-issues-and-prs). If you want to come back to it, reply (once, please), and we'll reopen the existing issue. Please avoid filing new issues as extensions of one you already made. +* `critical` issues may be left open, depending on perceived immediacy and severity, even past the 30 day deadline. + +## Request a Feature + +If the project doesn't do something you need or want it to do: + +* Open an Issue at https://github.com/npm/libnpmaccess/issues +* Provide as much context as you can about what you're running into. +* Please try and be clear about why existing features and alternatives would not work for you. + +Once it's filed: + +* The project team will [label the issue](#label-issues). +* The project team will evaluate the feature request, possibly asking you more questions to understand its purpose and any relevant requirements. If the issue is closed, the team will convey their reasoning and suggest an alternative path forward. +* If the feature request is accepted, it will be marked for implementation with `feature-accepted`, which can then be done by either by a core team member or by anyone in the community who wants to [contribute code](#contribute-code). 
+ +Note: The team is unlikely to be able to accept every single feature request that is filed. Please understand if they need to say no. + +## Project Setup + +So you wanna contribute some code! That's great! This project uses GitHub Pull Requests to manage contributions, so [read up on how to fork a GitHub project and file a PR](https://guides.github.com/activities/forking) if you've never done it before. + +If this seems like a lot or you aren't able to do all this setup, you might also be able to [edit the files directly](https://help.github.com/articles/editing-files-in-another-user-s-repository/) without having to do any of this setup. Yes, [even code](#contribute-code). + +If you want to go the usual route and run the project locally, though: + +* [Install Node.js](https://nodejs.org/en/download/) +* [Fork the project](https://guides.github.com/activities/forking/#fork) + +Then in your terminal: +* `cd path/to/your/clone` +* `npm install` +* `npm test` + +And you should be ready to go! + +## Contribute Documentation + +Documentation is a super important, critical part of this project. Docs are how we keep track of what we're doing, how, and why. It's how we stay on the same page about our policies. And it's how we tell others everything they need in order to be able to use this project -- or contribute to it. So thank you in advance. + +Documentation contributions of any size are welcome! Feel free to file a PR even if you're just rewording a sentence to be more clear, or fixing a spelling mistake! + +To contribute documentation: + +* [Set up the project](#project-setup). +* Edit or add any relevant documentation. +* Make sure your changes are formatted correctly and consistently with the rest of the documentation. +* Re-read what you wrote, and run a spellchecker on it to make sure you didn't miss anything. +* In your commit message(s), begin the first line with `docs: `. For example: `docs: Adding a doc contrib section to CONTRIBUTING.md`. +* Write clear, concise commit message(s) using [conventional-changelog format](https://github.com/conventional-changelog/conventional-changelog-angular/blob/master/convention.md). Documentation commits should use `docs(): `. +* Go to https://github.com/npm/libnpmaccess/pulls and open a new pull request with your changes. +* If your PR is connected to an open issue, add a line in your PR's description that says `Fixes: #123`, where `#123` is the number of the issue you're fixing. + +Once you've filed the PR: + +* One or more maintainers will use GitHub's review feature to review your PR. +* If the maintainer asks for any changes, edit your changes, push, and ask for another review. +* If the maintainer decides to pass on your PR, they will thank you for the contribution and explain why they won't be accepting the changes. That's ok! We still really appreciate you taking the time to do it, and we don't take that lightly. 💚 +* If your PR gets accepted, it will be marked as such, and merged into the `latest` branch soon after. Your contribution will be distributed to the masses next time the maintainers [tag a release](#tag-a-release) + +## Contribute Code + +We like code commits a lot! They're super handy, and they keep the project going and doing the work it needs to do to be useful to others. + +Code contributions of just about any size are acceptable! + +The main difference between code contributions and documentation contributions is that contributing code requires inclusion of relevant tests for the code being added or changed. 
Contributions without accompanying tests will be held off until a test is added, unless the maintainers consider the specific tests to be either impossible, or way too much of a burden for such a contribution. + +To contribute code: + +* [Set up the project](#project-setup). +* Make any necessary changes to the source code. +* Include any [additional documentation](#contribute-documentation) the changes might need. +* Write tests that verify that your contribution works as expected. +* Write clear, concise commit message(s) using [conventional-changelog format](https://github.com/conventional-changelog/conventional-changelog-angular/blob/master/convention.md). +* Dependency updates, additions, or removals must be in individual commits, and the message must use the format: `(deps): PKG@VERSION`, where `` is any of the usual `conventional-changelog` prefixes, at your discretion. +* Go to https://github.com/npm/libnpmaccess/pulls and open a new pull request with your changes. +* If your PR is connected to an open issue, add a line in your PR's description that says `Fixes: #123`, where `#123` is the number of the issue you're fixing. + +Once you've filed the PR: + +* Barring special circumstances, maintainers will not review PRs until all checks pass (Travis, AppVeyor, etc). +* One or more maintainers will use GitHub's review feature to review your PR. +* If the maintainer asks for any changes, edit your changes, push, and ask for another review. Additional tags (such as `needs-tests`) will be added depending on the review. +* If the maintainer decides to pass on your PR, they will thank you for the contribution and explain why they won't be accepting the changes. That's ok! We still really appreciate you taking the time to do it, and we don't take that lightly. 💚 +* If your PR gets accepted, it will be marked as such, and merged into the `latest` branch soon after. Your contribution will be distributed to the masses next time the maintainers [tag a release](#tag-a-release) + +## Provide Support on Issues + +[Needs Collaborator](#join-the-project-team): none + +Helping out other users with their questions is a really awesome way of contributing to any community. It's not uncommon for most of the issues on an open source projects being support-related questions by users trying to understand something they ran into, or find their way around a known bug. + +Sometimes, the `support` label will be added to things that turn out to actually be other things, like bugs or feature requests. In that case, suss out the details with the person who filed the original issue, add a comment explaining what the bug is, and change the label from `support` to `bug` or `feature`. If you can't do this yourself, @mention a maintainer so they can do it. + +In order to help other folks out with their questions: + +* Go to the issue tracker and [filter open issues by the `support` label](https://github.com/npm/libnpmaccess/issues?q=is%3Aopen+is%3Aissue+label%3Asupport). +* Read through the list until you find something that you're familiar enough with to give an answer to. +* Respond to the issue with whatever details are needed to clarify the question, or get more details about what's going on. +* Once the discussion wraps up and things are clarified, either close the issue, or ask the original issue filer (or a maintainer) to close it for you. + +Some notes on picking up support issues: + +* Avoid responding to issues you don't know you can answer accurately. 
+* As much as possible, try to refer to past issues with accepted answers. Link to them from your replies with the `#123` format. +* Be kind and patient with users -- often, folks who have run into confusing things might be upset or impatient. This is ok. Try to understand where they're coming from, and if you're too uncomfortable with the tone, feel free to stay away or withdraw from the issue. (note: if the user is outright hostile or is violating the CoC, [refer to the Code of Conduct](CODE_OF_CONDUCT.md) to resolve the conflict). + +## Label Issues + +[Needs Collaborator](#join-the-project-team): Issue Tracker + +One of the most important tasks in handling issues is labeling them usefully and accurately. All other tasks involving issues ultimately rely on the issue being classified in such a way that relevant parties looking to do their own tasks can find them quickly and easily. + +In order to label issues, [open up the list of unlabeled issues](https://github.com/npm/libnpmaccess/issues?q=is%3Aopen+is%3Aissue+no%3Alabel) and, **from newest to oldest**, read through each one and apply issue labels according to the table below. If you're unsure about what label to apply, skip the issue and try the next one: don't feel obligated to label each and every issue yourself! + +Label | Apply When | Notes +--- | --- | --- +`bug` | Cases where the code (or documentation) is behaving in a way it wasn't intended to. | If something is happening that surprises the *user* but does not go against the way the code is designed, it should use the `enhancement` label. +`critical` | Added to `bug` issues if the problem described makes the code completely unusable in a common situation. | +`documentation` | Added to issues or pull requests that affect any of the documentation for the project. | Can be combined with other labels, such as `bug` or `enhancement`. +`duplicate` | Added to issues or PRs that refer to the exact same issue as another one that's been previously labeled. | Duplicate issues should be marked and closed right away, with a message referencing the issue it's a duplicate of (with `#123`) +`enhancement` | Added to [feature requests](#request-a-feature), PRs, or documentation issues that are purely additive: the code or docs currently work as expected, but a change is being requested or suggested. | +`help wanted` | Applied by [Committers](#join-the-project-team) to issues and PRs that they would like to get outside help for. Generally, this means it's lower priority for the maintainer team to itself implement, but that the community is encouraged to pick up if they so desire | Never applied on first-pass labeling. +`in-progress` | Applied by [Committers](#join-the-project-team) to PRs that are pending some work before they're ready for review. | The original PR submitter should @mention the team member that applied the label once the PR is complete. +`performance` | This issue or PR is directly related to improving performance. | +`refactor` | Added to issues or PRs that deal with cleaning up or modifying the project for the betterment of it. | +`starter` | Applied by [Committers](#join-the-project-team) to issues that they consider good introductions to the project for people who have not contributed before. These are not necessarily "easy", but rather focused around how much context is necessary in order to understand what needs to be done for this project in particular. | Existing project members are expected to stay away from these unless they increase in priority. 
+`support` | This issue is either asking a question about how to use the project, clarifying the reason for unexpected behavior, or possibly reporting a `bug` but does not have enough detail yet to determine whether it would count as such. | The label should be switched to `bug` if reliable reproduction steps are provided. Issues primarily with unintended configurations of a user's environment are not considered bugs, even if they cause crashes. +`tests` | This issue or PR either requests or adds primarily tests to the project. | If a PR is pending tests, that will be handled through the [PR review process](#review-pull-requests) +`wontfix` | Labelers may apply this label to issues that clearly have nothing at all to do with the project or are otherwise entirely outside of its scope/sphere of influence. [Committers](#join-the-project-team) may apply this label and close an issue or PR if they decide to pass on an otherwise relevant issue. | The issue or PR should be closed as soon as the label is applied, and a clear explanation provided of why the label was used. Contributors are free to contest the labeling, but the decision ultimately falls on committers as to whether to accept something or not. + +## Clean Up Issues and PRs + +[Needs Collaborator](#join-the-project-team): Issue Tracker + +Issues and PRs can go stale after a while. Maybe they're abandoned. Maybe the team will just plain not have time to address them any time soon. + +In these cases, they should be closed until they're brought up again or the interaction starts over. + +To clean up issues and PRs: + +* Search the issue tracker for issues or PRs, and add the term `updated:<=YYYY-MM-DD`, where the date is 30 days before today. +* Go through each issue *from oldest to newest*, and close them if **all of the following are true**: + * not opened by a maintainer + * not marked as `critical` + * not marked as `starter` or `help wanted` (these might stick around for a while, in general, as they're intended to be available) + * no explicit messages in the comments asking for it to be left open + * does not belong to a milestone +* Leave a message when closing saying "Cleaning up stale issue. Please reopen or ping us if and when you're ready to resume this. See https://github.com/npm/libnpmaccess/blob/latest/CONTRIBUTING.md#clean-up-issues-and-prs for more details." + +## Review Pull Requests + +[Needs Collaborator](#join-the-project-team): Issue Tracker + +While anyone can comment on a PR, add feedback, etc, PRs are only *approved* by team members with Issue Tracker or higher permissions. + +PR reviews use [GitHub's own review feature](https://help.github.com/articles/about-pull-request-reviews/), which manages comments, approval, and review iteration. + +Some notes: + +* You may ask for minor changes ("nitpicks"), but consider whether they are really blockers to merging: try to err on the side of "approve, with comments". +* *ALL PULL REQUESTS* should be covered by a test: either by a previously-failing test, an existing test that covers the entire functionality of the submitted code, or new tests to verify any new/changed behavior. All tests must also pass and follow established conventions. Test coverage should not drop, unless the specific case is considered reasonable by maintainers. +* Please make sure you're familiar with the code or documentation being updated, unless it's a minor change (spellchecking, minor formatting, etc). 
You may @mention another project member who you think is better suited for the review, but still provide a non-approving review of your own. +* Be extra kind: people who submit code/doc contributions are putting themselves in a pretty vulnerable position, and have put time and care into what they've done (even if that's not obvious to you!) -- always respond with respect, be understanding, but don't feel like you need to sacrifice your standards for their sake, either. Just don't be a jerk about it? + +## Merge Pull Requests + +[Needs Collaborator](#join-the-project-team): Committer + +TBD - need to hash out a bit more of this process. + +## Tag A Release + +[Needs Collaborator](#join-the-project-team): Committer + +TBD - need to hash out a bit more of this process. The most important bit here is probably that all tests must pass, and tags must use [semver](https://semver.org). + +## Join the Project Team + +### Ways to Join + +There are many ways to contribute! Most of them don't require any official status unless otherwise noted. That said, there's a couple of positions that grant special repository abilities, and this section describes how they're granted and what they do. + +All of the below positions are granted based on the project team's needs, as well as their consensus opinion about whether they would like to work with the person and think that they would fit well into that position. The process is relatively informal, and it's likely that people who express interest in participating can just be granted the permissions they'd like. + +You can spot a collaborator on the repo by looking for the `[Collaborator]` or `[Owner]` tags next to their names. + +Permission | Description +--- | --- +Issue Tracker | Granted to contributors who express a strong interest in spending time on the project's issue tracker. These tasks are mainly [labeling issues](#label-issues), [cleaning up old ones](#clean-up-issues-and-prs), and [reviewing pull requests](#review-pull-requests), as well as all the usual things non-team-member contributors can do. Issue handlers should not merge pull requests, tag releases, or directly commit code themselves: that should still be done through the usual pull request process. Becoming an Issue Handler means the project team trusts you to understand enough of the team's process and context to implement it on the issue tracker. +Committer | Granted to contributors who want to handle the actual pull request merges, tagging new versions, etc. Committers should have a good level of familiarity with the codebase, and enough context to understand the implications of various changes, as well as a good sense of the will and expectations of the project team. +Admin/Owner | Granted to people ultimately responsible for the project, its community, etc. + +## Attribution + +This guide was generated using the WeAllJS `CONTRIBUTING.md` generator. [Make your own](https://npm.im/weallcontribute)! diff --git a/node_modules/libnpmaccess/LICENSE b/node_modules/libnpmaccess/LICENSE new file mode 100644 index 0000000000000..209e4477f39c1 --- /dev/null +++ b/node_modules/libnpmaccess/LICENSE @@ -0,0 +1,13 @@ +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/libnpmaccess/PULL_REQUEST_TEMPLATE b/node_modules/libnpmaccess/PULL_REQUEST_TEMPLATE new file mode 100644 index 0000000000000..9471c6d325f7e --- /dev/null +++ b/node_modules/libnpmaccess/PULL_REQUEST_TEMPLATE @@ -0,0 +1,7 @@ + diff --git a/node_modules/libnpmaccess/README.md b/node_modules/libnpmaccess/README.md new file mode 100644 index 0000000000000..2b639823a0641 --- /dev/null +++ b/node_modules/libnpmaccess/README.md @@ -0,0 +1,258 @@ +# libnpmaccess [![npm version](https://img.shields.io/npm/v/libnpmaccess.svg)](https://npm.im/libnpmaccess) [![license](https://img.shields.io/npm/l/libnpmaccess.svg)](https://npm.im/libnpmaccess) [![Travis](https://img.shields.io/travis/npm/libnpmaccess/latest.svg)](https://travis-ci.org/npm/libnpmaccess) [![AppVeyor](https://img.shields.io/appveyor/ci/zkat/libnpmaccess/latest.svg)](https://ci.appveyor.com/project/zkat/libnpmaccess) [![Coverage Status](https://coveralls.io/repos/github/npm/libnpmaccess/badge.svg?branch=latest)](https://coveralls.io/github/npm/libnpmaccess?branch=latest) + +[`libnpmaccess`](https://github.com/npm/libnpmaccess) is a Node.js +library that provides programmatic access to the guts of the npm CLI's `npm +access` command and its various subcommands. This includes managing account 2FA, +listing packages and permissions, looking at package collaborators, and defining +package permissions for users, orgs, and teams. + +## Example + +```javascript +const access = require('libnpmaccess') + +// List all packages @zkat has access to on the npm registry. +console.log(Object.keys(await access.lsPackages('zkat'))) +``` + +## Table of Contents + +* [Installing](#install) +* [Example](#example) +* [Contributing](#contributing) +* [API](#api) + * [access opts](#opts) + * [`public()`](#public) + * [`restricted()`](#restricted) + * [`grant()`](#grant) + * [`revoke()`](#revoke) + * [`tfaRequired()`](#tfa-required) + * [`tfaNotRequired()`](#tfa-not-required) + * [`lsPackages()`](#ls-packages) + * [`lsPackages.stream()`](#ls-packages-stream) + * [`lsCollaborators()`](#ls-collaborators) + * [`lsCollaborators.stream()`](#ls-collaborators-stream) + +### Install + +`$ npm install libnpmaccess` + +### Contributing + +The npm team enthusiastically welcomes contributions and project participation! +There's a bunch of things you can do if you want to contribute! The [Contributor +Guide](CONTRIBUTING.md) has all the information you need for everything from +reporting bugs to contributing entire new features. Please don't hesitate to +jump in if you'd like to, or even ask us questions if something isn't clear. + +All participants and maintainers in this project are expected to follow [Code of +Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other. + +Please refer to the [Changelog](CHANGELOG.md) for project history details, too. + +Happy hacking! + +### API + +#### `opts` for `libnpmaccess` commands + +`libnpmaccess` uses [`npm-registry-fetch`](https://npm.im/npm-registry-fetch). 
+All options are passed through directly to that library, so please refer to [its +own `opts` +documentation](https://www.npmjs.com/package/npm-registry-fetch#fetch-options) +for options that can be passed in. + +A couple of options of note for those in a hurry: + +* `opts.token` - can be passed in and will be used as the authentication token for the registry. For other ways to pass in auth details, see the n-r-f docs. +* `opts.otp` - certain operations will require an OTP token to be passed in. If a `libnpmaccess` command fails with `err.code === EOTP`, please retry the request with `{otp: <2fa token>}` +* `opts.Promise` - If you pass this in, the Promises returned by `libnpmaccess` commands will use this Promise class instead. For example: `{Promise: require('bluebird')}` + +#### `> access.public(spec, [opts]) -> Promise` + +`spec` must be an [`npm-package-arg`](https://npm.im/npm-package-arg)-compatible +registry spec. + +Makes package described by `spec` public. + +##### Example + +```javascript +await access.public('@foo/bar', {token: 'myregistrytoken'}) +// `@foo/bar` is now public +``` + +#### `> access.restricted(spec, [opts]) -> Promise` + +`spec` must be an [`npm-package-arg`](https://npm.im/npm-package-arg)-compatible +registry spec. + +Makes package described by `spec` private/restricted. + +##### Example + +```javascript +await access.restricted('@foo/bar', {token: 'myregistrytoken'}) +// `@foo/bar` is now private +``` + +#### `> access.grant(spec, team, permissions, [opts]) -> Promise` + +`spec` must be an [`npm-package-arg`](https://npm.im/npm-package-arg)-compatible +registry spec. `team` must be a fully-qualified team name, in the `scope:team` +format, with or without the `@` prefix, and the team must be a valid team within +that scope. `permissions` must be one of `'read-only'` or `'read-write'`. + +Grants `read-only` or `read-write` permissions for a certain package to a team. + +##### Example + +```javascript +await access.grant('@foo/bar', '@foo:myteam', 'read-write', { + token: 'myregistrytoken' +}) +// `@foo/bar` is now read/write enabled for the @foo:myteam team. +``` + +#### `> access.revoke(spec, team, [opts]) -> Promise` + +`spec` must be an [`npm-package-arg`](https://npm.im/npm-package-arg)-compatible +registry spec. `team` must be a fully-qualified team name, in the `scope:team` +format, with or without the `@` prefix, and the team must be a valid team within +that scope. `permissions` must be one of `'read-only'` or `'read-write'`. + +Removes access to a package from a certain team. + +##### Example + +```javascript +await access.revoke('@foo/bar', '@foo:myteam', { + token: 'myregistrytoken' +}) +// @foo:myteam can no longer access `@foo/bar` +``` + +#### `> access.tfaRequired(spec, [opts]) -> Promise` + +`spec` must be an [`npm-package-arg`](https://npm.im/npm-package-arg)-compatible +registry spec. + +Makes it so publishing or managing a package requires using 2FA tokens to +complete operations. + +##### Example + +```javascript +await access.tfaRequires('lodash', {token: 'myregistrytoken'}) +// Publishing or changing dist-tags on `lodash` now require OTP to be enabled. +``` + +#### `> access.tfaNotRequired(spec, [opts]) -> Promise` + +`spec` must be an [`npm-package-arg`](https://npm.im/npm-package-arg)-compatible +registry spec. + +Disabled the package-level 2FA requirement for `spec`. Note that you will need +to pass in an `otp` token in `opts` in order to complete this operation. 
+ +##### Example + +```javascript +await access.tfaNotRequired('lodash', {otp: '123654', token: 'myregistrytoken'}) +// Publishing or editing dist-tags on `lodash` no longer requires OTP to be +// enabled. +``` + +#### `> access.lsPackages(entity, [opts]) -> Promise` + +`entity` must be either a valid org or user name, or a fully-qualified team name +in the `scope:team` format, with or without the `@` prefix. + +Lists out packages a user, org, or team has access to, with corresponding +permissions. Packages that the access token does not have access to won't be +listed. + +In order to disambiguate between users and orgs, two requests may end up being +made when listing orgs or users. + +For a streamed version of these results, see +[`access.lsPackages.stream()`](#ls-package-stream). + +##### Example + +```javascript +await access.lsPackages('zkat', { + token: 'myregistrytoken' +}) +// Lists all packages `@zkat` has access to on the registry, and the +// corresponding permissions. +``` + +#### `> access.lsPackages.stream(scope, [team], [opts]) -> Stream` + +`entity` must be either a valid org or user name, or a fully-qualified team name +in the `scope:team` format, with or without the `@` prefix. + +Streams out packages a user, org, or team has access to, with corresponding +permissions, with each stream entry being formatted like `[packageName, +permissions]`. Packages that the access token does not have access to won't be +listed. + +In order to disambiguate between users and orgs, two requests may end up being +made when listing orgs or users. + +The returned stream is a valid `asyncIterator`. + +##### Example + +```javascript +for await (let [pkg, perm] of access.lsPackages.stream('zkat')) { + console.log('zkat has', perm, 'access to', pkg) +} +// zkat has read-write access to eggplant +// zkat has read-only access to @npmcorp/secret +``` + +#### `> access.lsCollaborators(spec, [user], [opts]) -> Promise` + +`spec` must be an [`npm-package-arg`](https://npm.im/npm-package-arg)-compatible +registry spec. `user` must be a valid user name, with or without the `@` +prefix. + +Lists out access privileges for a certain package. Will only show permissions +for packages to which you have at least read access. If `user` is passed in, the +list is filtered only to teams _that_ user happens to belong to. + +For a streamed version of these results, see [`access.lsCollaborators.stream()`](#ls-collaborators-stream). + +##### Example + +```javascript +await access.lsCollaborators('@npm/foo', 'zkat', { + token: 'myregistrytoken' +}) +// Lists all teams with access to @npm/foo that @zkat belongs to. +``` + +#### `> access.lsCollaborators.stream(spec, [user], [opts]) -> Stream` + +`spec` must be an [`npm-package-arg`](https://npm.im/npm-package-arg)-compatible +registry spec. `user` must be a valid user name, with or without the `@` +prefix. + +Stream out access privileges for a certain package, with each entry in `[user, +permissions]` format. Will only show permissions for packages to which you have +at least read access. If `user` is passed in, the list is filtered only to teams +_that_ user happens to belong to. + +The returned stream is a valid `asyncIterator`. 
+ +##### Example + +```javascript +for await (let [usr, perm] of access.lsCollaborators.stream('npm')) { + console.log(usr, 'has', perm, 'access to npm') +} +// zkat has read-write access to npm +// iarna has read-write access to npm +``` diff --git a/node_modules/libnpmaccess/appveyor.yml b/node_modules/libnpmaccess/appveyor.yml new file mode 100644 index 0000000000000..9cc64c58e02f9 --- /dev/null +++ b/node_modules/libnpmaccess/appveyor.yml @@ -0,0 +1,22 @@ +environment: + matrix: + - nodejs_version: "10" + - nodejs_version: "9" + - nodejs_version: "8" + - nodejs_version: "6" + +platform: + - x64 + +install: + - ps: Install-Product node $env:nodejs_version $env:platform + - npm config set spin false + - npm install + +test_script: + - npm test + +matrix: + fast_finish: true + +build: off diff --git a/node_modules/libnpmaccess/index.js b/node_modules/libnpmaccess/index.js new file mode 100644 index 0000000000000..e241fcbfc68a2 --- /dev/null +++ b/node_modules/libnpmaccess/index.js @@ -0,0 +1,201 @@ +'use strict' + +const figgyPudding = require('figgy-pudding') +const getStream = require('get-stream') +const npa = require('npm-package-arg') +const npmFetch = require('npm-registry-fetch') +const {PassThrough} = require('stream') +const validate = require('aproba') + +const AccessConfig = figgyPudding({ + Promise: {default: () => Promise} +}) + +const eu = encodeURIComponent +const npar = spec => { + spec = npa(spec) + if (!spec.registry) { + throw new Error('`spec` must be a registry spec') + } + return spec +} + +const cmd = module.exports = {} + +cmd.public = (spec, opts) => setAccess(spec, 'public', opts) +cmd.restricted = (spec, opts) => setAccess(spec, 'restricted', opts) +function setAccess (spec, access, opts) { + opts = AccessConfig(opts) + return pwrap(opts, () => { + spec = npar(spec) + validate('OSO', [spec, access, opts]) + const uri = `/-/package/${eu(spec.name)}/access` + return npmFetch(uri, opts.concat({ + method: 'POST', + body: {access}, + spec + })) + }).then(res => res.body.resume() && true) +} + +cmd.grant = (spec, entity, permissions, opts) => { + opts = AccessConfig(opts) + return pwrap(opts, () => { + spec = npar(spec) + const {scope, team} = splitEntity(entity) + validate('OSSSO', [spec, scope, team, permissions, opts]) + if (permissions !== 'read-write' && permissions !== 'read-only') { + throw new Error('`permissions` must be `read-write` or `read-only`. 
Got `' + permissions + '` instead') + } + const uri = `/-/team/${eu(scope)}/${eu(team)}/package` + return npmFetch(uri, opts.concat({ + method: 'PUT', + body: {package: spec.name, permissions}, + scope, + spec, + ignoreBody: true + })) + }).then(() => true) +} + +cmd.revoke = (spec, entity, opts) => { + opts = AccessConfig(opts) + return pwrap(opts, () => { + spec = npar(spec) + const {scope, team} = splitEntity(entity) + validate('OSSO', [spec, scope, team, opts]) + const uri = `/-/team/${eu(scope)}/${eu(team)}/package` + return npmFetch(uri, opts.concat({ + method: 'DELETE', + body: {package: spec.name}, + scope, + spec, + ignoreBody: true + })) + }).then(() => true) +} + +cmd.lsPackages = (entity, opts) => { + opts = AccessConfig(opts) + return pwrap(opts, () => { + return getStream.array( + cmd.lsPackages.stream(entity, opts) + ).then(data => data.reduce((acc, [key, val]) => { + if (!acc) { + acc = {} + } + acc[key] = val + return acc + }, null)) + }) +} + +cmd.lsPackages.stream = (entity, opts) => { + validate('SO|SZ', [entity, opts]) + opts = AccessConfig(opts) + const {scope, team} = splitEntity(entity) + let uri + if (team) { + uri = `/-/team/${eu(scope)}/${eu(team)}/package` + } else { + uri = `/-/org/${eu(scope)}/package` + } + opts = opts.concat({ + query: {format: 'cli'}, + mapJson (value, [key]) { + if (value === 'read') { + return [key, 'read-only'] + } else if (value === 'write') { + return [key, 'read-write'] + } else { + return [key, value] + } + } + }) + const ret = new PassThrough({objectMode: true}) + npmFetch.json.stream(uri, '*', opts).on('error', err => { + if (err.code === 'E404' && !team) { + uri = `/-/user/${eu(scope)}/package` + npmFetch.json.stream(uri, '*', opts).on( + 'error', err => ret.emit('error', err) + ).pipe(ret) + } else { + ret.emit('error', err) + } + }).pipe(ret) + return ret +} + +cmd.lsCollaborators = (spec, user, opts) => { + if (typeof user === 'object' && !opts) { + opts = user + user = undefined + } + opts = AccessConfig(opts) + return pwrap(opts, () => { + return getStream.array( + cmd.lsCollaborators.stream(spec, user, opts) + ).then(data => data.reduce((acc, [key, val]) => { + if (!acc) { + acc = {} + } + acc[key] = val + return acc + }, null)) + }) +} + +cmd.lsCollaborators.stream = (spec, user, opts) => { + if (typeof user === 'object' && !opts) { + opts = user + user = undefined + } + opts = AccessConfig(opts) + spec = npar(spec) + validate('OSO|OZO', [spec, user, opts]) + const uri = `/-/package/${eu(spec.name)}/collaborators` + return npmFetch.json.stream(uri, '*', opts.concat({ + query: {format: 'cli', user: user || undefined}, + mapJson (value, [key]) { + if (value === 'read') { + return [key, 'read-only'] + } else if (value === 'write') { + return [key, 'read-write'] + } else { + return [key, value] + } + } + })) +} + +cmd.tfaRequired = (spec, opts) => setRequires2fa(spec, true, opts) +cmd.tfaNotRequired = (spec, opts) => setRequires2fa(spec, false, opts) +function setRequires2fa (spec, required, opts) { + opts = AccessConfig(opts) + return new opts.Promise((resolve, reject) => { + spec = npar(spec) + validate('OBO', [spec, required, opts]) + const uri = `/-/package/${eu(spec.name)}/access` + return npmFetch(uri, opts.concat({ + method: 'POST', + body: {publish_requires_tfa: required}, + spec, + ignoreBody: true + })).then(resolve, reject) + }).then(() => true) +} + +cmd.edit = () => { + throw new Error('Not implemented yet') +} + +function splitEntity (entity = '') { + let [, scope, team] = entity.match(/^@?([^:]+)(?::(.*))?$/) || 
[] + return {scope, team} +} + +function pwrap (opts, fn) { + return new opts.Promise((resolve, reject) => { + fn().then(resolve, reject) + }) +} diff --git a/node_modules/libnpmaccess/package.json b/node_modules/libnpmaccess/package.json new file mode 100644 index 0000000000000..943b0aeb46a24 --- /dev/null +++ b/node_modules/libnpmaccess/package.json @@ -0,0 +1,67 @@ +{ + "_from": "libnpmaccess@3.0.2", + "_id": "libnpmaccess@3.0.2", + "_inBundle": false, + "_integrity": "sha512-01512AK7MqByrI2mfC7h5j8N9V4I7MHJuk9buo8Gv+5QgThpOgpjB7sQBDDkeZqRteFb1QM/6YNdHfG7cDvfAQ==", + "_location": "/libnpmaccess", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "libnpmaccess@3.0.2", + "name": "libnpmaccess", + "escapedName": "libnpmaccess", + "rawSpec": "3.0.2", + "saveSpec": null, + "fetchSpec": "3.0.2" + }, + "_requiredBy": [ + "#USER", + "/" + ], + "_resolved": "https://registry.npmjs.org/libnpmaccess/-/libnpmaccess-3.0.2.tgz", + "_shasum": "8b2d72345ba3bef90d3b4f694edd5c0417f58923", + "_spec": "libnpmaccess@3.0.2", + "_where": "/Users/isaacs/dev/npm/cli", + "author": { + "name": "Kat Marchán", + "email": "kzm@zkat.tech" + }, + "bugs": { + "url": "https://github.com/npm/libnpmaccess/issues" + }, + "bundleDependencies": false, + "dependencies": { + "aproba": "^2.0.0", + "get-stream": "^4.0.0", + "npm-package-arg": "^6.1.0", + "npm-registry-fetch": "^4.0.0" + }, + "deprecated": false, + "description": "programmatic library for `npm access` commands", + "devDependencies": { + "nock": "^9.6.1", + "standard": "*", + "standard-version": "*", + "tap": "*", + "weallbehave": "*", + "weallcontribute": "*" + }, + "homepage": "https://npmjs.com/package/libnpmaccess", + "license": "ISC", + "name": "libnpmaccess", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/libnpmaccess.git" + }, + "scripts": { + "postrelease": "npm publish && git push --follow-tags", + "prerelease": "npm t", + "pretest": "standard", + "release": "standard-version -s", + "test": "tap -J --100 test/*.js", + "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", + "update-contrib": "weallcontribute -o . 
&& git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" + }, + "version": "3.0.2" +} diff --git a/node_modules/libnpmaccess/test/index.js b/node_modules/libnpmaccess/test/index.js new file mode 100644 index 0000000000000..b48815e79a1fb --- /dev/null +++ b/node_modules/libnpmaccess/test/index.js @@ -0,0 +1,347 @@ +'use strict' + +const figgyPudding = require('figgy-pudding') +const getStream = require('get-stream') +const {test} = require('tap') +const tnock = require('./util/tnock.js') + +const access = require('../index.js') + +const REG = 'http://localhost:1337' +const OPTS = figgyPudding({})({ + registry: REG +}) + +test('access public', t => { + tnock(t, REG).post( + '/-/package/%40foo%2Fbar/access', {access: 'public'} + ).reply(200) + return access.public('@foo/bar', OPTS).then(ret => { + t.deepEqual(ret, true, 'request succeeded') + }) +}) + +test('access restricted', t => { + tnock(t, REG).post( + '/-/package/%40foo%2Fbar/access', {access: 'restricted'} + ).reply(200) + return access.restricted('@foo/bar', OPTS).then(ret => { + t.deepEqual(ret, true, 'request succeeded') + }) +}) + +test('access 2fa-required', t => { + tnock(t, REG).post('/-/package/%40foo%2Fbar/access', { + publish_requires_tfa: true + }).reply(200, {ok: true}) + return access.tfaRequired('@foo/bar', OPTS).then(ret => { + t.deepEqual(ret, true, 'request succeeded') + }) +}) + +test('access 2fa-not-required', t => { + tnock(t, REG).post('/-/package/%40foo%2Fbar/access', { + publish_requires_tfa: false + }).reply(200, {ok: true}) + return access.tfaNotRequired('@foo/bar', OPTS).then(ret => { + t.deepEqual(ret, true, 'request succeeded') + }) +}) + +test('access grant basic read-write', t => { + tnock(t, REG).put('/-/team/myorg/myteam/package', { + package: '@foo/bar', + permissions: 'read-write' + }).reply(201) + return access.grant( + '@foo/bar', 'myorg:myteam', 'read-write', OPTS + ).then(ret => { + t.deepEqual(ret, true, 'request succeeded') + }) +}) + +test('access grant basic read-only', t => { + tnock(t, REG).put('/-/team/myorg/myteam/package', { + package: '@foo/bar', + permissions: 'read-only' + }).reply(201) + return access.grant( + '@foo/bar', 'myorg:myteam', 'read-only', OPTS + ).then(ret => { + t.deepEqual(ret, true, 'request succeeded') + }) +}) + +test('access grant bad perm', t => { + return access.grant( + '@foo/bar', 'myorg:myteam', 'unknown', OPTS + ).then(ret => { + throw new Error('should not have succeeded') + }, err => { + t.match( + err.message, + /must be.*read-write.*read-only/, + 'only read-write and read-only are accepted' + ) + }) +}) + +test('access grant no entity', t => { + return access.grant( + '@foo/bar', undefined, 'read-write', OPTS + ).then(ret => { + throw new Error('should not have succeeded') + }, err => { + t.match( + err.message, + /Expected string/, + 'passing undefined entity gives useful error' + ) + }) +}) + +test('access grant basic unscoped', t => { + tnock(t, REG).put('/-/team/myorg/myteam/package', { + package: 'bar', + permissions: 'read-write' + }).reply(201) + return access.grant( + 'bar', 'myorg:myteam', 'read-write', OPTS + ).then(ret => { + t.deepEqual(ret, true, 'request succeeded') + }) +}) + +test('access revoke basic', t => { + tnock(t, REG).delete('/-/team/myorg/myteam/package', { + package: '@foo/bar' + }).reply(200) + return access.revoke('@foo/bar', 'myorg:myteam', OPTS).then(ret => { + t.deepEqual(ret, true, 'request succeeded') + }) +}) + +test('access revoke basic unscoped', t => { + tnock(t, 
REG).delete('/-/team/myorg/myteam/package', { + package: 'bar' + }).reply(200, {accessChanged: true}) + return access.revoke('bar', 'myorg:myteam', OPTS).then(ret => { + t.deepEqual(ret, true, 'request succeeded') + }) +}) + +test('ls-packages on team', t => { + const serverPackages = { + '@foo/bar': 'write', + '@foo/util': 'read', + '@foo/other': 'shrödinger' + } + const clientPackages = { + '@foo/bar': 'read-write', + '@foo/util': 'read-only', + '@foo/other': 'shrödinger' + } + tnock(t, REG).get( + '/-/team/myorg/myteam/package?format=cli' + ).reply(200, serverPackages) + return access.lsPackages('myorg:myteam', OPTS).then(data => { + t.deepEqual(data, clientPackages, 'got client package info') + }) +}) + +test('ls-packages on org', t => { + const serverPackages = { + '@foo/bar': 'write', + '@foo/util': 'read', + '@foo/other': 'shrödinger' + } + const clientPackages = { + '@foo/bar': 'read-write', + '@foo/util': 'read-only', + '@foo/other': 'shrödinger' + } + tnock(t, REG).get( + '/-/org/myorg/package?format=cli' + ).reply(200, serverPackages) + return access.lsPackages('myorg', OPTS).then(data => { + t.deepEqual(data, clientPackages, 'got client package info') + }) +}) + +test('ls-packages on user', t => { + const serverPackages = { + '@foo/bar': 'write', + '@foo/util': 'read', + '@foo/other': 'shrödinger' + } + const clientPackages = { + '@foo/bar': 'read-write', + '@foo/util': 'read-only', + '@foo/other': 'shrödinger' + } + const srv = tnock(t, REG) + srv.get('/-/org/myuser/package?format=cli').reply(404, {error: 'not found'}) + srv.get('/-/user/myuser/package?format=cli').reply(200, serverPackages) + return access.lsPackages('myuser', OPTS).then(data => { + t.deepEqual(data, clientPackages, 'got client package info') + }) +}) + +test('ls-packages error on team', t => { + tnock(t, REG).get('/-/team/myorg/myteam/package?format=cli').reply(404) + return access.lsPackages('myorg:myteam', OPTS).then( + () => { throw new Error('should not have succeeded') }, + err => t.equal(err.code, 'E404', 'spit out 404 directly if team provided') + ) +}) + +test('ls-packages error on user', t => { + const srv = tnock(t, REG) + srv.get('/-/org/myuser/package?format=cli').reply(404, {error: 'not found'}) + srv.get('/-/user/myuser/package?format=cli').reply(404, {error: 'not found'}) + return access.lsPackages('myuser', OPTS).then( + () => { throw new Error('should not have succeeded') }, + err => t.equal(err.code, 'E404', 'spit out 404 if both reqs fail') + ) +}) + +test('ls-packages bad response', t => { + tnock(t, REG).get( + '/-/team/myorg/myteam/package?format=cli' + ).reply(200, JSON.stringify(null)) + return access.lsPackages('myorg:myteam', OPTS).then(data => { + t.deepEqual(data, null, 'succeeds with null') + }) +}) + +test('ls-packages stream', t => { + const serverPackages = { + '@foo/bar': 'write', + '@foo/util': 'read', + '@foo/other': 'shrödinger' + } + const clientPackages = [ + ['@foo/bar', 'read-write'], + ['@foo/util', 'read-only'], + ['@foo/other', 'shrödinger'] + ] + tnock(t, REG).get( + '/-/team/myorg/myteam/package?format=cli' + ).reply(200, serverPackages) + return getStream.array( + access.lsPackages.stream('myorg:myteam', OPTS) + ).then(data => { + t.deepEqual(data, clientPackages, 'got streamed client package info') + }) +}) + +test('ls-collaborators', t => { + const serverCollaborators = { + 'myorg:myteam': 'write', + 'myorg:anotherteam': 'read', + 'myorg:thirdteam': 'special-case' + } + const clientCollaborators = { + 'myorg:myteam': 'read-write', + 'myorg:anotherteam': 
'read-only', + 'myorg:thirdteam': 'special-case' + } + tnock(t, REG).get( + '/-/package/%40foo%2Fbar/collaborators?format=cli' + ).reply(200, serverCollaborators) + return access.lsCollaborators('@foo/bar', OPTS).then(data => { + t.deepEqual(data, clientCollaborators, 'got collaborators') + }) +}) + +test('ls-collaborators stream', t => { + const serverCollaborators = { + 'myorg:myteam': 'write', + 'myorg:anotherteam': 'read', + 'myorg:thirdteam': 'special-case' + } + const clientCollaborators = [ + ['myorg:myteam', 'read-write'], + ['myorg:anotherteam', 'read-only'], + ['myorg:thirdteam', 'special-case'] + ] + tnock(t, REG).get( + '/-/package/%40foo%2Fbar/collaborators?format=cli' + ).reply(200, serverCollaborators) + return getStream.array( + access.lsCollaborators.stream('@foo/bar', OPTS) + ).then(data => { + t.deepEqual(data, clientCollaborators, 'got collaborators') + }) +}) + +test('ls-collaborators w/scope', t => { + const serverCollaborators = { + 'myorg:myteam': 'write', + 'myorg:anotherteam': 'read', + 'myorg:thirdteam': 'special-case' + } + const clientCollaborators = { + 'myorg:myteam': 'read-write', + 'myorg:anotherteam': 'read-only', + 'myorg:thirdteam': 'special-case' + } + tnock(t, REG).get( + '/-/package/%40foo%2Fbar/collaborators?format=cli&user=zkat' + ).reply(200, serverCollaborators) + return access.lsCollaborators('@foo/bar', 'zkat', OPTS).then(data => { + t.deepEqual(data, clientCollaborators, 'got collaborators') + }) +}) + +test('ls-collaborators w/o scope', t => { + const serverCollaborators = { + 'myorg:myteam': 'write', + 'myorg:anotherteam': 'read', + 'myorg:thirdteam': 'special-case' + } + const clientCollaborators = { + 'myorg:myteam': 'read-write', + 'myorg:anotherteam': 'read-only', + 'myorg:thirdteam': 'special-case' + } + tnock(t, REG).get( + '/-/package/bar/collaborators?format=cli&user=zkat' + ).reply(200, serverCollaborators) + return access.lsCollaborators('bar', 'zkat', OPTS).then(data => { + t.deepEqual(data, clientCollaborators, 'got collaborators') + }) +}) + +test('ls-collaborators bad response', t => { + tnock(t, REG).get( + '/-/package/%40foo%2Fbar/collaborators?format=cli' + ).reply(200, JSON.stringify(null)) + return access.lsCollaborators('@foo/bar', null, OPTS).then(data => { + t.deepEqual(data, null, 'succeeds with null') + }) +}) + +test('error on non-registry specs', t => { + const resolve = () => { throw new Error('should not succeed') } + const reject = err => t.match( + err.message, /spec.*must be a registry spec/, 'registry spec required' + ) + return Promise.all([ + access.public('foo/bar').then(resolve, reject), + access.restricted('foo/bar').then(resolve, reject), + access.grant('foo/bar', 'myorg', 'myteam', 'read-only').then(resolve, reject), + access.revoke('foo/bar', 'myorg', 'myteam').then(resolve, reject), + access.lsCollaborators('foo/bar').then(resolve, reject), + access.tfaRequired('foo/bar').then(resolve, reject), + access.tfaNotRequired('foo/bar').then(resolve, reject) + ]) +}) + +test('edit', t => { + t.equal(typeof access.edit, 'function', 'access.edit exists') + t.throws(() => { + access.edit() + }, /Not implemented/, 'directly throws NIY message') + t.done() +}) diff --git a/node_modules/libnpmaccess/test/util/tnock.js b/node_modules/libnpmaccess/test/util/tnock.js new file mode 100644 index 0000000000000..00b6e160e1019 --- /dev/null +++ b/node_modules/libnpmaccess/test/util/tnock.js @@ -0,0 +1,12 @@ +'use strict' + +const nock = require('nock') + +module.exports = tnock +function tnock (t, host) { + const server = 
nock(host) + t.tearDown(function () { + server.done() + }) + return server +} diff --git a/node_modules/libnpmconfig/CHANGELOG.md b/node_modules/libnpmconfig/CHANGELOG.md new file mode 100644 index 0000000000000..a5708cc7ca50d --- /dev/null +++ b/node_modules/libnpmconfig/CHANGELOG.md @@ -0,0 +1,51 @@ +# Change Log + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + + +## [1.2.1](https://github.com/npm/libnpmconfig/compare/v1.2.0...v1.2.1) (2018-11-13) + + +### Bug Fixes + +* **proj:** make sure proj object exists ([8fe2663](https://github.com/npm/libnpmconfig/commit/8fe2663)) + + + + +# [1.2.0](https://github.com/npm/libnpmconfig/compare/v1.1.1...v1.2.0) (2018-11-13) + + +### Features + +* **cache:** improved cache parsing/handling ([63ba3bb](https://github.com/npm/libnpmconfig/commit/63ba3bb)) + + + + +## [1.1.1](https://github.com/npm/libnpmconfig/compare/v1.1.0...v1.1.1) (2018-11-04) + + +### Bug Fixes + +* **config:** rework load order and support builtin configs ([5ef1ac5](https://github.com/npm/libnpmconfig/commit/5ef1ac5)) + + + + +# [1.1.0](https://github.com/npm/libnpmconfig/compare/v1.0.0...v1.1.0) (2018-11-04) + + +### Features + +* **userconfig:** allow passing in userconfig from env ([f613877](https://github.com/npm/libnpmconfig/commit/f613877)) + + + + +# 1.0.0 (2018-11-04) + + +### Features + +* **api:** add read() function ([710426b](https://github.com/npm/libnpmconfig/commit/710426b)) diff --git a/node_modules/libnpmconfig/CODE_OF_CONDUCT.md b/node_modules/libnpmconfig/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000000..aeb72f598dcb4 --- /dev/null +++ b/node_modules/libnpmconfig/CODE_OF_CONDUCT.md @@ -0,0 +1,151 @@ +# Code of Conduct + +## When Something Happens + +If you see a Code of Conduct violation, follow these steps: + +1. Let the person know that what they did is not appropriate and ask them to stop and/or edit their message(s) or commits. +2. That person should immediately stop the behavior and correct the issue. +3. If this doesn’t happen, or if you're uncomfortable speaking up, [contact the maintainers](#contacting-maintainers). +4. As soon as available, a maintainer will look into the issue, and take [further action (see below)](#further-enforcement), starting with a warning, then temporary block, then long-term repo or organization ban. + +When reporting, please include any relevant details, links, screenshots, context, or other information that may be used to better understand and resolve the situation. + +**The maintainer team will prioritize the well-being and comfort of the recipients of the violation over the comfort of the violator.** See [some examples below](#enforcement-examples). + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as contributors and maintainers of this project pledge to making participation in our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, technical preferences, nationality, personal appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment include: + + * Using welcoming and inclusive language. + * Being respectful of differing viewpoints and experiences. + * Gracefully accepting constructive feedback. + * Focusing on what is best for the community. 
+ * Showing empathy and kindness towards other community members. + * Encouraging and raising up your peers in the project so you can all bask in hacks and glory. + +Examples of unacceptable behavior by participants include: + + * The use of sexualized language or imagery and unwelcome sexual attention or advances, including when simulated online. The only exception to sexual topics is channels/spaces specifically for topics of sexual identity. + * Casual mention of slavery or indentured servitude and/or false comparisons of one's occupation or situation to slavery. Please consider using or asking about alternate terminology when referring to such metaphors in technology. + * Making light of/making mocking comments about trigger warnings and content warnings. + * Trolling, insulting/derogatory comments, and personal or political attacks. + * Public or private harassment, deliberate intimidation, or threats. + * Publishing others' private information, such as a physical or electronic address, without explicit permission. This includes any sort of "outing" of any aspect of someone's identity without their consent. + * Publishing private screenshots or quotes of interactions in the context of this project without all quoted users' *explicit* consent. + * Publishing of private communication that doesn't have to do with reporting harrassment. + * Any of the above even when [presented as "ironic" or "joking"](https://en.wikipedia.org/wiki/Hipster_racism). + * Any attempt to present "reverse-ism" versions of the above as violations. Examples of reverse-isms are "reverse racism", "reverse sexism", "heterophobia", and "cisphobia". + * Unsolicited explanations under the assumption that someone doesn't already know it. Ask before you teach! Don't assume what people's knowledge gaps are. + * [Feigning or exaggerating surprise](https://www.recurse.com/manual#no-feigned-surprise) when someone admits to not knowing something. + * "[Well-actuallies](https://www.recurse.com/manual#no-well-actuallys)" + * Other conduct which could reasonably be considered inappropriate in a professional or community setting. + +## Scope + +This Code of Conduct applies both within spaces involving this project and in other spaces involving community members. This includes the repository, its Pull Requests and Issue tracker, its Twitter community, private email communications in the context of the project, and any events where members of the project are participating, as well as adjacent communities and venues affecting the project's members. + +Depending on the violation, the maintainers may decide that violations of this code of conduct that have happened outside of the scope of the community may deem an individual unwelcome, and take appropriate action to maintain the comfort and safety of its members. + +### Other Community Standards + +As a project on GitHub, this project is additionally covered by the [GitHub Community Guidelines](https://help.github.com/articles/github-community-guidelines/). + +Additionally, as a project hosted on npm, is is covered by [npm, Inc's Code of Conduct](https://www.npmjs.com/policies/conduct). + +Enforcement of those guidelines after violations overlapping with the above are the responsibility of the entities, and enforcement may happen in any or all of the services/communities. + +## Maintainer Enforcement Process + +Once the maintainers get involved, they will follow a documented series of steps and do their best to preserve the well-being of project members. 
This section covers actual concrete steps. + +### Contacting Maintainers + +You may get in touch with the maintainer team through any of the following methods: + + * Through email: + * [kzm@zkat.tech](mailto:kzm@zkat.tech) (Kat Marchán) + + * Through Twitter: + * [@maybekatz](https://twitter.com/maybekatz) (Kat Marchán) + +### Further Enforcement + +If you've already followed the [initial enforcement steps](#enforcement), these are the steps maintainers will take for further enforcement, as needed: + + 1. Repeat the request to stop. + 2. If the person doubles down, they will have offending messages removed or edited by a maintainers given an official warning. The PR or Issue may be locked. + 3. If the behavior continues or is repeated later, the person will be blocked from participating for 24 hours. + 4. If the behavior continues or is repeated after the temporary block, a long-term (6-12mo) ban will be used. + +On top of this, maintainers may remove any offending messages, images, contributions, etc, as they deem necessary. + +Maintainers reserve full rights to skip any of these steps, at their discretion, if the violation is considered to be a serious and/or immediate threat to the health and well-being of members of the community. These include any threats, serious physical or verbal attacks, and other such behavior that would be completely unacceptable in any social setting that puts our members at risk. + +Members expelled from events or venues with any sort of paid attendance will not be refunded. + +### Who Watches the Watchers? + +Maintainers and other leaders who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. These may include anything from removal from the maintainer team to a permanent ban from the community. + +Additionally, as a project hosted on both GitHub and npm, [their own Codes of Conducts may be applied against maintainers of this project](#other-community-standards), externally of this project's procedures. + +### Enforcement Examples + +#### The Best Case + +The vast majority of situations work out like this. This interaction is common, and generally positive. + +> Alex: "Yeah I used X and it was really crazy!" + +> Patt (not a maintainer): "Hey, could you not use that word? What about 'ridiculous' instead?" + +> Alex: "oh sorry, sure." -> edits old comment to say "it was really confusing!" + +#### The Maintainer Case + +Sometimes, though, you need to get maintainers involved. Maintainers will do their best to resolve conflicts, but people who were harmed by something **will take priority**. + +> Patt: "Honestly, sometimes I just really hate using $library and anyone who uses it probably sucks at their job." + +> Alex: "Whoa there, could you dial it back a bit? There's a CoC thing about attacking folks' tech use like that." + +> Patt: "I'm not attacking anyone, what's your problem?" + +> Alex: "@maintainers hey uh. Can someone look at this issue? Patt is getting a bit aggro. I tried to nudge them about it, but nope." + +> KeeperOfCommitBits: (on issue) "Hey Patt, maintainer here. Could you tone it down? This sort of attack is really not okay in this space." + +> Patt: "Leave me alone I haven't said anything bad wtf is wrong with you." + +> KeeperOfCommitBits: (deletes user's comment), "@patt I mean it. Please refer to the CoC over at (URL to this CoC) if you have questions, but you can consider this an actual warning. 
I'd appreciate it if you reworded your messages in this thread, since they made folks there uncomfortable. Let's try and be kind, yeah?" + +> Patt: "@keeperofbits Okay sorry. I'm just frustrated and I'm kinda burnt out and I guess I got carried away. I'll DM Alex a note apologizing and edit my messages. Sorry for the trouble." + +> KeeperOfCommitBits: "@patt Thanks for that. I hear you on the stress. Burnout sucks :/. Have a good one!" + +#### The Nope Case + +> PepeTheFrog🐸: "Hi, I am a literal actual nazi and I think white supremacists are quite fashionable." + +> Patt: "NOOOOPE. OH NOPE NOPE." + +> Alex: "JFC NO. NOPE. @keeperofbits NOPE NOPE LOOK HERE" + +> KeeperOfCommitBits: "👀 Nope. NOPE NOPE NOPE. 🔥" + +> PepeTheFrog🐸 has been banned from all organization or user repositories belonging to KeeperOfCommitBits. + +## Attribution + +This Code of Conduct was generated using [WeAllJS Code of Conduct Generator](https://npm.im/weallbehave), which is based on the [WeAllJS Code of +Conduct](https://wealljs.org/code-of-conduct), which is itself based on +[Contributor Covenant](http://contributor-covenant.org), version 1.4, available +at +[http://contributor-covenant.org/version/1/4](http://contributor-covenant.org/version/1/4), +and the LGBTQ in Technology Slack [Code of +Conduct](http://lgbtq.technology/coc.html). diff --git a/node_modules/libnpmconfig/CONTRIBUTING.md b/node_modules/libnpmconfig/CONTRIBUTING.md new file mode 100644 index 0000000000000..970c1cf43aca0 --- /dev/null +++ b/node_modules/libnpmconfig/CONTRIBUTING.md @@ -0,0 +1,256 @@ +# Contributing + +## How do I... + +* [Use This Guide](#introduction)? +* Ask or Say Something? 🤔🐛😱 + * [Request Support](#request-support) + * [Report an Error or Bug](#report-an-error-or-bug) + * [Request a Feature](#request-a-feature) +* Make Something? 🤓👩🏽‍💻📜🍳 + * [Project Setup](#project-setup) + * [Contribute Documentation](#contribute-documentation) + * [Contribute Code](#contribute-code) +* Manage Something ✅🙆🏼💃👔 + * [Provide Support on Issues](#provide-support-on-issues) + * [Label Issues](#label-issues) + * [Clean Up Issues and PRs](#clean-up-issues-and-prs) + * [Review Pull Requests](#review-pull-requests) + * [Merge Pull Requests](#merge-pull-requests) + * [Tag a Release](#tag-a-release) + * [Join the Project Team](#join-the-project-team) +* Add a Guide Like This One [To My Project](#attribution)? 🤖😻👻 + +## Introduction + +Thank you so much for your interest in contributing!. All types of contributions are encouraged and valued. See the [table of contents](#toc) for different ways to help and details about how this project handles them!📝 + +Please make sure to read the relevant section before making your contribution! It will make it a lot easier for us maintainers to make the most of it and smooth out the experience for all involved. 💚 + +The [Project Team](#join-the-project-team) looks forward to your contributions. 🙌🏾✨ + +## Request Support + +If you have a question about this project, how to use it, or just need clarification about something: + +* Open an Issue at https://github.com/npm/libnpmconfig/issues +* Provide as much context as you can about what you're running into. +* Provide project and platform versions (nodejs, npm, etc), depending on what seems relevant. If not, please be ready to provide that information if maintainers ask for it. + +Once it's filed: + +* The project team will [label the issue](#label-issues). +* Someone will try to have a response soon. 
+* If you or the maintainers don't respond to an issue for 30 days, the [issue will be closed](#clean-up-issues-and-prs). If you want to come back to it, reply (once, please), and we'll reopen the existing issue. Please avoid filing new issues as extensions of one you already made. + +## Report an Error or Bug + +If you run into an error or bug with the project: + +* Open an Issue at https://github.com/npm/libnpmconfig/issues +* Include *reproduction steps* that someone else can follow to recreate the bug or error on their own. +* Provide project and platform versions (nodejs, npm, etc), depending on what seems relevant. If not, please be ready to provide that information if maintainers ask for it. + +Once it's filed: + +* The project team will [label the issue](#label-issues). +* A team member will try to reproduce the issue with your provided steps. If there are no repro steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as `needs-repro`. Bugs with the `needs-repro` tag will not be addressed until they are reproduced. +* If the team is able to reproduce the issue, it will be marked `needs-fix`, as well as possibly other tags (such as `critical`), and the issue will be left to be [implemented by someone](#contribute-code). +* If you or the maintainers don't respond to an issue for 30 days, the [issue will be closed](#clean-up-issues-and-prs). If you want to come back to it, reply (once, please), and we'll reopen the existing issue. Please avoid filing new issues as extensions of one you already made. +* `critical` issues may be left open, depending on perceived immediacy and severity, even past the 30 day deadline. + +## Request a Feature + +If the project doesn't do something you need or want it to do: + +* Open an Issue at https://github.com/npm/libnpmconfig/issues +* Provide as much context as you can about what you're running into. +* Please try and be clear about why existing features and alternatives would not work for you. + +Once it's filed: + +* The project team will [label the issue](#label-issues). +* The project team will evaluate the feature request, possibly asking you more questions to understand its purpose and any relevant requirements. If the issue is closed, the team will convey their reasoning and suggest an alternative path forward. +* If the feature request is accepted, it will be marked for implementation with `feature-accepted`, which can then be done by either by a core team member or by anyone in the community who wants to [contribute code](#contribute-code). + +Note: The team is unlikely to be able to accept every single feature request that is filed. Please understand if they need to say no. + +## Project Setup + +So you wanna contribute some code! That's great! This project uses GitHub Pull Requests to manage contributions, so [read up on how to fork a GitHub project and file a PR](https://guides.github.com/activities/forking) if you've never done it before. + +If this seems like a lot or you aren't able to do all this setup, you might also be able to [edit the files directly](https://help.github.com/articles/editing-files-in-another-user-s-repository/) without having to do any of this setup. Yes, [even code](#contribute-code). 
+ +If you want to go the usual route and run the project locally, though: + +* [Install Node.js](https://nodejs.org/en/download/) +* [Fork the project](https://guides.github.com/activities/forking/#fork) + +Then in your terminal: +* `cd path/to/your/clone` +* `npm install` +* `npm test` + +And you should be ready to go! + +## Contribute Documentation + +Documentation is a super important, critical part of this project. Docs are how we keep track of what we're doing, how, and why. It's how we stay on the same page about our policies. And it's how we tell others everything they need in order to be able to use this project -- or contribute to it. So thank you in advance. + +Documentation contributions of any size are welcome! Feel free to file a PR even if you're just rewording a sentence to be more clear, or fixing a spelling mistake! + +To contribute documentation: + +* [Set up the project](#project-setup). +* Edit or add any relevant documentation. +* Make sure your changes are formatted correctly and consistently with the rest of the documentation. +* Re-read what you wrote, and run a spellchecker on it to make sure you didn't miss anything. +* In your commit message(s), begin the first line with `docs: `. For example: `docs: Adding a doc contrib section to CONTRIBUTING.md`. +* Write clear, concise commit message(s) using [conventional-changelog format](https://github.com/conventional-changelog/conventional-changelog-angular/blob/master/convention.md). Documentation commits should use `docs(): `. +* Go to https://github.com/npm/libnpmconfig/pulls and open a new pull request with your changes. +* If your PR is connected to an open issue, add a line in your PR's description that says `Fixes: #123`, where `#123` is the number of the issue you're fixing. + +Once you've filed the PR: + +* One or more maintainers will use GitHub's review feature to review your PR. +* If the maintainer asks for any changes, edit your changes, push, and ask for another review. +* If the maintainer decides to pass on your PR, they will thank you for the contribution and explain why they won't be accepting the changes. That's ok! We still really appreciate you taking the time to do it, and we don't take that lightly. 💚 +* If your PR gets accepted, it will be marked as such, and merged into the `latest` branch soon after. Your contribution will be distributed to the masses next time the maintainers [tag a release](#tag-a-release) + +## Contribute Code + +We like code commits a lot! They're super handy, and they keep the project going and doing the work it needs to do to be useful to others. + +Code contributions of just about any size are acceptable! + +The main difference between code contributions and documentation contributions is that contributing code requires inclusion of relevant tests for the code being added or changed. Contributions without accompanying tests will be held off until a test is added, unless the maintainers consider the specific tests to be either impossible, or way too much of a burden for such a contribution. + +To contribute code: + +* [Set up the project](#project-setup). +* Make any necessary changes to the source code. +* Include any [additional documentation](#contribute-documentation) the changes might need. +* Write tests that verify that your contribution works as expected. +* Write clear, concise commit message(s) using [conventional-changelog format](https://github.com/conventional-changelog/conventional-changelog-angular/blob/master/convention.md). 
+* Dependency updates, additions, or removals must be in individual commits, and the message must use the format: `(deps): PKG@VERSION`, where `` is any of the usual `conventional-changelog` prefixes, at your discretion. +* Go to https://github.com/npm/libnpmconfig/pulls and open a new pull request with your changes. +* If your PR is connected to an open issue, add a line in your PR's description that says `Fixes: #123`, where `#123` is the number of the issue you're fixing. + +Once you've filed the PR: + +* Barring special circumstances, maintainers will not review PRs until all checks pass (Travis, AppVeyor, etc). +* One or more maintainers will use GitHub's review feature to review your PR. +* If the maintainer asks for any changes, edit your changes, push, and ask for another review. Additional tags (such as `needs-tests`) will be added depending on the review. +* If the maintainer decides to pass on your PR, they will thank you for the contribution and explain why they won't be accepting the changes. That's ok! We still really appreciate you taking the time to do it, and we don't take that lightly. 💚 +* If your PR gets accepted, it will be marked as such, and merged into the `latest` branch soon after. Your contribution will be distributed to the masses next time the maintainers [tag a release](#tag-a-release) + +## Provide Support on Issues + +[Needs Collaborator](#join-the-project-team): none + +Helping out other users with their questions is a really awesome way of contributing to any community. It's not uncommon for most of the issues on an open source projects being support-related questions by users trying to understand something they ran into, or find their way around a known bug. + +Sometimes, the `support` label will be added to things that turn out to actually be other things, like bugs or feature requests. In that case, suss out the details with the person who filed the original issue, add a comment explaining what the bug is, and change the label from `support` to `bug` or `feature`. If you can't do this yourself, @mention a maintainer so they can do it. + +In order to help other folks out with their questions: + +* Go to the issue tracker and [filter open issues by the `support` label](https://github.com/npm/libnpmconfig/issues?q=is%3Aopen+is%3Aissue+label%3Asupport). +* Read through the list until you find something that you're familiar enough with to give an answer to. +* Respond to the issue with whatever details are needed to clarify the question, or get more details about what's going on. +* Once the discussion wraps up and things are clarified, either close the issue, or ask the original issue filer (or a maintainer) to close it for you. + +Some notes on picking up support issues: + +* Avoid responding to issues you don't know you can answer accurately. +* As much as possible, try to refer to past issues with accepted answers. Link to them from your replies with the `#123` format. +* Be kind and patient with users -- often, folks who have run into confusing things might be upset or impatient. This is ok. Try to understand where they're coming from, and if you're too uncomfortable with the tone, feel free to stay away or withdraw from the issue. (note: if the user is outright hostile or is violating the CoC, [refer to the Code of Conduct](CODE_OF_CONDUCT.md) to resolve the conflict). + +## Label Issues + +[Needs Collaborator](#join-the-project-team): Issue Tracker + +One of the most important tasks in handling issues is labeling them usefully and accurately. 
All other tasks involving issues ultimately rely on the issue being classified in such a way that relevant parties looking to do their own tasks can find them quickly and easily. + +In order to label issues, [open up the list of unlabeled issues](https://github.com/npm/libnpmconfig/issues?q=is%3Aopen+is%3Aissue+no%3Alabel) and, **from newest to oldest**, read through each one and apply issue labels according to the table below. If you're unsure about what label to apply, skip the issue and try the next one: don't feel obligated to label each and every issue yourself! + +Label | Apply When | Notes +--- | --- | --- +`bug` | Cases where the code (or documentation) is behaving in a way it wasn't intended to. | If something is happening that surprises the *user* but does not go against the way the code is designed, it should use the `enhancement` label. +`critical` | Added to `bug` issues if the problem described makes the code completely unusable in a common situation. | +`documentation` | Added to issues or pull requests that affect any of the documentation for the project. | Can be combined with other labels, such as `bug` or `enhancement`. +`duplicate` | Added to issues or PRs that refer to the exact same issue as another one that's been previously labeled. | Duplicate issues should be marked and closed right away, with a message referencing the issue it's a duplicate of (with `#123`) +`enhancement` | Added to [feature requests](#request-a-feature), PRs, or documentation issues that are purely additive: the code or docs currently work as expected, but a change is being requested or suggested. | +`help wanted` | Applied by [Committers](#join-the-project-team) to issues and PRs that they would like to get outside help for. Generally, this means it's lower priority for the maintainer team to itself implement, but that the community is encouraged to pick up if they so desire | Never applied on first-pass labeling. +`in-progress` | Applied by [Committers](#join-the-project-team) to PRs that are pending some work before they're ready for review. | The original PR submitter should @mention the team member that applied the label once the PR is complete. +`performance` | This issue or PR is directly related to improving performance. | +`refactor` | Added to issues or PRs that deal with cleaning up or modifying the project for the betterment of it. | +`starter` | Applied by [Committers](#join-the-project-team) to issues that they consider good introductions to the project for people who have not contributed before. These are not necessarily "easy", but rather focused around how much context is necessary in order to understand what needs to be done for this project in particular. | Existing project members are expected to stay away from these unless they increase in priority. +`support` | This issue is either asking a question about how to use the project, clarifying the reason for unexpected behavior, or possibly reporting a `bug` but does not have enough detail yet to determine whether it would count as such. | The label should be switched to `bug` if reliable reproduction steps are provided. Issues primarily with unintended configurations of a user's environment are not considered bugs, even if they cause crashes. +`tests` | This issue or PR either requests or adds primarily tests to the project. 
| If a PR is pending tests, that will be handled through the [PR review process](#review-pull-requests) +`wontfix` | Labelers may apply this label to issues that clearly have nothing at all to do with the project or are otherwise entirely outside of its scope/sphere of influence. [Committers](#join-the-project-team) may apply this label and close an issue or PR if they decide to pass on an otherwise relevant issue. | The issue or PR should be closed as soon as the label is applied, and a clear explanation provided of why the label was used. Contributors are free to contest the labeling, but the decision ultimately falls on committers as to whether to accept something or not. + +## Clean Up Issues and PRs + +[Needs Collaborator](#join-the-project-team): Issue Tracker + +Issues and PRs can go stale after a while. Maybe they're abandoned. Maybe the team will just plain not have time to address them any time soon. + +In these cases, they should be closed until they're brought up again or the interaction starts over. + +To clean up issues and PRs: + +* Search the issue tracker for issues or PRs, and add the term `updated:<=YYYY-MM-DD`, where the date is 30 days before today. +* Go through each issue *from oldest to newest*, and close them if **all of the following are true**: + * not opened by a maintainer + * not marked as `critical` + * not marked as `starter` or `help wanted` (these might stick around for a while, in general, as they're intended to be available) + * no explicit messages in the comments asking for it to be left open + * does not belong to a milestone +* Leave a message when closing saying "Cleaning up stale issue. Please reopen or ping us if and when you're ready to resume this. See https://github.com/npm/libnpmconfig/blob/latest/CONTRIBUTING.md#clean-up-issues-and-prs for more details." + +## Review Pull Requests + +[Needs Collaborator](#join-the-project-team): Issue Tracker + +While anyone can comment on a PR, add feedback, etc, PRs are only *approved* by team members with Issue Tracker or higher permissions. + +PR reviews use [GitHub's own review feature](https://help.github.com/articles/about-pull-request-reviews/), which manages comments, approval, and review iteration. + +Some notes: + +* You may ask for minor changes ("nitpicks"), but consider whether they are really blockers to merging: try to err on the side of "approve, with comments". +* *ALL PULL REQUESTS* should be covered by a test: either by a previously-failing test, an existing test that covers the entire functionality of the submitted code, or new tests to verify any new/changed behavior. All tests must also pass and follow established conventions. Test coverage should not drop, unless the specific case is considered reasonable by maintainers. +* Please make sure you're familiar with the code or documentation being updated, unless it's a minor change (spellchecking, minor formatting, etc). You may @mention another project member who you think is better suited for the review, but still provide a non-approving review of your own. +* Be extra kind: people who submit code/doc contributions are putting themselves in a pretty vulnerable position, and have put time and care into what they've done (even if that's not obvious to you!) -- always respond with respect, be understanding, but don't feel like you need to sacrifice your standards for their sake, either. Just don't be a jerk about it? 
+ +## Merge Pull Requests + +[Needs Collaborator](#join-the-project-team): Committer + +TBD - need to hash out a bit more of this process. + +## Tag A Release + +[Needs Collaborator](#join-the-project-team): Committer + +TBD - need to hash out a bit more of this process. The most important bit here is probably that all tests must pass, and tags must use [semver](https://semver.org). + +## Join the Project Team + +### Ways to Join + +There are many ways to contribute! Most of them don't require any official status unless otherwise noted. That said, there's a couple of positions that grant special repository abilities, and this section describes how they're granted and what they do. + +All of the below positions are granted based on the project team's needs, as well as their consensus opinion about whether they would like to work with the person and think that they would fit well into that position. The process is relatively informal, and it's likely that people who express interest in participating can just be granted the permissions they'd like. + +You can spot a collaborator on the repo by looking for the `[Collaborator]` or `[Owner]` tags next to their names. + +Permission | Description +--- | --- +Issue Tracker | Granted to contributors who express a strong interest in spending time on the project's issue tracker. These tasks are mainly [labeling issues](#label-issues), [cleaning up old ones](#clean-up-issues-and-prs), and [reviewing pull requests](#review-pull-requests), as well as all the usual things non-team-member contributors can do. Issue handlers should not merge pull requests, tag releases, or directly commit code themselves: that should still be done through the usual pull request process. Becoming an Issue Handler means the project team trusts you to understand enough of the team's process and context to implement it on the issue tracker. +Committer | Granted to contributors who want to handle the actual pull request merges, tagging new versions, etc. Committers should have a good level of familiarity with the codebase, and enough context to understand the implications of various changes, as well as a good sense of the will and expectations of the project team. +Admin/Owner | Granted to people ultimately responsible for the project, its community, etc. + +## Attribution + +This guide was generated using the WeAllJS `CONTRIBUTING.md` generator. [Make your own](https://npm.im/weallcontribute)! diff --git a/node_modules/libnpmconfig/LICENSE b/node_modules/libnpmconfig/LICENSE new file mode 100644 index 0000000000000..209e4477f39c1 --- /dev/null +++ b/node_modules/libnpmconfig/LICENSE @@ -0,0 +1,13 @@ +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
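The libnpmconfig README and `index.js` that follow document a single `read(cliOpts, builtinOpts)` entry point. As a rough sketch of how that API could be exercised (the registry URL and directory below are invented for illustration, not values from this changeset):

```js
'use strict'
const config = require('libnpmconfig')

// cliOpts (first argument) shadow anything read from npmrc files or the
// environment; builtinOpts.cwd (second argument) replaces process.cwd() as
// the starting point for the project-level npmrc search.
const conf = config.read(
  { registry: 'https://registry.example.com/' }, // hypothetical CLI override
  { cwd: '/path/to/some/project' } // hypothetical starting directory
)

// The result is a figgy-pudding object, so values are read as plain properties.
console.log(conf.registry)
console.log(conf.cache)
```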
diff --git a/node_modules/libnpmconfig/PULL_REQUEST_TEMPLATE b/node_modules/libnpmconfig/PULL_REQUEST_TEMPLATE new file mode 100644 index 0000000000000..9471c6d325f7e --- /dev/null +++ b/node_modules/libnpmconfig/PULL_REQUEST_TEMPLATE @@ -0,0 +1,7 @@ + diff --git a/node_modules/libnpmconfig/README.md b/node_modules/libnpmconfig/README.md new file mode 100644 index 0000000000000..91bac0d171750 --- /dev/null +++ b/node_modules/libnpmconfig/README.md @@ -0,0 +1,40 @@ +# libnpmconfig [![npm version](https://img.shields.io/npm/v/libnpmconfig.svg)](https://npm.im/libnpmconfig) [![license](https://img.shields.io/npm/l/libnpmconfig.svg)](https://npm.im/libnpmconfig) [![Travis](https://img.shields.io/travis/npm/libnpmconfig.svg)](https://travis-ci.org/npm/libnpmconfig) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/libnpmconfig?svg=true)](https://ci.appveyor.com/project/zkat/libnpmconfig) [![Coverage Status](https://coveralls.io/repos/github/npm/libnpmconfig/badge.svg?branch=latest)](https://coveralls.io/github/npm/libnpmconfig?branch=latest) + +[`libnpmconfig`](https://github.com/npm/libnpmconfig) is a Node.js library for +programmatically managing npm's configuration files and data. + +## Example + +```js +const config = require('libnpmconfig') + +console.log('configured registry:', config.read({ + registry: 'https://default.registry/' +})) +// => configured registry: https://registry.npmjs.org +``` + +## Install + +`$ npm install libnpmconfig` + +## Table of Contents + +* [Example](#example) +* [Install](#install) +* [API](#api) + +### API + +##### `> read(cliOpts, builtinOpts)` + +Reads configurations from the filesystem and the env and returns a +[`figgy-pudding`](https://npm.im/figgy-pudding) object with the configuration +values. + +If `cliOpts` is provided, it will be merged with the returned config pudding, +shadowing any read values. These are intended as CLI-provided options. Do your +own `process.argv` parsing, though. + +If `builtinOpts.cwd` is provided, it will be used instead of `process.cwd()` as +the starting point for config searching. diff --git a/node_modules/libnpmconfig/index.js b/node_modules/libnpmconfig/index.js new file mode 100644 index 0000000000000..5501e26b75e7e --- /dev/null +++ b/node_modules/libnpmconfig/index.js @@ -0,0 +1,107 @@ +'use strict' + +const fs = require('fs') +const figgyPudding = require('figgy-pudding') +const findUp = require('find-up') +const ini = require('ini') +const os = require('os') +const path = require('path') + +const NpmConfig = figgyPudding({}, { + // Open up the pudding object. 
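+  // figgy-pudding consults other() for any key not declared in the (empty) spec above;
+  // returning true accepts every key, so arbitrary npmrc/CLI/env settings pass through.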
+ other () { return true } +}) + +const ConfigOpts = figgyPudding({ + cache: { default: path.join(os.homedir(), '.npm') }, + configNames: { default: ['npmrc', '.npmrc'] }, + envPrefix: { default: /^npm_config_/i }, + cwd: { default: () => process.cwd() }, + globalconfig: { + default: () => path.join(getGlobalPrefix(), 'etc', 'npmrc') + }, + userconfig: { default: path.join(os.homedir(), '.npmrc') } +}) + +module.exports.read = getNpmConfig +function getNpmConfig (_opts, _builtin) { + const builtin = ConfigOpts(_builtin) + const env = {} + for (let key of Object.keys(process.env)) { + if (!key.match(builtin.envPrefix)) continue + const newKey = key.toLowerCase() + .replace(builtin.envPrefix, '') + .replace(/(?!^)_/g, '-') + env[newKey] = process.env[key] + } + const cli = NpmConfig(_opts) + const userConfPath = ( + builtin.userconfig || + cli.userconfig || + env.userconfig + ) + const user = userConfPath && maybeReadIni(userConfPath) + const globalConfPath = ( + builtin.globalconfig || + cli.globalconfig || + env.globalconfig + ) + const global = globalConfPath && maybeReadIni(globalConfPath) + const projConfPath = findUp.sync(builtin.configNames, { cwd: builtin.cwd }) + let proj = {} + if (projConfPath && projConfPath !== userConfPath) { + proj = maybeReadIni(projConfPath) + } + const newOpts = NpmConfig(builtin, global, user, proj, env, cli) + if (newOpts.cache) { + return newOpts.concat({ + cache: path.resolve( + ( + (cli.cache || env.cache) + ? builtin.cwd + : proj.cache + ? path.dirname(projConfPath) + : user.cache + ? path.dirname(userConfPath) + : global.cache + ? path.dirname(globalConfPath) + : path.dirname(userConfPath) + ), + newOpts.cache + ) + }) + } else { + return newOpts + } +} + +function maybeReadIni (f) { + let txt + try { + txt = fs.readFileSync(f, 'utf8') + } catch (err) { + if (err.code === 'ENOENT') { + return '' + } else { + throw err + } + } + return ini.parse(txt) +} + +function getGlobalPrefix () { + if (process.env.PREFIX) { + return process.env.PREFIX + } else if (process.platform === 'win32') { + // c:\node\node.exe --> prefix=c:\node\ + return path.dirname(process.execPath) + } else { + // /usr/local/bin/node --> prefix=/usr/local + let pref = path.dirname(path.dirname(process.execPath)) + // destdir only is respected on Unix + if (process.env.DESTDIR) { + pref = path.join(process.env.DESTDIR, pref) + } + return pref + } +} diff --git a/node_modules/libnpmconfig/node_modules/find-up/index.js b/node_modules/libnpmconfig/node_modules/find-up/index.js new file mode 100644 index 0000000000000..8e83819cea5a9 --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/find-up/index.js @@ -0,0 +1,46 @@ +'use strict'; +const path = require('path'); +const locatePath = require('locate-path'); + +module.exports = (filename, opts = {}) => { + const startDir = path.resolve(opts.cwd || ''); + const {root} = path.parse(startDir); + + const filenames = [].concat(filename); + + return new Promise(resolve => { + (function find(dir) { + locatePath(filenames, {cwd: dir}).then(file => { + if (file) { + resolve(path.join(dir, file)); + } else if (dir === root) { + resolve(null); + } else { + find(path.dirname(dir)); + } + }); + })(startDir); + }); +}; + +module.exports.sync = (filename, opts = {}) => { + let dir = path.resolve(opts.cwd || ''); + const {root} = path.parse(dir); + + const filenames = [].concat(filename); + + // eslint-disable-next-line no-constant-condition + while (true) { + const file = locatePath.sync(filenames, {cwd: dir}); + + if (file) { + return path.join(dir, 
file); + } + + if (dir === root) { + return null; + } + + dir = path.dirname(dir); + } +}; diff --git a/node_modules/libnpmconfig/node_modules/find-up/license b/node_modules/libnpmconfig/node_modules/find-up/license new file mode 100644 index 0000000000000..e7af2f77107d7 --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/find-up/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/libnpmconfig/node_modules/find-up/package.json b/node_modules/libnpmconfig/node_modules/find-up/package.json new file mode 100644 index 0000000000000..d18dba3f17cec --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/find-up/package.json @@ -0,0 +1,82 @@ +{ + "_from": "find-up@^3.0.0", + "_id": "find-up@3.0.0", + "_inBundle": false, + "_integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "_location": "/libnpmconfig/find-up", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "find-up@^3.0.0", + "name": "find-up", + "escapedName": "find-up", + "rawSpec": "^3.0.0", + "saveSpec": null, + "fetchSpec": "^3.0.0" + }, + "_requiredBy": [ + "/libnpmconfig" + ], + "_resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "_shasum": "49169f1d7993430646da61ecc5ae355c21c97b73", + "_spec": "find-up@^3.0.0", + "_where": "/Users/zkat/Documents/code/work/npm/node_modules/libnpmconfig", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/find-up/issues" + }, + "bundleDependencies": false, + "dependencies": { + "locate-path": "^3.0.0" + }, + "deprecated": false, + "description": "Find a file or directory by walking up parent directories", + "devDependencies": { + "ava": "*", + "tempy": "^0.2.1", + "xo": "*" + }, + "engines": { + "node": ">=6" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/sindresorhus/find-up#readme", + "keywords": [ + "find", + "up", + "find-up", + "findup", + "look-up", + "look", + "file", + "search", + "match", + "package", + "resolve", + "parent", + "parents", + "folder", + "directory", + "dir", + "walk", + "walking", + "path" + ], + "license": "MIT", + "name": "find-up", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/find-up.git" + }, + "scripts": { + "test": "xo && ava" + }, + "version": "3.0.0" +} diff --git 
a/node_modules/libnpmconfig/node_modules/find-up/readme.md b/node_modules/libnpmconfig/node_modules/find-up/readme.md new file mode 100644 index 0000000000000..810ad7ceb5276 --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/find-up/readme.md @@ -0,0 +1,87 @@ +# find-up [![Build Status: Linux and macOS](https://travis-ci.org/sindresorhus/find-up.svg?branch=master)](https://travis-ci.org/sindresorhus/find-up) [![Build Status: Windows](https://ci.appveyor.com/api/projects/status/l0cyjmvh5lq72vq2/branch/master?svg=true)](https://ci.appveyor.com/project/sindresorhus/find-up/branch/master) + +> Find a file or directory by walking up parent directories + + +## Install + +``` +$ npm install find-up +``` + + +## Usage + +``` +/ +└── Users + └── sindresorhus + ├── unicorn.png + └── foo + └── bar + ├── baz + └── example.js +``` + +`example.js` + +```js +const findUp = require('find-up'); + +(async () => { + console.log(await findUp('unicorn.png')); + //=> '/Users/sindresorhus/unicorn.png' + + console.log(await findUp(['rainbow.png', 'unicorn.png'])); + //=> '/Users/sindresorhus/unicorn.png' +})(); +``` + + +## API + +### findUp(filename, [options]) + +Returns a `Promise` for either the filepath or `null` if it couldn't be found. + +### findUp([filenameA, filenameB], [options]) + +Returns a `Promise` for either the first filepath found (by respecting the order) or `null` if none could be found. + +### findUp.sync(filename, [options]) + +Returns a filepath or `null`. + +### findUp.sync([filenameA, filenameB], [options]) + +Returns the first filepath found (by respecting the order) or `null`. + +#### filename + +Type: `string` + +Filename of the file to find. + +#### options + +Type: `Object` + +##### cwd + +Type: `string`
    +Default: `process.cwd()` + +Directory to start from. + + +## Related + +- [find-up-cli](https://github.com/sindresorhus/find-up-cli) - CLI for this module +- [pkg-up](https://github.com/sindresorhus/pkg-up) - Find the closest package.json file +- [pkg-dir](https://github.com/sindresorhus/pkg-dir) - Find the root directory of an npm package +- [resolve-from](https://github.com/sindresorhus/resolve-from) - Resolve the path of a module like `require.resolve()` but from a given path + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/libnpmconfig/node_modules/locate-path/index.js b/node_modules/libnpmconfig/node_modules/locate-path/index.js new file mode 100644 index 0000000000000..5aae6ee4ad027 --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/locate-path/index.js @@ -0,0 +1,24 @@ +'use strict'; +const path = require('path'); +const pathExists = require('path-exists'); +const pLocate = require('p-locate'); + +module.exports = (iterable, options) => { + options = Object.assign({ + cwd: process.cwd() + }, options); + + return pLocate(iterable, el => pathExists(path.resolve(options.cwd, el)), options); +}; + +module.exports.sync = (iterable, options) => { + options = Object.assign({ + cwd: process.cwd() + }, options); + + for (const el of iterable) { + if (pathExists.sync(path.resolve(options.cwd, el))) { + return el; + } + } +}; diff --git a/node_modules/libnpmconfig/node_modules/locate-path/license b/node_modules/libnpmconfig/node_modules/locate-path/license new file mode 100644 index 0000000000000..e7af2f77107d7 --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/locate-path/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
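Before the locate-path readme below, here is a small sketch of how the synchronous form of the module shown above behaves; the file names and `cwd` value are invented for illustration:

```js
'use strict';
const locatePath = require('locate-path');

// locatePath.sync() resolves each entry against options.cwd and returns the
// first one that exists on disk, or undefined when none of them do.
const found = locatePath.sync(['.npmrc', 'npmrc'], {cwd: '/some/project'});
console.log(found);
//=> '.npmrc' if /some/project/.npmrc exists, otherwise 'npmrc' or undefined

// The promise-returning form checks the same paths and resolves to the same kind of value.
locatePath(['.npmrc', 'npmrc'], {cwd: '/some/project'}).then(first => {
  console.log(first);
});
```

This mirrors how the vendored find-up above uses it: `findUp.sync` calls `locatePath.sync(filenames, {cwd: dir})` at each directory while walking toward the filesystem root.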
diff --git a/node_modules/libnpmconfig/node_modules/locate-path/package.json b/node_modules/libnpmconfig/node_modules/locate-path/package.json new file mode 100644 index 0000000000000..54600c0c483dc --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/locate-path/package.json @@ -0,0 +1,76 @@ +{ + "_from": "locate-path@^3.0.0", + "_id": "locate-path@3.0.0", + "_inBundle": false, + "_integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "_location": "/libnpmconfig/locate-path", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "locate-path@^3.0.0", + "name": "locate-path", + "escapedName": "locate-path", + "rawSpec": "^3.0.0", + "saveSpec": null, + "fetchSpec": "^3.0.0" + }, + "_requiredBy": [ + "/libnpmconfig/find-up" + ], + "_resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "_shasum": "dbec3b3ab759758071b58fe59fc41871af21400e", + "_spec": "locate-path@^3.0.0", + "_where": "/Users/zkat/Documents/code/work/npm/node_modules/libnpmconfig/node_modules/find-up", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/locate-path/issues" + }, + "bundleDependencies": false, + "dependencies": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + }, + "deprecated": false, + "description": "Get the first path that exists on disk of multiple paths", + "devDependencies": { + "ava": "*", + "xo": "*" + }, + "engines": { + "node": ">=6" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/sindresorhus/locate-path#readme", + "keywords": [ + "locate", + "path", + "paths", + "file", + "files", + "exists", + "find", + "finder", + "search", + "searcher", + "array", + "iterable", + "iterator" + ], + "license": "MIT", + "name": "locate-path", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/locate-path.git" + }, + "scripts": { + "test": "xo && ava" + }, + "version": "3.0.0" +} diff --git a/node_modules/libnpmconfig/node_modules/locate-path/readme.md b/node_modules/libnpmconfig/node_modules/locate-path/readme.md new file mode 100644 index 0000000000000..a1d2e628328c2 --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/locate-path/readme.md @@ -0,0 +1,99 @@ +# locate-path [![Build Status](https://travis-ci.org/sindresorhus/locate-path.svg?branch=master)](https://travis-ci.org/sindresorhus/locate-path) + +> Get the first path that exists on disk of multiple paths + + +## Install + +``` +$ npm install locate-path +``` + + +## Usage + +Here we find the first file that exists on disk, in array order. + +```js +const locatePath = require('locate-path'); + +const files = [ + 'unicorn.png', + 'rainbow.png', // Only this one actually exists on disk + 'pony.png' +]; + +(async () => { + console(await locatePath(files)); + //=> 'rainbow' +})(); +``` + + +## API + +### locatePath(input, [options]) + +Returns a `Promise` for the first path that exists or `undefined` if none exists. + +#### input + +Type: `Iterable` + +Paths to check. + +#### options + +Type: `Object` + +##### concurrency + +Type: `number`
    +Default: `Infinity`
    +Minimum: `1` + +Number of concurrently pending promises. + +##### preserveOrder + +Type: `boolean`
    +Default: `true` + +Preserve `input` order when searching. + +Disable this to improve performance if you don't care about the order. + +##### cwd + +Type: `string`
    +Default: `process.cwd()` + +Current working directory. + +### locatePath.sync(input, [options]) + +Returns the first path that exists or `undefined` if none exists. + +#### input + +Type: `Iterable` + +Paths to check. + +#### options + +Type: `Object` + +##### cwd + +Same as above. + + +## Related + +- [path-exists](https://github.com/sindresorhus/path-exists) - Check if a path exists + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/libnpmconfig/node_modules/p-limit/index.d.ts b/node_modules/libnpmconfig/node_modules/p-limit/index.d.ts new file mode 100644 index 0000000000000..02988aaf89f99 --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/p-limit/index.d.ts @@ -0,0 +1,29 @@ +export interface Limit { + /** + * @param fn - Promise-returning/async function. + * @param arguments - Any arguments to pass through to `fn`. Support for passing arguments on to the `fn` is provided in order to be able to avoid creating unnecessary closures. You probably don't need this optimization unless you're pushing a lot of functions. + * @returns The promise returned by calling `fn(...arguments)`. + */ + ( + fn: (...arguments: Arguments) => PromiseLike | ReturnType, + ...arguments: Arguments + ): Promise; + + /** + * The number of promises that are currently running. + */ + readonly activeCount: number; + + /** + * The number of promises that are waiting to run (i.e. their internal `fn` was not called yet). + */ + readonly pendingCount: number; +} + +/** + * Run multiple promise-returning & async functions with limited concurrency. + * + * @param concurrency - Concurrency limit. Minimum: `1`. + * @returns A `limit` function. + */ +export default function pLimit(concurrency: number): Limit; diff --git a/node_modules/libnpmconfig/node_modules/p-limit/index.js b/node_modules/libnpmconfig/node_modules/p-limit/index.js new file mode 100644 index 0000000000000..d22fbe8a49d0a --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/p-limit/index.js @@ -0,0 +1,52 @@ +'use strict'; +const pTry = require('p-try'); + +const pLimit = concurrency => { + if (concurrency < 1) { + throw new TypeError('Expected `concurrency` to be a number from 1 and up'); + } + + const queue = []; + let activeCount = 0; + + const next = () => { + activeCount--; + + if (queue.length > 0) { + queue.shift()(); + } + }; + + const run = (fn, resolve, ...args) => { + activeCount++; + + const result = pTry(fn, ...args); + + resolve(result); + + result.then(next, next); + }; + + const enqueue = (fn, resolve, ...args) => { + if (activeCount < concurrency) { + run(fn, resolve, ...args); + } else { + queue.push(run.bind(null, fn, resolve, ...args)); + } + }; + + const generator = (fn, ...args) => new Promise(resolve => enqueue(fn, resolve, ...args)); + Object.defineProperties(generator, { + activeCount: { + get: () => activeCount + }, + pendingCount: { + get: () => queue.length + } + }); + + return generator; +}; + +module.exports = pLimit; +module.exports.default = pLimit; diff --git a/node_modules/libnpmconfig/node_modules/p-limit/license b/node_modules/libnpmconfig/node_modules/p-limit/license new file mode 100644 index 0000000000000..e7af2f77107d7 --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/p-limit/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, 
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/libnpmconfig/node_modules/p-limit/package.json b/node_modules/libnpmconfig/node_modules/p-limit/package.json new file mode 100644 index 0000000000000..9c9c7d9ffc077 --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/p-limit/package.json @@ -0,0 +1,83 @@ +{ + "_from": "p-limit@^2.0.0", + "_id": "p-limit@2.2.0", + "_inBundle": false, + "_integrity": "sha512-pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ==", + "_location": "/libnpmconfig/p-limit", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "p-limit@^2.0.0", + "name": "p-limit", + "escapedName": "p-limit", + "rawSpec": "^2.0.0", + "saveSpec": null, + "fetchSpec": "^2.0.0" + }, + "_requiredBy": [ + "/libnpmconfig/p-locate" + ], + "_resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz", + "_shasum": "417c9941e6027a9abcba5092dd2904e255b5fbc2", + "_spec": "p-limit@^2.0.0", + "_where": "/Users/isaacs/dev/npm/cli/node_modules/libnpmconfig/node_modules/p-locate", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/p-limit/issues" + }, + "bundleDependencies": false, + "dependencies": { + "p-try": "^2.0.0" + }, + "deprecated": false, + "description": "Run multiple promise-returning & async functions with limited concurrency", + "devDependencies": { + "ava": "^1.2.1", + "delay": "^4.1.0", + "in-range": "^1.0.0", + "random-int": "^1.0.0", + "time-span": "^2.0.0", + "tsd-check": "^0.3.0", + "xo": "^0.24.0" + }, + "engines": { + "node": ">=6" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "homepage": "https://github.com/sindresorhus/p-limit#readme", + "keywords": [ + "promise", + "limit", + "limited", + "concurrency", + "throttle", + "throat", + "rate", + "batch", + "ratelimit", + "task", + "queue", + "async", + "await", + "promises", + "bluebird" + ], + "license": "MIT", + "name": "p-limit", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/p-limit.git" + }, + "scripts": { + "test": "xo && ava && tsd-check" + }, + "version": "2.2.0" +} diff --git a/node_modules/libnpmconfig/node_modules/p-limit/readme.md b/node_modules/libnpmconfig/node_modules/p-limit/readme.md new file mode 100644 index 0000000000000..b87f3e0c9c315 --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/p-limit/readme.md @@ -0,0 +1,90 @@ +# p-limit [![Build Status](https://travis-ci.org/sindresorhus/p-limit.svg?branch=master)](https://travis-ci.org/sindresorhus/p-limit) + +> Run multiple promise-returning & async functions with limited concurrency + + +## Install + +``` +$ npm 
install p-limit +``` + + +## Usage + +```js +const pLimit = require('p-limit'); + +const limit = pLimit(1); + +const input = [ + limit(() => fetchSomething('foo')), + limit(() => fetchSomething('bar')), + limit(() => doSomething()) +]; + +(async () => { + // Only one promise is run at once + const result = await Promise.all(input); + console.log(result); +})(); +``` + + +## API + +### pLimit(concurrency) + +Returns a `limit` function. + +#### concurrency + +Type: `number`
    +Minimum: `1` + +Concurrency limit. + +### limit(fn, ...args) + +Returns the promise returned by calling `fn(...args)`. + +#### fn + +Type: `Function` + +Promise-returning/async function. + +#### args + +Any arguments to pass through to `fn`. + +Support for passing arguments on to the `fn` is provided in order to be able to avoid creating unnecessary closures. You probably don't need this optimization unless you're pushing a *lot* of functions. + +### limit.activeCount + +The number of promises that are currently running. + +### limit.pendingCount + +The number of promises that are waiting to run (i.e. their internal `fn` was not called yet). + + +## FAQ + +### How is this different from the [`p-queue`](https://github.com/sindresorhus/p-queue) package? + +This package is only about limiting the number of concurrent executions, while `p-queue` is a fully featured queue implementation with lots of different options, introspection, and ability to pause and clear the queue. + + +## Related + +- [p-queue](https://github.com/sindresorhus/p-queue) - Promise queue with concurrency control +- [p-throttle](https://github.com/sindresorhus/p-throttle) - Throttle promise-returning & async functions +- [p-debounce](https://github.com/sindresorhus/p-debounce) - Debounce promise-returning & async functions +- [p-all](https://github.com/sindresorhus/p-all) - Run promise-returning & async functions concurrently with optional limited concurrency +- [More…](https://github.com/sindresorhus/promise-fun) + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/libnpmconfig/node_modules/p-locate/index.js b/node_modules/libnpmconfig/node_modules/p-locate/index.js new file mode 100644 index 0000000000000..4bd08aad19312 --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/p-locate/index.js @@ -0,0 +1,34 @@ +'use strict'; +const pLimit = require('p-limit'); + +class EndError extends Error { + constructor(value) { + super(); + this.value = value; + } +} + +// The input can also be a promise, so we `Promise.resolve()` it +const testElement = (el, tester) => Promise.resolve(el).then(tester); + +// The input can also be a promise, so we `Promise.all()` them both +const finder = el => Promise.all(el).then(val => val[1] === true && Promise.reject(new EndError(val[0]))); + +module.exports = (iterable, tester, opts) => { + opts = Object.assign({ + concurrency: Infinity, + preserveOrder: true + }, opts); + + const limit = pLimit(opts.concurrency); + + // Start all the promises concurrently with optional limit + const items = [...iterable].map(el => [el, limit(testElement, el, tester)]); + + // Check the promises either serially or concurrently + const checkLimit = pLimit(opts.preserveOrder ? 1 : Infinity); + + return Promise.all(items.map(el => checkLimit(finder, el))) + .then(() => {}) + .catch(err => err instanceof EndError ? 
err.value : Promise.reject(err)); +}; diff --git a/node_modules/libnpmconfig/node_modules/p-locate/license b/node_modules/libnpmconfig/node_modules/p-locate/license new file mode 100644 index 0000000000000..e7af2f77107d7 --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/p-locate/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/libnpmconfig/node_modules/p-locate/package.json b/node_modules/libnpmconfig/node_modules/p-locate/package.json new file mode 100644 index 0000000000000..d49e3027e1e68 --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/p-locate/package.json @@ -0,0 +1,83 @@ +{ + "_from": "p-locate@^3.0.0", + "_id": "p-locate@3.0.0", + "_inBundle": false, + "_integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "_location": "/libnpmconfig/p-locate", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "p-locate@^3.0.0", + "name": "p-locate", + "escapedName": "p-locate", + "rawSpec": "^3.0.0", + "saveSpec": null, + "fetchSpec": "^3.0.0" + }, + "_requiredBy": [ + "/libnpmconfig/locate-path" + ], + "_resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "_shasum": "322d69a05c0264b25997d9f40cd8a891ab0064a4", + "_spec": "p-locate@^3.0.0", + "_where": "/Users/zkat/Documents/code/work/npm/node_modules/libnpmconfig/node_modules/locate-path", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/p-locate/issues" + }, + "bundleDependencies": false, + "dependencies": { + "p-limit": "^2.0.0" + }, + "deprecated": false, + "description": "Get the first fulfilled promise that satisfies the provided testing function", + "devDependencies": { + "ava": "*", + "delay": "^3.0.0", + "in-range": "^1.0.0", + "time-span": "^2.0.0", + "xo": "*" + }, + "engines": { + "node": ">=6" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/sindresorhus/p-locate#readme", + "keywords": [ + "promise", + "locate", + "find", + "finder", + "search", + "searcher", + "test", + "array", + "collection", + "iterable", + "iterator", + "race", + "fulfilled", + "fastest", + "async", + "await", + "promises", + "bluebird" + ], + "license": "MIT", + "name": "p-locate", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/p-locate.git" + }, + "scripts": { + "test": "xo && ava" + 
}, + "version": "3.0.0" +} diff --git a/node_modules/libnpmconfig/node_modules/p-locate/readme.md b/node_modules/libnpmconfig/node_modules/p-locate/readme.md new file mode 100644 index 0000000000000..3b0173bc4ea78 --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/p-locate/readme.md @@ -0,0 +1,88 @@ +# p-locate [![Build Status](https://travis-ci.org/sindresorhus/p-locate.svg?branch=master)](https://travis-ci.org/sindresorhus/p-locate) + +> Get the first fulfilled promise that satisfies the provided testing function + +Think of it like an async version of [`Array#find`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Array/find). + + +## Install + +``` +$ npm install p-locate +``` + + +## Usage + +Here we find the first file that exists on disk, in array order. + +```js +const pathExists = require('path-exists'); +const pLocate = require('p-locate'); + +const files = [ + 'unicorn.png', + 'rainbow.png', // Only this one actually exists on disk + 'pony.png' +]; + +(async () => { + const foundPath = await pLocate(files, file => pathExists(file)); + + console.log(foundPath); + //=> 'rainbow' +})(); +``` + +*The above is just an example. Use [`locate-path`](https://github.com/sindresorhus/locate-path) if you need this.* + + +## API + +### pLocate(input, tester, [options]) + +Returns a `Promise` that is fulfilled when `tester` resolves to `true` or the iterable is done, or rejects if any of the promises reject. The fulfilled value is the current iterable value or `undefined` if `tester` never resolved to `true`. + +#### input + +Type: `Iterable` + +#### tester(element) + +Type: `Function` + +Expected to return a `Promise` or boolean. + +#### options + +Type: `Object` + +##### concurrency + +Type: `number`
    +Default: `Infinity`
    +Minimum: `1` + +Number of concurrently pending promises returned by `tester`. + +##### preserveOrder + +Type: `boolean`
    +Default: `true` + +Preserve `input` order when searching. + +Disable this to improve performance if you don't care about the order. + + +## Related + +- [p-map](https://github.com/sindresorhus/p-map) - Map over promises concurrently +- [p-filter](https://github.com/sindresorhus/p-filter) - Filter promises concurrently +- [p-any](https://github.com/sindresorhus/p-any) - Wait for any promise to be fulfilled +- [More…](https://github.com/sindresorhus/promise-fun) + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/libnpmconfig/node_modules/p-try/index.d.ts b/node_modules/libnpmconfig/node_modules/p-try/index.d.ts new file mode 100644 index 0000000000000..2a7319ec2a556 --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/p-try/index.d.ts @@ -0,0 +1,39 @@ +declare const pTry: { + /** + Start a promise chain. + + @param fn - The function to run to start the promise chain. + @param arguments - Arguments to pass to `fn`. + @returns The value of calling `fn(...arguments)`. If the function throws an error, the returned `Promise` will be rejected with that error. + + @example + ``` + import pTry = require('p-try'); + + (async () => { + try { + const value = await pTry(() => { + return synchronousFunctionThatMightThrow(); + }); + console.log(value); + } catch (error) { + console.error(error); + } + })(); + ``` + */ + ( + fn: (...arguments: ArgumentsType) => PromiseLike | ValueType, + ...arguments: ArgumentsType + ): Promise; + + // TODO: remove this in the next major version, refactor the whole definition to: + // declare function pTry( + // fn: (...arguments: ArgumentsType) => PromiseLike | ValueType, + // ...arguments: ArgumentsType + // ): Promise; + // export = pTry; + default: typeof pTry; +}; + +export = pTry; diff --git a/node_modules/libnpmconfig/node_modules/p-try/index.js b/node_modules/libnpmconfig/node_modules/p-try/index.js new file mode 100644 index 0000000000000..db858da29252b --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/p-try/index.js @@ -0,0 +1,9 @@ +'use strict'; + +const pTry = (fn, ...arguments_) => new Promise(resolve => { + resolve(fn(...arguments_)); +}); + +module.exports = pTry; +// TODO: remove this in the next major version +module.exports.default = pTry; diff --git a/node_modules/libnpmconfig/node_modules/p-try/license b/node_modules/libnpmconfig/node_modules/p-try/license new file mode 100644 index 0000000000000..e7af2f77107d7 --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/p-try/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/libnpmconfig/node_modules/p-try/package.json b/node_modules/libnpmconfig/node_modules/p-try/package.json new file mode 100644 index 0000000000000..af14d60c3680e --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/p-try/package.json @@ -0,0 +1,74 @@ +{ + "_from": "p-try@^2.0.0", + "_id": "p-try@2.2.0", + "_inBundle": false, + "_integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "_location": "/libnpmconfig/p-try", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "p-try@^2.0.0", + "name": "p-try", + "escapedName": "p-try", + "rawSpec": "^2.0.0", + "saveSpec": null, + "fetchSpec": "^2.0.0" + }, + "_requiredBy": [ + "/libnpmconfig/p-limit" + ], + "_resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "_shasum": "cb2868540e313d61de58fafbe35ce9004d5540e6", + "_spec": "p-try@^2.0.0", + "_where": "/Users/isaacs/dev/npm/cli/node_modules/libnpmconfig/node_modules/p-limit", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/p-try/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "`Start a promise chain", + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.1", + "xo": "^0.24.0" + }, + "engines": { + "node": ">=6" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "homepage": "https://github.com/sindresorhus/p-try#readme", + "keywords": [ + "promise", + "try", + "resolve", + "function", + "catch", + "async", + "await", + "promises", + "settled", + "ponyfill", + "polyfill", + "shim", + "bluebird" + ], + "license": "MIT", + "name": "p-try", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/p-try.git" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "version": "2.2.0" +} diff --git a/node_modules/libnpmconfig/node_modules/p-try/readme.md b/node_modules/libnpmconfig/node_modules/p-try/readme.md new file mode 100644 index 0000000000000..4d7bd64dfcb8b --- /dev/null +++ b/node_modules/libnpmconfig/node_modules/p-try/readme.md @@ -0,0 +1,58 @@ +# p-try [![Build Status](https://travis-ci.org/sindresorhus/p-try.svg?branch=master)](https://travis-ci.org/sindresorhus/p-try) + +> Start a promise chain + +[How is it useful?](http://cryto.net/~joepie91/blog/2016/05/11/what-is-promise-try-and-why-does-it-matter/) + + +## Install + +``` +$ npm install p-try +``` + + +## Usage + +```js +const pTry = require('p-try'); + +(async () => { + try { + const value = await pTry(() => { + return synchronousFunctionThatMightThrow(); + }); + console.log(value); + } catch (error) { + console.error(error); + } +})(); +``` + + +## API + +### pTry(fn, ...arguments) + +Returns a `Promise` resolved with the value of calling `fn(...arguments)`. If the function throws an error, the returned `Promise` will be rejected with that error. + +Support for passing arguments on to the `fn` is provided in order to be able to avoid creating unnecessary closures. You probably don't need this optimization unless you're pushing a *lot* of functions. + +#### fn + +The function to run to start the promise chain. + +#### arguments + +Arguments to pass to `fn`. 
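To make the argument pass-through described above concrete, a minimal sketch; `parseJson` is a hypothetical stand-in for any function that may throw synchronously, not part of this diff.

```js
const pTry = require('p-try');

const parseJson = (text) => JSON.parse(text);

(async () => {
  try {
    // Equivalent to pTry(() => parseJson('{"ok": true}')), but without
    // allocating an extra closure just to forward the argument.
    const value = await pTry(parseJson, '{"ok": true}');
    console.log(value); //=> { ok: true }
  } catch (error) {
    // A synchronous throw (e.g. malformed JSON) surfaces as a rejection.
    console.error(error);
  }
})();
```

This is the same pattern the vendored `p-limit` above relies on internally when it calls `pTry(fn, ...args)` for each queued task.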
+ + +## Related + +- [p-finally](https://github.com/sindresorhus/p-finally) - `Promise#finally()` ponyfill - Invoked when the promise is settled regardless of outcome +- [More…](https://github.com/sindresorhus/promise-fun) + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/libnpmconfig/package.json b/node_modules/libnpmconfig/package.json new file mode 100644 index 0000000000000..d272290b32fa4 --- /dev/null +++ b/node_modules/libnpmconfig/package.json @@ -0,0 +1,66 @@ +{ + "_from": "libnpmconfig@^1.1.1", + "_id": "libnpmconfig@1.2.1", + "_inBundle": false, + "_integrity": "sha512-9esX8rTQAHqarx6qeZqmGQKBNZR5OIbl/Ayr0qQDy3oXja2iFVQQI81R6GZ2a02bSNZ9p3YOGX1O6HHCb1X7kA==", + "_location": "/libnpmconfig", + "_phantomChildren": { + "path-exists": "3.0.0" + }, + "_requested": { + "type": "range", + "registry": true, + "raw": "libnpmconfig@^1.1.1", + "name": "libnpmconfig", + "escapedName": "libnpmconfig", + "rawSpec": "^1.1.1", + "saveSpec": null, + "fetchSpec": "^1.1.1" + }, + "_requiredBy": [ + "/libnpm" + ], + "_resolved": "https://registry.npmjs.org/libnpmconfig/-/libnpmconfig-1.2.1.tgz", + "_shasum": "c0c2f793a74e67d4825e5039e7a02a0044dfcbc0", + "_spec": "libnpmconfig@^1.1.1", + "_where": "/Users/zkat/Documents/code/work/npm/node_modules/libnpm", + "author": { + "name": "Kat Marchán", + "email": "kzm@zkat.tech" + }, + "bugs": { + "url": "https://github.com/npm/libnpmconfig/issues" + }, + "bundleDependencies": false, + "dependencies": { + "figgy-pudding": "^3.5.1", + "find-up": "^3.0.0", + "ini": "^1.3.5" + }, + "deprecated": false, + "description": "Standalone library for reading/writing/managing npm configurations", + "devDependencies": { + "standard": "*", + "standard-version": "*", + "tap": "*", + "weallbehave": "*", + "weallcontribute": "*" + }, + "homepage": "https://npmjs.com/package/libnpmconfig", + "license": "ISC", + "name": "libnpmconfig", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/libnpmconfig.git" + }, + "scripts": { + "postrelease": "npm publish && git push --follow-tags", + "prerelease": "npm t", + "pretest": "standard", + "release": "standard-version -s", + "test": "tap -J --100 test/*.js", + "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", + "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" + }, + "version": "1.2.1" +} diff --git a/node_modules/libnpmhook/CHANGELOG.md b/node_modules/libnpmhook/CHANGELOG.md index 6fe3e05b5e84e..4121122e0b735 100644 --- a/node_modules/libnpmhook/CHANGELOG.md +++ b/node_modules/libnpmhook/CHANGELOG.md @@ -2,6 +2,41 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ +## [5.0.3](https://github.com/npm/libnpmhook/compare/v5.0.2...v5.0.3) (2019-07-16) + + + + +## [5.0.2](https://github.com/npm/libnpmhook/compare/v5.0.1...v5.0.2) (2018-08-24) + + + + +## [5.0.1](https://github.com/npm/libnpmhook/compare/v5.0.0...v5.0.1) (2018-08-23) + + +### Bug Fixes + +* **deps:** move JSONStream to prod deps ([bb63594](https://github.com/npm/libnpmhook/commit/bb63594)) + + + + +# [5.0.0](https://github.com/npm/libnpmhook/compare/v4.0.1...v5.0.0) (2018-08-21) + + +### Features + +* **api:** overhauled API ([46b271b](https://github.com/npm/libnpmhook/commit/46b271b)) + + +### BREAKING CHANGES + +* **api:** the API for ls() has changed, and rm() no longer errors on 404 + + + ## [4.0.1](https://github.com/npm/libnpmhook/compare/v4.0.0...v4.0.1) (2018-04-09) diff --git a/node_modules/libnpmhook/README.md b/node_modules/libnpmhook/README.md index 0e2f018f2a038..9a13d055317a5 100644 --- a/node_modules/libnpmhook/README.md +++ b/node_modules/libnpmhook/README.md @@ -1,8 +1,20 @@ -# libnpmhook [![npm version](https://img.shields.io/npm/v/libnpmhook.svg)](https://npm.im/libnpmhook) [![license](https://img.shields.io/npm/l/libnpmhook.svg)](https://npm.im/libnpmhook) [![Travis](https://img.shields.io/travis/npm/libnpmhook.svg)](https://travis-ci.org/npm/libnpmhook) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/libnpmhook?svg=true)](https://ci.appveyor.com/project/npm/libnpmhook) [![Coverage Status](https://coveralls.io/repos/github/npm/libnpmhook/badge.svg?branch=latest)](https://coveralls.io/github/npm/libnpmhook?branch=latest) +# libnpmhook [![npm version](https://img.shields.io/npm/v/libnpmhook.svg)](https://npm.im/libnpmhook) [![license](https://img.shields.io/npm/l/libnpmhook.svg)](https://npm.im/libnpmhook) [![Travis](https://img.shields.io/travis/npm/libnpmhook.svg)](https://travis-ci.org/npm/libnpmhook) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/libnpmhook?svg=true)](https://ci.appveyor.com/project/zkat/libnpmhook) [![Coverage Status](https://coveralls.io/repos/github/npm/libnpmhook/badge.svg?branch=latest)](https://coveralls.io/github/npm/libnpmhook?branch=latest) [`libnpmhook`](https://github.com/npm/libnpmhook) is a Node.js library for programmatically managing the npm registry's server-side hooks. +For a more general introduction to managing hooks, see [the introductory blog +post](https://blog.npmjs.org/post/145260155635/introducing-hooks-get-notifications-of-npm). + +## Example + +```js +const hooks = require('libnpmhook') + +console.log(await hooks.ls('mypkg', {token: 'deadbeef'})) +// array of hook objects on `mypkg`. +``` + ## Install `$ npm install libnpmhook` @@ -10,14 +22,246 @@ programmatically managing the npm registry's server-side hooks. ## Table of Contents * [Example](#example) -* [Features](#features) +* [Install](#install) * [API](#api) + * [hook opts](#opts) + * [`add()`](#add) + * [`rm()`](#rm) + * [`ls()`](#ls) + * [`ls.stream()`](#ls-stream) + * [`update()`](#update) + +### API + +#### `opts` for `libnpmhook` commands + +`libnpmhook` uses [`npm-registry-fetch`](https://npm.im/npm-registry-fetch). +All options are passed through directly to that library, so please refer to [its +own `opts` +documentation](https://www.npmjs.com/package/npm-registry-fetch#fetch-options) +for options that can be passed in. -### Example +A couple of options of note for those in a hurry: + +* `opts.token` - can be passed in and will be used as the authentication token for the registry. 
For other ways to pass in auth details, see the n-r-f docs. +* `opts.otp` - certain operations will require an OTP token to be passed in. If a `libnpmhook` command fails with `err.code === EOTP`, please retry the request with `{otp: <2fa token>}` +* `opts.Promise` - If you pass this in, the Promises returned by `libnpmhook` commands will use this Promise class instead. For example: `{Promise: require('bluebird')}` + +#### `> hooks.add(name, endpoint, secret, [opts]) -> Promise` + +`name` is the name of the package, org, or user/org scope to watch. The type is +determined by the name syntax: `'@foo/bar'` and `'foo'` are treated as packages, +`@foo` is treated as a scope, and `~user` is treated as an org name or scope. +Each type will attach to different events. + +The `endpoint` should be a fully-qualified http URL for the endpoint the hook +will send its payload to when it fires. `secret` is a shared secret that the +hook will send to that endpoint to verify that it's actually coming from the +registry hook. + +The returned Promise resolves to the full hook object that was created, +including its generated `id`. + +See also: [`POST +/v1/hooks/hook`](https://github.com/npm/registry/blob/master/docs/hooks/endpoints.md#post-v1hookshook) + +##### Example ```javascript +await hooks.add('~zkat', 'https://zkat.tech/api/added', 'supersekrit', { + token: 'myregistrytoken', + otp: '694207' +}) + +=> + +{ id: '16f7xoal', + username: 'zkat', + name: 'zkat', + endpoint: 'https://zkat.tech/api/added', + secret: 'supersekrit', + type: 'owner', + created: '2018-08-21T20:05:25.125Z', + updated: '2018-08-21T20:05:25.125Z', + deleted: false, + delivered: false, + last_delivery: null, + response_code: 0, + status: 'active' } ``` -### Features +#### `> hooks.find(id, [opts]) -> Promise` -### API +Returns the hook identified by `id`. + +The returned Promise resolves to the full hook object that was found, or error +with `err.code` of `'E404'` if it didn't exist. + +See also: [`GET +/v1/hooks/hook/:id`](https://github.com/npm/registry/blob/master/docs/hooks/endpoints.md#get-v1hookshookid) + +##### Example + +```javascript +await hooks.find('16f7xoal', {token: 'myregistrytoken'}) + +=> + +{ id: '16f7xoal', + username: 'zkat', + name: 'zkat', + endpoint: 'https://zkat.tech/api/added', + secret: 'supersekrit', + type: 'owner', + created: '2018-08-21T20:05:25.125Z', + updated: '2018-08-21T20:05:25.125Z', + deleted: false, + delivered: false, + last_delivery: null, + response_code: 0, + status: 'active' } +``` + +#### `> hooks.rm(id, [opts]) -> Promise` + +Removes the hook identified by `id`. + +The returned Promise resolves to the full hook object that was removed, if it +existed, or `null` if no such hook was there (instead of erroring). + +See also: [`DELETE +/v1/hooks/hook/:id`](https://github.com/npm/registry/blob/master/docs/hooks/endpoints.md#delete-v1hookshookid) + +##### Example + +```javascript +await hooks.rm('16f7xoal', { + token: 'myregistrytoken', + otp: '694207' +}) + +=> + +{ id: '16f7xoal', + username: 'zkat', + name: 'zkat', + endpoint: 'https://zkat.tech/api/added', + secret: 'supersekrit', + type: 'owner', + created: '2018-08-21T20:05:25.125Z', + updated: '2018-08-21T20:05:25.125Z', + deleted: true, + delivered: false, + last_delivery: null, + response_code: 0, + status: 'active' } + +// Repeat it... 
+await hooks.rm('16f7xoal', { + token: 'myregistrytoken', + otp: '694207' +}) + +=> null +``` + +#### `> hooks.update(id, endpoint, secret, [opts]) -> Promise` + +The `id` should be a hook ID from a previously-created hook. + +The `endpoint` should be a fully-qualified http URL for the endpoint the hook +will send its payload to when it fires. `secret` is a shared secret that the +hook will send to that endpoint to verify that it's actually coming from the +registry hook. + +The returned Promise resolves to the full hook object that was updated, if it +existed. Otherwise, it will error with an `'E404'` error code. + +See also: [`PUT +/v1/hooks/hook/:id`](https://github.com/npm/registry/blob/master/docs/hooks/endpoints.md#put-v1hookshookid) + +##### Example + +```javascript +await hooks.update('16fxoal', 'https://zkat.tech/api/other', 'newsekrit', { + token: 'myregistrytoken', + otp: '694207' +}) + +=> + +{ id: '16f7xoal', + username: 'zkat', + name: 'zkat', + endpoint: 'https://zkat.tech/api/other', + secret: 'newsekrit', + type: 'owner', + created: '2018-08-21T20:05:25.125Z', + updated: '2018-08-21T20:14:41.964Z', + deleted: false, + delivered: false, + last_delivery: null, + response_code: 0, + status: 'active' } +``` + +#### `> hooks.ls([opts]) -> Promise` + +Resolves to an array of hook objects associated with the account you're +authenticated as. + +Results can be further filtered with three values that can be passed in through +`opts`: + +* `opts.package` - filter results by package name +* `opts.limit` - maximum number of hooks to return +* `opts.offset` - pagination offset for results (use with `opts.limit`) + +See also: + * [`hooks.ls.stream()`](#ls-stream) + * [`GET +/v1/hooks`](https://github.com/npm/registry/blob/master/docs/hooks/endpoints.md#get-v1hooks) + +##### Example + +```javascript +await hooks.ls({token: 'myregistrytoken'}) + +=> +[ + { id: '16f7xoal', ... }, + { id: 'wnyf98a1', ... }, + ... +] +``` + +#### `> hooks.ls.stream([opts]) -> Stream` + +Returns a stream of hook objects associated with the account you're +authenticated as. The returned stream is a valid `Symbol.asyncIterator` on +`node@>=10`. 
+ +Results can be further filtered with three values that can be passed in through +`opts`: + +* `opts.package` - filter results by package name +* `opts.limit` - maximum number of hooks to return +* `opts.offset` - pagination offset for results (use with `opts.limit`) + +See also: + * [`hooks.ls()`](#ls) + * [`GET +/v1/hooks`](https://github.com/npm/registry/blob/master/docs/hooks/endpoints.md#get-v1hooks) + +##### Example + +```javascript +for await (let hook of hooks.ls.stream({token: 'myregistrytoken'})) { + console.log('found hook:', hook.id) +} + +=> +// outputs: +// found hook: 16f7xoal +// found hook: wnyf98a1 +``` diff --git a/node_modules/libnpmhook/config.js b/node_modules/libnpmhook/config.js deleted file mode 100644 index 864e1ede6af6a..0000000000000 --- a/node_modules/libnpmhook/config.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict' - -const pudding = require('figgy-pudding') - -const NpmHooksConfig = pudding() - -module.exports = config -function config (opts) { - return NpmHooksConfig.apply( - null, - [opts, opts.config].concat([].slice.call(arguments, 1)) - ) -} diff --git a/node_modules/libnpmhook/index.js b/node_modules/libnpmhook/index.js index b59ff842e2545..b489294951dd0 100644 --- a/node_modules/libnpmhook/index.js +++ b/node_modules/libnpmhook/index.js @@ -1,41 +1,80 @@ 'use strict' -const config = require('./config') const fetch = require('npm-registry-fetch') +const figgyPudding = require('figgy-pudding') +const getStream = require('get-stream') +const validate = require('aproba') -module.exports = { - add (name, endpoint, secret, opts) { - let type = 'package' - if (name && name.match(/^@[^/]+$/)) { - type = 'scope' - } - if (name && name[0] === '~') { - type = 'owner' - name = name.substr(1) - } +const HooksConfig = figgyPudding({ + package: {}, + limit: {}, + offset: {}, + Promise: {default: () => Promise} +}) - opts = config({ - method: 'POST', - body: { type, name, endpoint, secret } - }, opts) - return fetch.json('/-/npm/v1/hooks/hook', opts) - }, - - rm (id, opts) { - return fetch.json(`/-/npm/v1/hooks/hook/${encodeURIComponent(id)}`, config({ - method: 'DELETE' - }, opts)) - }, - - ls (pkg, opts) { - return fetch.json('/-/npm/v1/hooks', config({query: pkg && {package: pkg}}, opts)) - .then(json => json.objects) - }, - - update (id, endpoint, secret, opts) { - return fetch.json(`/-/npm/v1/hooks/hook/${encodeURIComponent(id)}`, config({ - method: 'PUT', - body: {endpoint, secret} - }, opts)) +const eu = encodeURIComponent +const cmd = module.exports = {} +cmd.add = (name, endpoint, secret, opts) => { + opts = HooksConfig(opts) + validate('SSSO', [name, endpoint, secret, opts]) + let type = 'package' + if (name.match(/^@[^/]+$/)) { + type = 'scope' } + if (name[0] === '~') { + type = 'owner' + name = name.substr(1) + } + return fetch.json('/-/npm/v1/hooks/hook', opts.concat({ + method: 'POST', + body: { type, name, endpoint, secret } + })) +} + +cmd.rm = (id, opts) => { + opts = HooksConfig(opts) + validate('SO', [id, opts]) + return fetch.json(`/-/npm/v1/hooks/hook/${eu(id)}`, opts.concat({ + method: 'DELETE' + }, opts)).catch(err => { + if (err.code === 'E404') { + return null + } else { + throw err + } + }) +} + +cmd.update = (id, endpoint, secret, opts) => { + opts = HooksConfig(opts) + validate('SSSO', [id, endpoint, secret, opts]) + return fetch.json(`/-/npm/v1/hooks/hook/${eu(id)}`, opts.concat({ + method: 'PUT', + body: {endpoint, secret} + }, opts)) +} + +cmd.find = (id, opts) => { + opts = HooksConfig(opts) + validate('SO', [id, opts]) + return 
fetch.json(`/-/npm/v1/hooks/hook/${eu(id)}`, opts) +} + +cmd.ls = (opts) => { + return getStream.array(cmd.ls.stream(opts)) +} + +cmd.ls.stream = (opts) => { + opts = HooksConfig(opts) + const {package: pkg, limit, offset} = opts + validate('S|Z', [pkg]) + validate('N|Z', [limit]) + validate('N|Z', [offset]) + return fetch.json.stream('/-/npm/v1/hooks', 'objects.*', opts.concat({ + query: { + package: pkg, + limit, + offset + } + })) } diff --git a/node_modules/libnpmhook/node_modules/npm-registry-fetch/CHANGELOG.md b/node_modules/libnpmhook/node_modules/npm-registry-fetch/CHANGELOG.md deleted file mode 100644 index 8f9366551fb97..0000000000000 --- a/node_modules/libnpmhook/node_modules/npm-registry-fetch/CHANGELOG.md +++ /dev/null @@ -1,104 +0,0 @@ -# Change Log - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - - -## [3.1.1](https://github.com/npm/registry-fetch/compare/v3.1.0...v3.1.1) (2018-04-09) - - - - -# [3.1.0](https://github.com/npm/registry-fetch/compare/v3.0.0...v3.1.0) (2018-04-09) - - -### Features - -* **config:** support no-proxy and https-proxy options ([9aa906b](https://github.com/npm/registry-fetch/commit/9aa906b)) - - - - -# [3.0.0](https://github.com/npm/registry-fetch/compare/v2.1.0...v3.0.0) (2018-04-09) - - -### Bug Fixes - -* **api:** pacote integration-related fixes ([a29de4f](https://github.com/npm/registry-fetch/commit/a29de4f)) -* **config:** stop caring about opts.config ([5856a6f](https://github.com/npm/registry-fetch/commit/5856a6f)) - - -### BREAKING CHANGES - -* **config:** opts.config is no longer supported. Pass the options down in opts itself. - - - - -# [2.1.0](https://github.com/npm/registry-fetch/compare/v2.0.0...v2.1.0) (2018-04-08) - - -### Features - -* **token:** accept opts.token for opts._authToken ([108c9f0](https://github.com/npm/registry-fetch/commit/108c9f0)) - - - - -# [2.0.0](https://github.com/npm/registry-fetch/compare/v1.1.1...v2.0.0) (2018-04-08) - - -### meta - -* drop support for node@4 ([758536e](https://github.com/npm/registry-fetch/commit/758536e)) - - -### BREAKING CHANGES - -* node@4 is no longer supported - - - - -## [1.1.1](https://github.com/npm/registry-fetch/compare/v1.1.0...v1.1.1) (2018-04-06) - - - - -# [1.1.0](https://github.com/npm/registry-fetch/compare/v1.0.1...v1.1.0) (2018-03-16) - - -### Features - -* **specs:** can use opts.spec to trigger pickManifest ([85c4ac9](https://github.com/npm/registry-fetch/commit/85c4ac9)) - - - - -## [1.0.1](https://github.com/npm/registry-fetch/compare/v1.0.0...v1.0.1) (2018-03-16) - - -### Bug Fixes - -* **query:** oops console.log ([870e4f5](https://github.com/npm/registry-fetch/commit/870e4f5)) - - - - -# 1.0.0 (2018-03-16) - - -### Bug Fixes - -* **auth:** get auth working with all the little details ([84b94ba](https://github.com/npm/registry-fetch/commit/84b94ba)) -* **deps:** add bluebird as an actual dep ([1286e31](https://github.com/npm/registry-fetch/commit/1286e31)) -* **errors:** Unknown auth errors use default code ([#1](https://github.com/npm/registry-fetch/issues/1)) ([3d91b93](https://github.com/npm/registry-fetch/commit/3d91b93)) -* **standard:** remove args from invocation ([9620a0a](https://github.com/npm/registry-fetch/commit/9620a0a)) - - -### Features - -* **api:** baseline kinda-working API impl ([bf91f9f](https://github.com/npm/registry-fetch/commit/bf91f9f)) -* **body:** automatic handling of different opts.body values 
([f3b97db](https://github.com/npm/registry-fetch/commit/f3b97db)) -* **config:** nicer input config input handling ([b9ce21d](https://github.com/npm/registry-fetch/commit/b9ce21d)) -* **opts:** use figgy-pudding for opts handling ([0abd527](https://github.com/npm/registry-fetch/commit/0abd527)) -* **query:** add query utility support ([65ea8b1](https://github.com/npm/registry-fetch/commit/65ea8b1)) diff --git a/node_modules/libnpmhook/node_modules/npm-registry-fetch/README.md b/node_modules/libnpmhook/node_modules/npm-registry-fetch/README.md deleted file mode 100644 index 3d55eef6de85b..0000000000000 --- a/node_modules/libnpmhook/node_modules/npm-registry-fetch/README.md +++ /dev/null @@ -1,549 +0,0 @@ -# npm-registry-fetch [![npm version](https://img.shields.io/npm/v/npm-registry-fetch.svg)](https://npm.im/npm-registry-fetch) [![license](https://img.shields.io/npm/l/npm-registry-fetch.svg)](https://npm.im/npm-registry-fetch) [![Travis](https://img.shields.io/travis/npm/npm-registry-fetch/latest.svg)](https://travis-ci.org/npm/npm-registry-fetch) [![AppVeyor](https://img.shields.io/appveyor/ci/zkat/npm-registry-fetch/latest.svg)](https://ci.appveyor.com/project/npm/npm-registry-fetch) [![Coverage Status](https://coveralls.io/repos/github/npm/npm-registry-fetch/badge.svg?branch=latest)](https://coveralls.io/github/npm/npm-registry-fetch?branch=latest) - -[`npm-registry-fetch`](https://github.com/npm/npm-registry-fetch) is a Node.js -library that implements a `fetch`-like API for accessing npm registry APIs -consistently. It's able to consume npm-style configuration values and has all -the necessary logic for picking registries, handling scopes, and dealing with -authentication details built-in. - -This package is meant to replace the older -[`npm-registry-client`](https://npm.im/npm-registry-client). - -## Example - -```javascript -const npmFetch = require('npm-registry-fetch') - -console.log( - await npmFetch.json('/-/ping') -) -``` - -## Table of Contents - -* [Installing](#install) -* [Example](#example) -* [Contributing](#contributing) -* [API](#api) - * [`fetch`](#fetch) - * [`fetch.json`](#fetch-json) - * [`fetch` options](#fetch-opts) - -### Install - -`$ npm install npm-registry-fetch` - -### Contributing - -The npm team enthusiastically welcomes contributions and project participation! -There's a bunch of things you can do if you want to contribute! The [Contributor -Guide](CONTRIBUTING.md) has all the information you need for everything from -reporting bugs to contributing entire new features. Please don't hesitate to -jump in if you'd like to, or even ask us questions if something isn't clear. - -All participants and maintainers in this project are expected to follow [Code of -Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other. - -Please refer to the [Changelog](CHANGELOG.md) for project history details, too. - -Happy hacking! - -### API - -#### `> fetch(url, [opts]) -> Promise` - -Performs a request to a given URL. - -The URL can be either a full URL, or a path to one. The appropriate registry -will be automatically picked if only a URL path is given. - -For available options, please see the section on [`fetch` options](#fetch-opts). - -##### Example - -```javascript -const res = await fetch('/-/ping') -console.log(res.headers) -res.on('data', d => console.log(d.toString('utf8'))) -``` - -#### `> fetch.json(url, [opts]) -> Promise` - -Performs a request to a given registry URL, parses the body of the response as -JSON, and returns it as its final value. 
This is a utility shorthand for -`fetch(url).then(res => res.json())`. - -For available options, please see the section on [`fetch` options](#fetch-opts). - -##### Example - -```javascript -const res = await fetch.json('/-/ping') -console.log(res) // Body parsed as JSON -``` - -#### `fetch` Options - -Fetch options are optional, and can be passed in as either a Map-like object -(one with a `.get()` method), a plain javascript object, or a -[`figgy-pudding`](https://npm.im/figgy-pudding) instance. - -##### `opts.agent` - -* Type: http.Agent -* Default: an appropriate agent based on URL protocol and proxy settings - -An [`Agent`](https://nodejs.org/api/http.html#http_class_http_agent) instance to -be shared across requests. This allows multiple concurrent `fetch` requests to -happen on the same socket. - -You do _not_ need to provide this option unless you want something particularly -specialized, since proxy configurations and http/https agents are already -automatically managed internally when this option is not passed through. - -##### `opts.body` - -* Type: Buffer | Stream | Object -* Default: null - -Request body to send through the outgoing request. Buffers and Streams will be -passed through as-is, with a default `content-type` of -`application/octet-stream`. Plain JavaScript objects will be `JSON.stringify`ed -and the `content-type` will default to `application/json`. - -Use [`opts.headers`](#opts-headers) to set the content-type to something else. - -##### `opts.ca` - -* Type: String, Array, or null -* Default: null - -The Certificate Authority signing certificate that is trusted for SSL -connections to the registry. Values should be in PEM format (Windows calls it -"Base-64 encoded X.509 (.CER)") with newlines replaced by the string `'\n'`. For -example: - -``` -{ - ca: '-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----' -} -``` - -Set to `null` to only allow "known" registrars, or to a specific CA cert -to trust only that specific signing authority. - -Multiple CAs can be trusted by specifying an array of certificates instead of a -single string. - -See also [`opts.strict-ssl`](#opts-strict-ssl), [`opts.ca`](#opts-ca) and -[`opts.key`](#opts-key) - -##### `opts.cache` - -* Type: path -* Default: null - -The location of the http cache directory. If provided, certain cachable requests -will be cached according to [IETF RFC 7234](https://tools.ietf.org/html/rfc7234) -rules. This will speed up future requests, as well as make the cached data -available offline if necessary/requested. - -See also [`offline`](#opts-offline), [`prefer-offline`](#opts-prefer-offline), -and [`prefer-online`](#opts-prefer-online). - -##### `opts.cert` - -* Type: String -* Default: null - -A client certificate to pass when accessing the registry. Values should be in -PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with newlines -replaced by the string `'\n'`. For example: - -``` -{ - cert: '-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----' -} -``` - -It is _not_ the path to a certificate file (and there is no "certfile" option). - -See also: [`opts.ca`](#opts-ca) and [`opts.key`](#opts-key) - -##### `opts.fetch-retries` - -* Type: Number -* Default: 2 - -The "retries" config for [`retry`](https://npm.im/retry) to use when fetching -packages from the registry. - -See also [`opts.retry`](#opts-retry) to provide all retry options as a single -object. 
- -##### `opts.fetch-retry-factor` - -* Type: Number -* Default: 10 - -The "factor" config for [`retry`](https://npm.im/retry) to use when fetching -packages. - -See also [`opts.retry`](#opts-retry) to provide all retry options as a single -object. - -##### `opts.fetch-retry-mintimeout` - -* Type: Number -* Default: 10000 (10 seconds) - -The "minTimeout" config for [`retry`](https://npm.im/retry) to use when fetching -packages. - -See also [`opts.retry`](#opts-retry) to provide all retry options as a single -object. - -##### `opts.fetch-retry-maxtimeout` - -* Type: Number -* Default: 60000 (1 minute) - -The "maxTimeout" config for [`retry`](https://npm.im/retry) to use when fetching -packages. - -See also [`opts.retry`](#opts-retry) to provide all retry options as a single -object. - -##### `opts.headers` - -* Type: Object -* Default: null - -Additional headers for the outgoing request. This option can also be used to -override headers automatically generated by `npm-registry-fetch`, such as -`Content-Type`. - -##### `opts.integrity` - -* Type: String | [SRI object](https://npm.im/ssri) -* Default: null - -If provided, the response body's will be verified against this integrity string, -using [`ssri`](https://npm.im/ssri). If verification succeeds, the response will -complete as normal. If verification fails, the response body will error with an -`EINTEGRITY` error. - -Body integrity is only verified if the body is actually consumed to completion -- -that is, if you use `res.json()`/`res.buffer()`, or if you consume the default -`res` stream data to its end. - -Cached data will have its integrity automatically verified using the -previously-generated integrity hash for the saved request information, so -`EINTEGRITY` errors can happen if [`opts.cache`](#opts-cache) is used, even if -`opts.integrity` is not passed in. - -##### `opts.is-from-ci` - -* Alias: `opts.isFromCI` -* Type: Boolean -* Default: Based on environment variables - -This is used to populate the `npm-in-ci` request header sent to the registry. - -##### `opts.key` - -* Type: String -* Default: null - -A client key to pass when accessing the registry. Values should be in PEM -format with newlines replaced by the string `'\n'`. For example: - -``` -{ - key: '-----BEGIN PRIVATE KEY-----\nXXXX\nXXXX\n-----END PRIVATE KEY-----' -} -``` - -It is _not_ the path to a key file (and there is no "keyfile" option). - -See also: [`opts.ca`](#opts-ca) and [`opts.cert`](#opts-cert) - -##### `opts.local-address` - -* Type: IP Address String -* Default: null - -The IP address of the local interface to use when making connections -to the registry. - -See also [`opts.proxy`](#opts-proxy) - -##### `opts.log` - -* Type: [`npmlog`](https://npm.im/npmlog)-like -* Default: null - -Logger object to use for logging operation details. Must have the same methods -as `npmlog`. - -##### `opts.maxsockets` - -* Alias: `opts.max-sockets` -* Type: Integer -* Default: 12 - -Maximum number of sockets to keep open during requests. Has no effect if -[`opts.agent`](#opts-agent) is used. - -##### `opts.method` - -* Type: String -* Default: 'GET' - -HTTP method to use for the outgoing request. Case-insensitive. - -##### `opts.noproxy` - -* Type: Boolean -* Default: process.env.NOPROXY - -If true, proxying will be disabled even if [`opts.proxy`](#opts-proxy) is used. - -##### `opts.npm-session` - -* Alias: `opts.npmSession` -* Type: String -* Default: null - -If provided, will be sent in the `npm-session` header. 
This header is used by -the npm registry to identify individual user sessions (usually individual -invocations of the CLI). - -##### `opts.offline` - -* Type: Boolean -* Default: false - -Force offline mode: no network requests will be done during install. To allow -`npm-registry-fetch` to fill in missing cache data, see -[`opts.prefer-offline`](#opts-prefer-offline). - -This option is only really useful if you're also using -[`opts.cache`](#opts-cache). - -##### `opts.otp` - -* Type: Number | String -* Default: null - -This is a one-time password from a two-factor authenticator. It is required for -certain registry interactions when two-factor auth is enabled for a user -account. - -##### `opts.password` - -* Alias: _password -* Type: String -* Default: null - -Password used for basic authentication. For the more modern authentication -method, please use the (more secure) [`opts.token`](#opts-token) - -Can optionally be scoped to a registry by using a "nerf dart" for that registry. -That is: - -``` -{ - '//registry.npmjs.org/:password': 't0k3nH34r' -} -``` - -See also [`opts.username`](#opts-username) - -##### `opts.prefer-offline` - -* Type: Boolean -* Default: false - -If true, staleness checks for cached data will be bypassed, but missing data -will be requested from the server. To force full offline mode, use -[`opts.offline`](#opts-offline). - -This option is generally only useful if you're also using -[`opts.cache`](#opts-cache). - -##### `opts.prefer-online` - -* Type: Boolean -* Default: false - -If true, staleness checks for cached data will be forced, making the CLI look -for updates immediately even for fresh package data. - -This option is generally only useful if you're also using -[`opts.cache`](#opts-cache). - - -##### `opts.project-scope` - -* Alias: `opts.projectScope` -* Type: String -* Default: null - -If provided, will be sent in the `npm-scope` header. This header is used by the -npm registry to identify the toplevel package scope that a particular project -installation is using. - -##### `opts.proxy` - -* Type: url -* Default: null - -A proxy to use for outgoing http requests. If not passed in, the `HTTP(S)_PROXY` -environment variable will be used. - -##### `opts.query` - -* Type: String | Object -* Default: null - -If provided, the request URI will have a query string appended to it using this -query. If `opts.query` is an object, it will be converted to a query string -using -[`querystring.stringify()`](https://nodejs.org/api/querystring.html#querystring_querystring_stringify_obj_sep_eq_options). - -If the request URI already has a query string, it will be merged with -`opts.query`, preferring `opts.query` values. - -##### `opts.refer` - -* Alias: `opts.referer` -* Type: String -* Default: null - -Value to use for the `Referer` header. The npm CLI itself uses this to serialize -the npm command line using the given request. - -##### `opts.registry` - -* Type: URL -* Default: `'https://registry.npmjs.org'` - -Registry configuration for a request. If a request URL only includes the URL -path, this registry setting will be prepended. This configuration is also used -to determine authentication details, so even if the request URL references a -completely different host, `opts.registry` will be used to find the auth details -for that request. - -See also [`opts.scope`](#opts-scope), [`opts.spec`](#opts-spec), and -[`opts.:registry`](#opts-scope-registry) which can all affect the actual -registry URL used by the outgoing request. 
- -##### `opts.retry` - -* Type: Object -* Default: null - -Single-object configuration for request retry settings. If passed in, will -override individually-passed `fetch-retry-*` settings. - -##### `opts.scope` - -* Type: String -* Default: null - -Associate an operation with a scope for a scoped registry. This option can force -lookup of scope-specific registries and authentication. - -See also [`opts.:registry`](#opts-scope-registry) and -[`opts.spec`](#opts-spec) for interactions with this option. - -##### `opts.:registry` - -* Type: String -* Default: null - -This option type can be used to configure the registry used for requests -involving a particular scope. For example, `opts['@myscope:registry'] = -'https://scope-specific.registry/'` will make it so requests go out to this -registry instead of [`opts.registry`](#opts-registry) when -[`opts.scope`](#opts-scope) is used, or when [`opts.spec`](#opts-spec) is a -scoped package spec. - -The `@` before the scope name is optional, but recommended. - -##### `opts.spec` - -* Type: String | [`npm-registry-arg`](https://npm.im/npm-registry-arg) object. -* Default: null - -If provided, can be used to automatically configure [`opts.scope`](#opts-scope) -based on a specific package name. Non-registry package specs will throw an -error. - -##### `opts.strict-ssl` - -* Type: Boolean -* Default: true - -Whether or not to do SSL key validation when making requests to the -registry via https. - -See also [`opts.ca`](#opts-ca). - -##### `opts.timeout` - -* Type: Milliseconds -* Default: 30000 (30 seconds) - -Time before a hanging request times out. - -##### `opts.token` - -* Alias: `opts._authToken` -* Type: String -* Default: null - -Authentication token string. - -Can be scoped to a registry by using a "nerf dart" for that registry. That is: - -``` -{ - '//registry.npmjs.org/:token': 't0k3nH34r' -} -``` - -##### `opts.user-agent` - -* Type: String -* Default: `'npm-registry-fetch@/node@+ ()'` - -User agent string to send in the `User-Agent` header. - -##### `opts.username` - -* Type: String -* Default: null - -Username used for basic authentication. For the more modern authentication -method, please use the (more secure) [`opts.token`](#opts-token) - -Can optionally be scoped to a registry by using a "nerf dart" for that registry. -That is: - -``` -{ - '//registry.npmjs.org/:username': 't0k3nH34r' -} -``` - -See also [`opts.password`](#opts-password) - -##### `opts._auth` - -* Type: String -* Default: null - -** DEPRECATED ** This is a legacy authentication token supported only for -*compatibility. Please use [`opts.token`](#opts-token) instead. 
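As a closing sketch of the remaining option groups documented above (retry, timeout, auth token, user agent), assuming the same `npm-registry-fetch` API and a hypothetical package name and values:

```js
'use strict'
const fetch = require('npm-registry-fetch')

fetch.json('/some-hypothetical-package', {
  '//registry.npmjs.org/:token': 't0k3nH34r',  // registry-scoped ("nerf-darted") token
  retry: { retries: 2, factor: 10, minTimeout: 10000, maxTimeout: 60000 }, // overrides fetch-retry-* opts
  timeout: 30000,                // milliseconds before a hanging request errors out
  'user-agent': 'my-tool/1.0.0'  // sent as the User-Agent header
}).then(packument => {
  console.log(Object.keys(packument.versions || {}))
}).catch(err => {
  // Failed requests reject with coded errors (e.g. EOTP, EAUTHIP, E404),
  // as produced by the check-response.js / errors.js files removed below.
  console.error(err.code, err.message)
})
```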
diff --git a/node_modules/libnpmhook/node_modules/npm-registry-fetch/auth.js b/node_modules/libnpmhook/node_modules/npm-registry-fetch/auth.js deleted file mode 100644 index 9532341db1400..0000000000000 --- a/node_modules/libnpmhook/node_modules/npm-registry-fetch/auth.js +++ /dev/null @@ -1,48 +0,0 @@ -'use strict' - -const config = require('./config.js') -const url = require('url') - -module.exports = getAuth -function getAuth (registry, opts) { - if (!registry) { throw new Error('registry is required') } - opts = config(opts) - let AUTH = {} - const regKey = registry && registryKey(registry) - const doKey = (key, alias) => addKey(opts, AUTH, regKey, key, alias) - doKey('token') - doKey('_authToken', 'token') - doKey('username') - doKey('password') - doKey('_password', 'password') - doKey('email') - doKey('_auth') - doKey('otp') - doKey('always-auth', 'alwaysAuth') - if (AUTH.password) { - AUTH.password = Buffer.from(AUTH.password, 'base64').toString('utf8') - } - AUTH.alwaysAuth = AUTH.alwaysAuth === 'false' ? false : !!AUTH.alwaysAuth - return AUTH -} - -function addKey (opts, obj, scope, key, objKey) { - if (opts.get(key)) { - obj[objKey || key] = opts.get(key) - } - if (scope && opts.get(`${scope}:${key}`)) { - obj[objKey || key] = opts.get(`${scope}:${key}`) - } -} - -// Called a nerf dart in the main codebase. Used as a "safe" -// key when fetching registry info from config. -function registryKey (registry) { - const parsed = url.parse(registry) - const formatted = url.format({ - host: parsed.host, - pathname: parsed.pathname, - slashes: parsed.slashes - }) - return url.resolve(formatted, '.') -} diff --git a/node_modules/libnpmhook/node_modules/npm-registry-fetch/check-response.js b/node_modules/libnpmhook/node_modules/npm-registry-fetch/check-response.js deleted file mode 100644 index 407a80e4ce38a..0000000000000 --- a/node_modules/libnpmhook/node_modules/npm-registry-fetch/check-response.js +++ /dev/null @@ -1,99 +0,0 @@ -'use strict' - -const config = require('./config.js') -const errors = require('./errors.js') -const LRU = require('lru-cache') - -module.exports = checkResponse -function checkResponse (method, res, registry, startTime, opts) { - opts = config(opts) - if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) { - opts.get('log').notice('', res.headers.get('npm-notice')) - } - checkWarnings(res, registry, opts) - if (res.status >= 400) { - logRequest(method, res, startTime, opts) - return checkErrors(method, res, startTime, opts) - } else { - res.body.on('end', () => logRequest(method, res, startTime, opts)) - return res - } -} - -function logRequest (method, res, startTime, opts) { - const elapsedTime = Date.now() - startTime - const attempt = res.headers.get('x-fetch-attempts') - const attemptStr = attempt && attempt > 1 ? ` attempt #${attempt}` : '' - const cacheStr = res.headers.get('x-local-cache') ? 
' (from cache)' : '' - opts.get('log').http( - 'fetch', - `${method.toUpperCase()} ${res.status} ${res.url} ${elapsedTime}ms${attemptStr}${cacheStr}` - ) -} - -const WARNING_REGEXP = /^\s*(\d{3})\s+(\S+)\s+"(.*)"\s+"([^"]+)"/ -const BAD_HOSTS = new LRU({ max: 50 }) - -function checkWarnings (res, registry, opts) { - if (res.headers.has('warning') && !BAD_HOSTS.has(registry)) { - const warnings = {} - res.headers.raw()['warning'].forEach(w => { - const match = w.match(WARNING_REGEXP) - if (match) { - warnings[match[1]] = { - code: match[1], - host: match[2], - message: match[3], - date: new Date(match[4]) - } - } - }) - BAD_HOSTS.set(registry, true) - if (warnings['199']) { - if (warnings['199'].message.match(/ENOTFOUND/)) { - opts.get('log').warn('registry', `Using stale data from ${registry} because the host is inaccessible -- are you offline?`) - } else { - opts.get('log').warn('registry', `Unexpected warning for ${registry}: ${warnings['199'].message}`) - } - } - if (warnings['111']) { - // 111 Revalidation failed -- we're using stale data - opts.get('log').warn( - 'registry', - `Using stale data from ${registry} due to a request error during revalidation.` - ) - } - } -} - -function checkErrors (method, res, startTime, opts) { - return res.buffer() - .catch(() => null) - .then(body => { - try { - body = JSON.parse(body.toString('utf8')) - } catch (e) {} - if (res.status === 401 && res.headers.get('www-authenticate')) { - const auth = res.headers.get('www-authenticate') - .split(/,\s*/) - .map(s => s.toLowerCase()) - if (auth.indexOf('ipaddress') !== -1) { - throw new errors.HttpErrorAuthIPAddress( - method, res, body, opts.spec - ) - } else if (auth.indexOf('otp') !== -1) { - throw new errors.HttpErrorAuthOTP( - method, res, body, opts.spec - ) - } else { - throw new errors.HttpErrorAuthUnknown( - method, res, body, opts.spec - ) - } - } else { - throw new errors.HttpErrorGeneral( - method, res, body, opts.spec - ) - } - }) -} diff --git a/node_modules/libnpmhook/node_modules/npm-registry-fetch/config.js b/node_modules/libnpmhook/node_modules/npm-registry-fetch/config.js deleted file mode 100644 index db08c1e47001f..0000000000000 --- a/node_modules/libnpmhook/node_modules/npm-registry-fetch/config.js +++ /dev/null @@ -1,90 +0,0 @@ -'use strict' - -const pkg = require('./package.json') -const figgyPudding = require('figgy-pudding') -const silentLog = require('./silentlog.js') - -const AUTH_REGEX = /^(?:.*:)?(token|_authToken|username|_password|password|email|always-auth|_auth|otp)$/ -const SCOPE_REGISTRY_REGEX = /@.*:registry$/gi -module.exports = figgyPudding({ - 'agent': {}, - 'algorithms': {}, - 'body': {}, - 'ca': {}, - 'cache': {}, - 'cert': {}, - 'fetch-retries': {}, - 'fetch-retry-factor': {}, - 'fetch-retry-maxtimeout': {}, - 'fetch-retry-mintimeout': {}, - 'gid': {}, - 'headers': {}, - 'https-proxy': {}, - 'integrity': {}, - 'is-from-ci': 'isFromCI', - 'isFromCI': { - default () { - return ( - process.env['CI'] === 'true' || - process.env['TDDIUM'] || - process.env['JENKINS_URL'] || - process.env['bamboo.buildKey'] || - process.env['GO_PIPELINE_NAME'] - ) - } - }, - 'key': {}, - 'local-address': {}, - 'log': { - default: silentLog - }, - 'max-sockets': 'maxsockets', - 'maxsockets': { - default: 12 - }, - 'memoize': {}, - 'method': { - default: 'GET' - }, - 'no-proxy': {}, - 'noproxy': {}, - 'npm-session': 'npmSession', - 'npmSession': {}, - 'offline': {}, - 'otp': {}, - 'prefer-offline': {}, - 'prefer-online': {}, - 'projectScope': {}, - 'project-scope': 'projectScope', - 
'Promise': {}, - 'proxy': {}, - 'query': {}, - 'refer': {}, - 'referer': 'refer', - 'registry': { - default: 'https://registry.npmjs.org/' - }, - 'retry': {}, - 'scope': {}, - 'spec': {}, - 'strict-ssl': {}, - 'timeout': {}, - 'uid': {}, - 'user-agent': { - default: `${ - pkg.name - }@${ - pkg.version - }/node@${ - process.version - }+${ - process.arch - } (${ - process.platform - })` - } -}, { - other (key) { - return key.match(AUTH_REGEX) || key.match(SCOPE_REGISTRY_REGEX) - } -}) diff --git a/node_modules/libnpmhook/node_modules/npm-registry-fetch/errors.js b/node_modules/libnpmhook/node_modules/npm-registry-fetch/errors.js deleted file mode 100644 index 217f46f9773a7..0000000000000 --- a/node_modules/libnpmhook/node_modules/npm-registry-fetch/errors.js +++ /dev/null @@ -1,58 +0,0 @@ -'use strict' - -class HttpErrorBase extends Error { - constructor (method, res, body, spec) { - super() - this.headers = res.headers.raw() - this.statusCode = res.status - this.code = `E${res.status}` - this.method = method - this.uri = res.url - this.body = body - } -} -module.exports.HttpErrorBase = HttpErrorBase - -class HttpErrorGeneral extends HttpErrorBase { - constructor (method, res, body, spec) { - super(method, res, body, spec) - this.message = `${res.status} ${res.statusText} - ${ - this.method.toUpperCase() - } ${ - this.spec || this.uri - }${ - (body && body.error) ? ' - ' + body.error : '' - }` - Error.captureStackTrace(this, HttpErrorGeneral) - } -} -module.exports.HttpErrorGeneral = HttpErrorGeneral - -class HttpErrorAuthOTP extends HttpErrorBase { - constructor (method, res, body, spec) { - super(method, res, body, spec) - this.message = 'OTP required for authentication' - this.code = 'EOTP' - Error.captureStackTrace(this, HttpErrorAuthOTP) - } -} -module.exports.HttpErrorAuthOTP = HttpErrorAuthOTP - -class HttpErrorAuthIPAddress extends HttpErrorBase { - constructor (method, res, body, spec) { - super(method, res, body, spec) - this.message = 'Login is not allowed from your IP address' - this.code = 'EAUTHIP' - Error.captureStackTrace(this, HttpErrorAuthIPAddress) - } -} -module.exports.HttpErrorAuthIPAddress = HttpErrorAuthIPAddress - -class HttpErrorAuthUnknown extends HttpErrorBase { - constructor (method, res, body, spec) { - super(method, res, body, spec) - this.message = 'Unable to authenticate, need: ' + res.headers.get('www-authenticate') - Error.captureStackTrace(this, HttpErrorAuthUnknown) - } -} -module.exports.HttpErrorAuthUnknown = HttpErrorAuthUnknown diff --git a/node_modules/libnpmhook/node_modules/npm-registry-fetch/index.js b/node_modules/libnpmhook/node_modules/npm-registry-fetch/index.js deleted file mode 100644 index bb6ddeaee0151..0000000000000 --- a/node_modules/libnpmhook/node_modules/npm-registry-fetch/index.js +++ /dev/null @@ -1,160 +0,0 @@ -'use strict' - -const Buffer = require('safe-buffer').Buffer - -const checkResponse = require('./check-response.js') -const config = require('./config.js') -const getAuth = require('./auth.js') -const fetch = require('make-fetch-happen') -const npa = require('npm-package-arg') -const qs = require('querystring') -const url = require('url') - -module.exports = regFetch -function regFetch (uri, opts) { - opts = config(opts) - const registry = ( - (opts.get('spec') && pickRegistry(opts.get('spec'), opts)) || - opts.get('registry') || - 'https://registry.npmjs.org/' - ) - uri = url.parse(uri).protocol - ? 
uri - : `${ - registry.trim().replace(/\/?$/g, '') - }/${ - uri.trim().replace(/^\//, '') - }` - // through that takes into account the scope, the prefix of `uri`, etc - const startTime = Date.now() - const headers = getHeaders(registry, uri, opts) - let body = opts.get('body') - const bodyIsStream = body && - typeof body === 'object' && - typeof body.pipe === 'function' - if (body && !bodyIsStream && typeof body !== 'string' && !Buffer.isBuffer(body)) { - headers['content-type'] = headers['content-type'] || 'application/json' - body = JSON.stringify(body) - } else if (body && !headers['content-type']) { - headers['content-type'] = 'application/octet-stream' - } - if (opts.get('query')) { - let q = opts.get('query') - if (typeof q === 'string') { - q = qs.parse(q) - } - const parsed = url.parse(uri) - parsed.search = '?' + qs.stringify( - parsed.query - ? Object.assign(qs.parse(parsed.query), q) - : q - ) - uri = url.format(parsed) - } - return fetch(uri, { - agent: opts.get('agent'), - algorithms: opts.get('algorithms'), - body, - cache: getCacheMode(opts), - cacheManager: opts.get('cache'), - ca: opts.get('ca'), - cert: opts.get('cert'), - headers, - integrity: opts.get('integrity'), - key: opts.get('key'), - localAddress: opts.get('local-address'), - maxSockets: opts.get('maxsockets'), - memoize: opts.get('memoize'), - method: opts.get('method') || 'GET', - noProxy: opts.get('no-proxy') || opts.get('noproxy'), - Promise: opts.get('Promise'), - proxy: opts.get('https-proxy') || opts.get('proxy'), - referer: opts.get('refer'), - retry: opts.get('retry') || { - retries: opts.get('fetch-retries'), - factor: opts.get('fetch-retry-factor'), - minTimeout: opts.get('fetch-retry-mintimeout'), - maxTimeout: opts.get('fetch-retry-maxtimeout') - }, - strictSSL: !!opts.get('strict-ssl'), - timeout: opts.get('timeout'), - uid: opts.get('uid'), - gid: opts.get('gid') - }).then(res => checkResponse( - opts.get('method') || 'GET', res, registry, startTime, opts - )) -} - -module.exports.json = fetchJSON -function fetchJSON (uri, opts) { - return regFetch(uri, opts).then(res => res.json()) -} - -module.exports.pickRegistry = pickRegistry -function pickRegistry (spec, opts) { - spec = npa(spec) - opts = config(opts) - let registry = spec.scope && - opts.get(spec.scope.replace(/^@?/, '@') + ':registry') - - if (!registry && opts.get('scope')) { - registry = opts.get( - opts.get('scope').replace(/^@?/, '@') + ':registry' - ) - } - - if (!registry) { - registry = opts.get('registry') || 'https://registry.npmjs.org/' - } - - return registry -} - -function getCacheMode (opts) { - return opts.get('offline') - ? 'only-if-cached' - : opts.get('prefer-offline') - ? 'force-cache' - : opts.get('prefer-online') - ? 
'no-cache' - : 'default' -} - -function getHeaders (registry, uri, opts) { - const headers = Object.assign({ - 'npm-in-ci': !!( - opts.get('is-from-ci') || - process.env['CI'] === 'true' || - process.env['TDDIUM'] || - process.env['JENKINS_URL'] || - process.env['bamboo.buildKey'] || - process.env['GO_PIPELINE_NAME'] - ), - 'npm-scope': opts.get('project-scope'), - 'npm-session': opts.get('npm-session'), - 'user-agent': opts.get('user-agent'), - 'referer': opts.get('refer') - }, opts.get('headers')) - - const auth = getAuth(registry, opts) - // If a tarball is hosted on a different place than the manifest, only send - // credentials on `alwaysAuth` - const shouldAuth = ( - auth.alwaysAuth || - url.parse(uri).host === url.parse(registry).host - ) - if (shouldAuth && auth.token) { - headers.authorization = `Bearer ${auth.token}` - } else if (shouldAuth && auth.username && auth.password) { - const encoded = Buffer.from( - `${auth.username}:${auth.password}`, 'utf8' - ).toString('base64') - headers.authorization = `Basic ${encoded}` - } else if (shouldAuth && auth._auth) { - headers.authorization = `Basic ${auth._auth}` - } - if (shouldAuth && auth.otp) { - headers['npm-otp'] = auth.otp - } - return headers -} diff --git a/node_modules/libnpmhook/node_modules/npm-registry-fetch/package.json b/node_modules/libnpmhook/node_modules/npm-registry-fetch/package.json deleted file mode 100644 index f17636c6cf5e4..0000000000000 --- a/node_modules/libnpmhook/node_modules/npm-registry-fetch/package.json +++ /dev/null @@ -1,90 +0,0 @@ -{ - "_from": "npm-registry-fetch@^3.0.0", - "_id": "npm-registry-fetch@3.1.1", - "_inBundle": false, - "_integrity": "sha512-xBobENeenvjIG8PgQ1dy77AXTI25IbYhmA3DusMIfw/4EL5BaQ5e1V9trkPrqHvyjR3/T0cnH6o0Wt/IzcI5Ag==", - "_location": "/libnpmhook/npm-registry-fetch", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "npm-registry-fetch@^3.0.0", - "name": "npm-registry-fetch", - "escapedName": "npm-registry-fetch", - "rawSpec": "^3.0.0", - "saveSpec": null, - "fetchSpec": "^3.0.0" - }, - "_requiredBy": [ - "/libnpmhook" - ], - "_resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-3.1.1.tgz", - "_shasum": "e96bae698afdd45d4a01aca29e881fc0bc55206c", - "_spec": "npm-registry-fetch@^3.0.0", - "_where": "/Users/rebecca/code/npm/node_modules/libnpmhook", - "author": { - "name": "Kat Marchán", - "email": "kzm@sykosomatic.org" - }, - "bugs": { - "url": "https://github.com/npm/registry-fetch/issues" - }, - "bundleDependencies": false, - "config": { - "nyc": { - "exclude": [ - "node_modules/**", - "test/**" - ] - } - }, - "dependencies": { - "bluebird": "^3.5.1", - "figgy-pudding": "^3.1.0", - "lru-cache": "^4.1.2", - "make-fetch-happen": "^4.0.0", - "npm-package-arg": "^6.0.0" - }, - "deprecated": false, - "description": "Fetch-based http client for use with npm registry APIs", - "devDependencies": { - "cacache": "^11.0.0", - "mkdirp": "^0.5.1", - "nock": "^9.2.3", - "npmlog": "^4.1.2", - "rimraf": "^2.6.2", - "ssri": "^6.0.0", - "standard": "^11.0.1", - "standard-version": "^4.2.0", - "tap": "^11.1.3", - "weallbehave": "^1.2.0", - "weallcontribute": "^1.0.8" - }, - "files": [ - "*.js", - "lib" - ], - "homepage": "https://github.com/npm/registry-fetch#readme", - "keywords": [ - "npm", - "registry", - "fetch" - ], - "license": "ISC", - "main": "index.js", - "name": "npm-registry-fetch", - "repository": { - "type": "git", - "url": "git+https://github.com/npm/registry-fetch.git" - }, - "scripts": { - "postrelease": "npm 
publish && git push --follow-tags", - "prerelease": "npm t", - "pretest": "standard", - "release": "standard-version -s", - "test": "tap -J --coverage test/*.js", - "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", - "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" - }, - "version": "3.1.1" -} diff --git a/node_modules/libnpmhook/node_modules/npm-registry-fetch/silentlog.js b/node_modules/libnpmhook/node_modules/npm-registry-fetch/silentlog.js deleted file mode 100644 index 886c5d55b2dbb..0000000000000 --- a/node_modules/libnpmhook/node_modules/npm-registry-fetch/silentlog.js +++ /dev/null @@ -1,14 +0,0 @@ -'use strict' - -const noop = Function.prototype -module.exports = { - error: noop, - warn: noop, - notice: noop, - info: noop, - verbose: noop, - silly: noop, - http: noop, - pause: noop, - resume: noop -} diff --git a/node_modules/libnpmhook/package.json b/node_modules/libnpmhook/package.json index 2f06e7a6b53bf..20f5e28868788 100644 --- a/node_modules/libnpmhook/package.json +++ b/node_modules/libnpmhook/package.json @@ -1,38 +1,28 @@ { - "_args": [ - [ - "libnpmhook@4.0.1", - "/Users/rebecca/code/npm" - ] - ], - "_from": "libnpmhook@4.0.1", - "_id": "libnpmhook@4.0.1", + "_from": "libnpmhook@5.0.3", + "_id": "libnpmhook@5.0.3", "_inBundle": false, - "_integrity": "sha512-3qqpfqvBD1712WA6iGe0stkG40WwAeoWcujA6BlC0Be1JArQbqwabnEnZ0CRcD05Tf1fPYJYdCbSfcfedEJCOg==", + "_integrity": "sha512-UdNLMuefVZra/wbnBXECZPefHMGsVDTq5zaM/LgKNE9Keyl5YXQTnGAzEo+nFOpdRqTWI9LYi4ApqF9uVCCtuA==", "_location": "/libnpmhook", - "_phantomChildren": { - "bluebird": "3.5.1", - "figgy-pudding": "3.1.0", - "lru-cache": "4.1.3", - "make-fetch-happen": "4.0.1", - "npm-package-arg": "6.1.0" - }, + "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "libnpmhook@4.0.1", + "raw": "libnpmhook@5.0.3", "name": "libnpmhook", "escapedName": "libnpmhook", - "rawSpec": "4.0.1", + "rawSpec": "5.0.3", "saveSpec": null, - "fetchSpec": "4.0.1" + "fetchSpec": "5.0.3" }, "_requiredBy": [ + "#USER", "/" ], - "_resolved": "https://registry.npmjs.org/libnpmhook/-/libnpmhook-4.0.1.tgz", - "_spec": "4.0.1", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/libnpmhook/-/libnpmhook-5.0.3.tgz", + "_shasum": "4020c0f5edbf08ebe395325caa5ea01885b928f7", + "_spec": "libnpmhook@5.0.3", + "_where": "/Users/isaacs/dev/npm/cli", "author": { "name": "Kat Marchán", "email": "kzm@sykosomatic.org" @@ -40,16 +30,20 @@ "bugs": { "url": "https://github.com/npm/libnpmhook/issues" }, + "bundleDependencies": false, "dependencies": { - "figgy-pudding": "^3.1.0", - "npm-registry-fetch": "^3.0.0" + "aproba": "^2.0.0", + "figgy-pudding": "^3.4.1", + "get-stream": "^4.0.0", + "npm-registry-fetch": "^4.0.0" }, + "deprecated": false, "description": "programmatic API for managing npm registry hooks", "devDependencies": { - "nock": "^9.2.3", + "nock": "^9.6.1", "standard": "^11.0.1", - "standard-version": "^4.3.0", - "tap": "^11.1.3", + "standard-version": "^4.4.0", + "tap": "^12.0.1", "weallbehave": "^1.2.0", "weallcontribute": "^1.0.8" }, @@ -76,9 +70,9 @@ "prerelease": "npm t", "pretest": "standard", "release": "standard-version -s", - "test": "tap -J --coverage test/*.js", + "test": "tap -J --100 --coverage test/*.js", "update-coc": "weallbehave -o . 
&& git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" }, - "version": "4.0.1" + "version": "5.0.3" } diff --git a/node_modules/libnpmorg/.travis.yml b/node_modules/libnpmorg/.travis.yml new file mode 100644 index 0000000000000..db5ea8b018640 --- /dev/null +++ b/node_modules/libnpmorg/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +sudo: false +node_js: + - "10" + - "9" + - "8" + - "6" diff --git a/node_modules/libnpmorg/CHANGELOG.md b/node_modules/libnpmorg/CHANGELOG.md new file mode 100644 index 0000000000000..3d70c79c5614b --- /dev/null +++ b/node_modules/libnpmorg/CHANGELOG.md @@ -0,0 +1,21 @@ +# Change Log + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + + +## [1.0.1](https://github.com/npm/libnpmorg/compare/v1.0.0...v1.0.1) (2019-07-16) + + +### Bug Fixes + +* **standard:** standard --fix ([5118358](https://github.com/npm/libnpmorg/commit/5118358)) + + + + +# 1.0.0 (2018-08-23) + + +### Features + +* **API:** implement org api ([731b9c6](https://github.com/npm/libnpmorg/commit/731b9c6)) diff --git a/node_modules/libnpmorg/CODE_OF_CONDUCT.md b/node_modules/libnpmorg/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000000..aeb72f598dcb4 --- /dev/null +++ b/node_modules/libnpmorg/CODE_OF_CONDUCT.md @@ -0,0 +1,151 @@ +# Code of Conduct + +## When Something Happens + +If you see a Code of Conduct violation, follow these steps: + +1. Let the person know that what they did is not appropriate and ask them to stop and/or edit their message(s) or commits. +2. That person should immediately stop the behavior and correct the issue. +3. If this doesn’t happen, or if you're uncomfortable speaking up, [contact the maintainers](#contacting-maintainers). +4. As soon as available, a maintainer will look into the issue, and take [further action (see below)](#further-enforcement), starting with a warning, then temporary block, then long-term repo or organization ban. + +When reporting, please include any relevant details, links, screenshots, context, or other information that may be used to better understand and resolve the situation. + +**The maintainer team will prioritize the well-being and comfort of the recipients of the violation over the comfort of the violator.** See [some examples below](#enforcement-examples). + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as contributors and maintainers of this project pledge to making participation in our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, technical preferences, nationality, personal appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment include: + + * Using welcoming and inclusive language. + * Being respectful of differing viewpoints and experiences. + * Gracefully accepting constructive feedback. + * Focusing on what is best for the community. + * Showing empathy and kindness towards other community members. + * Encouraging and raising up your peers in the project so you can all bask in hacks and glory. 
+ +Examples of unacceptable behavior by participants include: + + * The use of sexualized language or imagery and unwelcome sexual attention or advances, including when simulated online. The only exception to sexual topics is channels/spaces specifically for topics of sexual identity. + * Casual mention of slavery or indentured servitude and/or false comparisons of one's occupation or situation to slavery. Please consider using or asking about alternate terminology when referring to such metaphors in technology. + * Making light of/making mocking comments about trigger warnings and content warnings. + * Trolling, insulting/derogatory comments, and personal or political attacks. + * Public or private harassment, deliberate intimidation, or threats. + * Publishing others' private information, such as a physical or electronic address, without explicit permission. This includes any sort of "outing" of any aspect of someone's identity without their consent. + * Publishing private screenshots or quotes of interactions in the context of this project without all quoted users' *explicit* consent. + * Publishing of private communication that doesn't have to do with reporting harrassment. + * Any of the above even when [presented as "ironic" or "joking"](https://en.wikipedia.org/wiki/Hipster_racism). + * Any attempt to present "reverse-ism" versions of the above as violations. Examples of reverse-isms are "reverse racism", "reverse sexism", "heterophobia", and "cisphobia". + * Unsolicited explanations under the assumption that someone doesn't already know it. Ask before you teach! Don't assume what people's knowledge gaps are. + * [Feigning or exaggerating surprise](https://www.recurse.com/manual#no-feigned-surprise) when someone admits to not knowing something. + * "[Well-actuallies](https://www.recurse.com/manual#no-well-actuallys)" + * Other conduct which could reasonably be considered inappropriate in a professional or community setting. + +## Scope + +This Code of Conduct applies both within spaces involving this project and in other spaces involving community members. This includes the repository, its Pull Requests and Issue tracker, its Twitter community, private email communications in the context of the project, and any events where members of the project are participating, as well as adjacent communities and venues affecting the project's members. + +Depending on the violation, the maintainers may decide that violations of this code of conduct that have happened outside of the scope of the community may deem an individual unwelcome, and take appropriate action to maintain the comfort and safety of its members. + +### Other Community Standards + +As a project on GitHub, this project is additionally covered by the [GitHub Community Guidelines](https://help.github.com/articles/github-community-guidelines/). + +Additionally, as a project hosted on npm, is is covered by [npm, Inc's Code of Conduct](https://www.npmjs.com/policies/conduct). + +Enforcement of those guidelines after violations overlapping with the above are the responsibility of the entities, and enforcement may happen in any or all of the services/communities. + +## Maintainer Enforcement Process + +Once the maintainers get involved, they will follow a documented series of steps and do their best to preserve the well-being of project members. This section covers actual concrete steps. 
+ +### Contacting Maintainers + +You may get in touch with the maintainer team through any of the following methods: + + * Through email: + * [kzm@zkat.tech](mailto:kzm@zkat.tech) (Kat Marchán) + + * Through Twitter: + * [@maybekatz](https://twitter.com/maybekatz) (Kat Marchán) + +### Further Enforcement + +If you've already followed the [initial enforcement steps](#enforcement), these are the steps maintainers will take for further enforcement, as needed: + + 1. Repeat the request to stop. + 2. If the person doubles down, they will have offending messages removed or edited by a maintainers given an official warning. The PR or Issue may be locked. + 3. If the behavior continues or is repeated later, the person will be blocked from participating for 24 hours. + 4. If the behavior continues or is repeated after the temporary block, a long-term (6-12mo) ban will be used. + +On top of this, maintainers may remove any offending messages, images, contributions, etc, as they deem necessary. + +Maintainers reserve full rights to skip any of these steps, at their discretion, if the violation is considered to be a serious and/or immediate threat to the health and well-being of members of the community. These include any threats, serious physical or verbal attacks, and other such behavior that would be completely unacceptable in any social setting that puts our members at risk. + +Members expelled from events or venues with any sort of paid attendance will not be refunded. + +### Who Watches the Watchers? + +Maintainers and other leaders who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. These may include anything from removal from the maintainer team to a permanent ban from the community. + +Additionally, as a project hosted on both GitHub and npm, [their own Codes of Conducts may be applied against maintainers of this project](#other-community-standards), externally of this project's procedures. + +### Enforcement Examples + +#### The Best Case + +The vast majority of situations work out like this. This interaction is common, and generally positive. + +> Alex: "Yeah I used X and it was really crazy!" + +> Patt (not a maintainer): "Hey, could you not use that word? What about 'ridiculous' instead?" + +> Alex: "oh sorry, sure." -> edits old comment to say "it was really confusing!" + +#### The Maintainer Case + +Sometimes, though, you need to get maintainers involved. Maintainers will do their best to resolve conflicts, but people who were harmed by something **will take priority**. + +> Patt: "Honestly, sometimes I just really hate using $library and anyone who uses it probably sucks at their job." + +> Alex: "Whoa there, could you dial it back a bit? There's a CoC thing about attacking folks' tech use like that." + +> Patt: "I'm not attacking anyone, what's your problem?" + +> Alex: "@maintainers hey uh. Can someone look at this issue? Patt is getting a bit aggro. I tried to nudge them about it, but nope." + +> KeeperOfCommitBits: (on issue) "Hey Patt, maintainer here. Could you tone it down? This sort of attack is really not okay in this space." + +> Patt: "Leave me alone I haven't said anything bad wtf is wrong with you." + +> KeeperOfCommitBits: (deletes user's comment), "@patt I mean it. Please refer to the CoC over at (URL to this CoC) if you have questions, but you can consider this an actual warning. 
I'd appreciate it if you reworded your messages in this thread, since they made folks there uncomfortable. Let's try and be kind, yeah?" + +> Patt: "@keeperofbits Okay sorry. I'm just frustrated and I'm kinda burnt out and I guess I got carried away. I'll DM Alex a note apologizing and edit my messages. Sorry for the trouble." + +> KeeperOfCommitBits: "@patt Thanks for that. I hear you on the stress. Burnout sucks :/. Have a good one!" + +#### The Nope Case + +> PepeTheFrog🐸: "Hi, I am a literal actual nazi and I think white supremacists are quite fashionable." + +> Patt: "NOOOOPE. OH NOPE NOPE." + +> Alex: "JFC NO. NOPE. @keeperofbits NOPE NOPE LOOK HERE" + +> KeeperOfCommitBits: "👀 Nope. NOPE NOPE NOPE. 🔥" + +> PepeTheFrog🐸 has been banned from all organization or user repositories belonging to KeeperOfCommitBits. + +## Attribution + +This Code of Conduct was generated using [WeAllJS Code of Conduct Generator](https://npm.im/weallbehave), which is based on the [WeAllJS Code of +Conduct](https://wealljs.org/code-of-conduct), which is itself based on +[Contributor Covenant](http://contributor-covenant.org), version 1.4, available +at +[http://contributor-covenant.org/version/1/4](http://contributor-covenant.org/version/1/4), +and the LGBTQ in Technology Slack [Code of +Conduct](http://lgbtq.technology/coc.html). diff --git a/node_modules/libnpmorg/CONTRIBUTING.md b/node_modules/libnpmorg/CONTRIBUTING.md new file mode 100644 index 0000000000000..eb4b58b03ef1a --- /dev/null +++ b/node_modules/libnpmorg/CONTRIBUTING.md @@ -0,0 +1,256 @@ +# Contributing + +## How do I... + +* [Use This Guide](#introduction)? +* Ask or Say Something? 🤔🐛😱 + * [Request Support](#request-support) + * [Report an Error or Bug](#report-an-error-or-bug) + * [Request a Feature](#request-a-feature) +* Make Something? 🤓👩🏽‍💻📜🍳 + * [Project Setup](#project-setup) + * [Contribute Documentation](#contribute-documentation) + * [Contribute Code](#contribute-code) +* Manage Something ✅🙆🏼💃👔 + * [Provide Support on Issues](#provide-support-on-issues) + * [Label Issues](#label-issues) + * [Clean Up Issues and PRs](#clean-up-issues-and-prs) + * [Review Pull Requests](#review-pull-requests) + * [Merge Pull Requests](#merge-pull-requests) + * [Tag a Release](#tag-a-release) + * [Join the Project Team](#join-the-project-team) +* Add a Guide Like This One [To My Project](#attribution)? 🤖😻👻 + +## Introduction + +Thank you so much for your interest in contributing!. All types of contributions are encouraged and valued. See the [table of contents](#toc) for different ways to help and details about how this project handles them!📝 + +Please make sure to read the relevant section before making your contribution! It will make it a lot easier for us maintainers to make the most of it and smooth out the experience for all involved. 💚 + +The [Project Team](#join-the-project-team) looks forward to your contributions. 🙌🏾✨ + +## Request Support + +If you have a question about this project, how to use it, or just need clarification about something: + +* Open an Issue at https://github.com/npm/libnpmorg/issues +* Provide as much context as you can about what you're running into. +* Provide project and platform versions (nodejs, npm, etc), depending on what seems relevant. If not, please be ready to provide that information if maintainers ask for it. + +Once it's filed: + +* The project team will [label the issue](#label-issues). +* Someone will try to have a response soon. 
+* If you or the maintainers don't respond to an issue for 30 days, the [issue will be closed](#clean-up-issues-and-prs). If you want to come back to it, reply (once, please), and we'll reopen the existing issue. Please avoid filing new issues as extensions of one you already made. + +## Report an Error or Bug + +If you run into an error or bug with the project: + +* Open an Issue at https://github.com/npm/libnpmorg/issues +* Include *reproduction steps* that someone else can follow to recreate the bug or error on their own. +* Provide project and platform versions (nodejs, npm, etc), depending on what seems relevant. If not, please be ready to provide that information if maintainers ask for it. + +Once it's filed: + +* The project team will [label the issue](#label-issues). +* A team member will try to reproduce the issue with your provided steps. If there are no repro steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as `needs-repro`. Bugs with the `needs-repro` tag will not be addressed until they are reproduced. +* If the team is able to reproduce the issue, it will be marked `needs-fix`, as well as possibly other tags (such as `critical`), and the issue will be left to be [implemented by someone](#contribute-code). +* If you or the maintainers don't respond to an issue for 30 days, the [issue will be closed](#clean-up-issues-and-prs). If you want to come back to it, reply (once, please), and we'll reopen the existing issue. Please avoid filing new issues as extensions of one you already made. +* `critical` issues may be left open, depending on perceived immediacy and severity, even past the 30 day deadline. + +## Request a Feature + +If the project doesn't do something you need or want it to do: + +* Open an Issue at https://github.com/npm/libnpmorg/issues +* Provide as much context as you can about what you're running into. +* Please try and be clear about why existing features and alternatives would not work for you. + +Once it's filed: + +* The project team will [label the issue](#label-issues). +* The project team will evaluate the feature request, possibly asking you more questions to understand its purpose and any relevant requirements. If the issue is closed, the team will convey their reasoning and suggest an alternative path forward. +* If the feature request is accepted, it will be marked for implementation with `feature-accepted`, which can then be done by either by a core team member or by anyone in the community who wants to [contribute code](#contribute-code). + +Note: The team is unlikely to be able to accept every single feature request that is filed. Please understand if they need to say no. + +## Project Setup + +So you wanna contribute some code! That's great! This project uses GitHub Pull Requests to manage contributions, so [read up on how to fork a GitHub project and file a PR](https://guides.github.com/activities/forking) if you've never done it before. + +If this seems like a lot or you aren't able to do all this setup, you might also be able to [edit the files directly](https://help.github.com/articles/editing-files-in-another-user-s-repository/) without having to do any of this setup. Yes, [even code](#contribute-code). 
+ +If you want to go the usual route and run the project locally, though: + +* [Install Node.js](https://nodejs.org/en/download/) +* [Fork the project](https://guides.github.com/activities/forking/#fork) + +Then in your terminal: +* `cd path/to/your/clone` +* `npm install` +* `npm test` + +And you should be ready to go! + +## Contribute Documentation + +Documentation is a super important, critical part of this project. Docs are how we keep track of what we're doing, how, and why. It's how we stay on the same page about our policies. And it's how we tell others everything they need in order to be able to use this project -- or contribute to it. So thank you in advance. + +Documentation contributions of any size are welcome! Feel free to file a PR even if you're just rewording a sentence to be more clear, or fixing a spelling mistake! + +To contribute documentation: + +* [Set up the project](#project-setup). +* Edit or add any relevant documentation. +* Make sure your changes are formatted correctly and consistently with the rest of the documentation. +* Re-read what you wrote, and run a spellchecker on it to make sure you didn't miss anything. +* In your commit message(s), begin the first line with `docs: `. For example: `docs: Adding a doc contrib section to CONTRIBUTING.md`. +* Write clear, concise commit message(s) using [conventional-changelog format](https://github.com/conventional-changelog/conventional-changelog-angular/blob/master/convention.md). Documentation commits should use `docs(): `. +* Go to https://github.com/npm/libnpmorg/pulls and open a new pull request with your changes. +* If your PR is connected to an open issue, add a line in your PR's description that says `Fixes: #123`, where `#123` is the number of the issue you're fixing. + +Once you've filed the PR: + +* One or more maintainers will use GitHub's review feature to review your PR. +* If the maintainer asks for any changes, edit your changes, push, and ask for another review. +* If the maintainer decides to pass on your PR, they will thank you for the contribution and explain why they won't be accepting the changes. That's ok! We still really appreciate you taking the time to do it, and we don't take that lightly. 💚 +* If your PR gets accepted, it will be marked as such, and merged into the `latest` branch soon after. Your contribution will be distributed to the masses next time the maintainers [tag a release](#tag-a-release) + +## Contribute Code + +We like code commits a lot! They're super handy, and they keep the project going and doing the work it needs to do to be useful to others. + +Code contributions of just about any size are acceptable! + +The main difference between code contributions and documentation contributions is that contributing code requires inclusion of relevant tests for the code being added or changed. Contributions without accompanying tests will be held off until a test is added, unless the maintainers consider the specific tests to be either impossible, or way too much of a burden for such a contribution. + +To contribute code: + +* [Set up the project](#project-setup). +* Make any necessary changes to the source code. +* Include any [additional documentation](#contribute-documentation) the changes might need. +* Write tests that verify that your contribution works as expected. +* Write clear, concise commit message(s) using [conventional-changelog format](https://github.com/conventional-changelog/conventional-changelog-angular/blob/master/convention.md). 
+* Dependency updates, additions, or removals must be in individual commits, and the message must use the format: `(deps): PKG@VERSION`, where `` is any of the usual `conventional-changelog` prefixes, at your discretion. +* Go to https://github.com/npm/libnpmorg/pulls and open a new pull request with your changes. +* If your PR is connected to an open issue, add a line in your PR's description that says `Fixes: #123`, where `#123` is the number of the issue you're fixing. + +Once you've filed the PR: + +* Barring special circumstances, maintainers will not review PRs until all checks pass (Travis, AppVeyor, etc). +* One or more maintainers will use GitHub's review feature to review your PR. +* If the maintainer asks for any changes, edit your changes, push, and ask for another review. Additional tags (such as `needs-tests`) will be added depending on the review. +* If the maintainer decides to pass on your PR, they will thank you for the contribution and explain why they won't be accepting the changes. That's ok! We still really appreciate you taking the time to do it, and we don't take that lightly. 💚 +* If your PR gets accepted, it will be marked as such, and merged into the `latest` branch soon after. Your contribution will be distributed to the masses next time the maintainers [tag a release](#tag-a-release) + +## Provide Support on Issues + +[Needs Collaborator](#join-the-project-team): none + +Helping out other users with their questions is a really awesome way of contributing to any community. It's not uncommon for most of the issues on an open source projects being support-related questions by users trying to understand something they ran into, or find their way around a known bug. + +Sometimes, the `support` label will be added to things that turn out to actually be other things, like bugs or feature requests. In that case, suss out the details with the person who filed the original issue, add a comment explaining what the bug is, and change the label from `support` to `bug` or `feature`. If you can't do this yourself, @mention a maintainer so they can do it. + +In order to help other folks out with their questions: + +* Go to the issue tracker and [filter open issues by the `support` label](https://github.com/npm/libnpmorg/issues?q=is%3Aopen+is%3Aissue+label%3Asupport). +* Read through the list until you find something that you're familiar enough with to give an answer to. +* Respond to the issue with whatever details are needed to clarify the question, or get more details about what's going on. +* Once the discussion wraps up and things are clarified, either close the issue, or ask the original issue filer (or a maintainer) to close it for you. + +Some notes on picking up support issues: + +* Avoid responding to issues you don't know you can answer accurately. +* As much as possible, try to refer to past issues with accepted answers. Link to them from your replies with the `#123` format. +* Be kind and patient with users -- often, folks who have run into confusing things might be upset or impatient. This is ok. Try to understand where they're coming from, and if you're too uncomfortable with the tone, feel free to stay away or withdraw from the issue. (note: if the user is outright hostile or is violating the CoC, [refer to the Code of Conduct](CODE_OF_CONDUCT.md) to resolve the conflict). + +## Label Issues + +[Needs Collaborator](#join-the-project-team): Issue Tracker + +One of the most important tasks in handling issues is labeling them usefully and accurately. 
All other tasks involving issues ultimately rely on the issue being classified in such a way that relevant parties looking to do their own tasks can find them quickly and easily. + +In order to label issues, [open up the list of unlabeled issues](https://github.com/npm/libnpmorg/issues?q=is%3Aopen+is%3Aissue+no%3Alabel) and, **from newest to oldest**, read through each one and apply issue labels according to the table below. If you're unsure about what label to apply, skip the issue and try the next one: don't feel obligated to label each and every issue yourself! + +Label | Apply When | Notes +--- | --- | --- +`bug` | Cases where the code (or documentation) is behaving in a way it wasn't intended to. | If something is happening that surprises the *user* but does not go against the way the code is designed, it should use the `enhancement` label. +`critical` | Added to `bug` issues if the problem described makes the code completely unusable in a common situation. | +`documentation` | Added to issues or pull requests that affect any of the documentation for the project. | Can be combined with other labels, such as `bug` or `enhancement`. +`duplicate` | Added to issues or PRs that refer to the exact same issue as another one that's been previously labeled. | Duplicate issues should be marked and closed right away, with a message referencing the issue it's a duplicate of (with `#123`) +`enhancement` | Added to [feature requests](#request-a-feature), PRs, or documentation issues that are purely additive: the code or docs currently work as expected, but a change is being requested or suggested. | +`help wanted` | Applied by [Committers](#join-the-project-team) to issues and PRs that they would like to get outside help for. Generally, this means it's lower priority for the maintainer team to itself implement, but that the community is encouraged to pick up if they so desire | Never applied on first-pass labeling. +`in-progress` | Applied by [Committers](#join-the-project-team) to PRs that are pending some work before they're ready for review. | The original PR submitter should @mention the team member that applied the label once the PR is complete. +`performance` | This issue or PR is directly related to improving performance. | +`refactor` | Added to issues or PRs that deal with cleaning up or modifying the project for the betterment of it. | +`starter` | Applied by [Committers](#join-the-project-team) to issues that they consider good introductions to the project for people who have not contributed before. These are not necessarily "easy", but rather focused around how much context is necessary in order to understand what needs to be done for this project in particular. | Existing project members are expected to stay away from these unless they increase in priority. +`support` | This issue is either asking a question about how to use the project, clarifying the reason for unexpected behavior, or possibly reporting a `bug` but does not have enough detail yet to determine whether it would count as such. | The label should be switched to `bug` if reliable reproduction steps are provided. Issues primarily with unintended configurations of a user's environment are not considered bugs, even if they cause crashes. +`tests` | This issue or PR either requests or adds primarily tests to the project. 
| If a PR is pending tests, that will be handled through the [PR review process](#review-pull-requests) +`wontfix` | Labelers may apply this label to issues that clearly have nothing at all to do with the project or are otherwise entirely outside of its scope/sphere of influence. [Committers](#join-the-project-team) may apply this label and close an issue or PR if they decide to pass on an otherwise relevant issue. | The issue or PR should be closed as soon as the label is applied, and a clear explanation provided of why the label was used. Contributors are free to contest the labeling, but the decision ultimately falls on committers as to whether to accept something or not. + +## Clean Up Issues and PRs + +[Needs Collaborator](#join-the-project-team): Issue Tracker + +Issues and PRs can go stale after a while. Maybe they're abandoned. Maybe the team will just plain not have time to address them any time soon. + +In these cases, they should be closed until they're brought up again or the interaction starts over. + +To clean up issues and PRs: + +* Search the issue tracker for issues or PRs, and add the term `updated:<=YYYY-MM-DD`, where the date is 30 days before today. +* Go through each issue *from oldest to newest*, and close them if **all of the following are true**: + * not opened by a maintainer + * not marked as `critical` + * not marked as `starter` or `help wanted` (these might stick around for a while, in general, as they're intended to be available) + * no explicit messages in the comments asking for it to be left open + * does not belong to a milestone +* Leave a message when closing saying "Cleaning up stale issue. Please reopen or ping us if and when you're ready to resume this. See https://github.com/npm/libnpmorg/blob/latest/CONTRIBUTING.md#clean-up-issues-and-prs for more details." + +## Review Pull Requests + +[Needs Collaborator](#join-the-project-team): Issue Tracker + +While anyone can comment on a PR, add feedback, etc, PRs are only *approved* by team members with Issue Tracker or higher permissions. + +PR reviews use [GitHub's own review feature](https://help.github.com/articles/about-pull-request-reviews/), which manages comments, approval, and review iteration. + +Some notes: + +* You may ask for minor changes ("nitpicks"), but consider whether they are really blockers to merging: try to err on the side of "approve, with comments". +* *ALL PULL REQUESTS* should be covered by a test: either by a previously-failing test, an existing test that covers the entire functionality of the submitted code, or new tests to verify any new/changed behavior. All tests must also pass and follow established conventions. Test coverage should not drop, unless the specific case is considered reasonable by maintainers. +* Please make sure you're familiar with the code or documentation being updated, unless it's a minor change (spellchecking, minor formatting, etc). You may @mention another project member who you think is better suited for the review, but still provide a non-approving review of your own. +* Be extra kind: people who submit code/doc contributions are putting themselves in a pretty vulnerable position, and have put time and care into what they've done (even if that's not obvious to you!) -- always respond with respect, be understanding, but don't feel like you need to sacrifice your standards for their sake, either. Just don't be a jerk about it? 
+ +## Merge Pull Requests + +[Needs Collaborator](#join-the-project-team): Committer + +TBD - need to hash out a bit more of this process. + +## Tag A Release + +[Needs Collaborator](#join-the-project-team): Committer + +TBD - need to hash out a bit more of this process. The most important bit here is probably that all tests must pass, and tags must use [semver](https://semver.org). + +## Join the Project Team + +### Ways to Join + +There are many ways to contribute! Most of them don't require any official status unless otherwise noted. That said, there's a couple of positions that grant special repository abilities, and this section describes how they're granted and what they do. + +All of the below positions are granted based on the project team's needs, as well as their consensus opinion about whether they would like to work with the person and think that they would fit well into that position. The process is relatively informal, and it's likely that people who express interest in participating can just be granted the permissions they'd like. + +You can spot a collaborator on the repo by looking for the `[Collaborator]` or `[Owner]` tags next to their names. + +Permission | Description +--- | --- +Issue Tracker | Granted to contributors who express a strong interest in spending time on the project's issue tracker. These tasks are mainly [labeling issues](#label-issues), [cleaning up old ones](#clean-up-issues-and-prs), and [reviewing pull requests](#review-pull-requests), as well as all the usual things non-team-member contributors can do. Issue handlers should not merge pull requests, tag releases, or directly commit code themselves: that should still be done through the usual pull request process. Becoming an Issue Handler means the project team trusts you to understand enough of the team's process and context to implement it on the issue tracker. +Committer | Granted to contributors who want to handle the actual pull request merges, tagging new versions, etc. Committers should have a good level of familiarity with the codebase, and enough context to understand the implications of various changes, as well as a good sense of the will and expectations of the project team. +Admin/Owner | Granted to people ultimately responsible for the project, its community, etc. + +## Attribution + +This guide was generated using the WeAllJS `CONTRIBUTING.md` generator. [Make your own](https://npm.im/weallcontribute)! diff --git a/node_modules/libnpmorg/LICENSE b/node_modules/libnpmorg/LICENSE new file mode 100644 index 0000000000000..209e4477f39c1 --- /dev/null +++ b/node_modules/libnpmorg/LICENSE @@ -0,0 +1,13 @@ +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/libnpmorg/PULL_REQUEST_TEMPLATE b/node_modules/libnpmorg/PULL_REQUEST_TEMPLATE new file mode 100644 index 0000000000000..9471c6d325f7e --- /dev/null +++ b/node_modules/libnpmorg/PULL_REQUEST_TEMPLATE @@ -0,0 +1,7 @@ + diff --git a/node_modules/libnpmorg/README.md b/node_modules/libnpmorg/README.md new file mode 100644 index 0000000000000..6244794eda016 --- /dev/null +++ b/node_modules/libnpmorg/README.md @@ -0,0 +1,144 @@ +# libnpmorg [![npm version](https://img.shields.io/npm/v/libnpmorg.svg)](https://npm.im/libnpmorg) [![license](https://img.shields.io/npm/l/libnpmorg.svg)](https://npm.im/libnpmorg) [![Travis](https://img.shields.io/travis/npm/libnpmorg.svg)](https://travis-ci.org/npm/libnpmorg) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/libnpmorg?svg=true)](https://ci.appveyor.com/project/zkat/libnpmorg) [![Coverage Status](https://coveralls.io/repos/github/npm/libnpmorg/badge.svg?branch=latest)](https://coveralls.io/github/npm/libnpmorg?branch=latest) + +[`libnpmorg`](https://github.com/npm/libnpmorg) is a Node.js library for +programmatically accessing the [npm Org membership +API](https://github.com/npm/registry/blob/master/docs/orgs/memberships.md#membership-detail). + +## Example + +```js +const org = require('libnpmorg') + +console.log(await org.ls('myorg', {token: 'deadbeef'})) +=> +Roster { + zkat: 'developer', + iarna: 'admin', + isaacs: 'owner' +} +``` + +## Install + +`$ npm install libnpmorg` + +## Table of Contents + +* [Example](#example) +* [Install](#install) +* [API](#api) + * [hook opts](#opts) + * [`set()`](#set) + * [`rm()`](#rm) + * [`ls()`](#ls) + * [`ls.stream()`](#ls-stream) + +### API + +#### `opts` for `libnpmorg` commands + +`libnpmorg` uses [`npm-registry-fetch`](https://npm.im/npm-registry-fetch). +All options are passed through directly to that library, so please refer to [its +own `opts` +documentation](https://www.npmjs.com/package/npm-registry-fetch#fetch-options) +for options that can be passed in. + +A couple of options of note for those in a hurry: + +* `opts.token` - can be passed in and will be used as the authentication token for the registry. For other ways to pass in auth details, see the n-r-f docs. +* `opts.otp` - certain operations will require an OTP token to be passed in. If a `libnpmorg` command fails with `err.code === EOTP`, please retry the request with `{otp: <2fa token>}` +* `opts.Promise` - If you pass this in, the Promises returned by `libnpmorg` commands will use this Promise class instead. For example: `{Promise: require('bluebird')}` + +#### `> org.set(org, user, [role], [opts]) -> Promise` + +The returned Promise resolves to a [Membership +Detail](https://github.com/npm/registry/blob/master/docs/orgs/memberships.md#membership-detail) +object. + +The `role` is optional and should be one of `admin`, `owner`, or `developer`. +`developer` is the default if no `role` is provided. + +`org` and `user` must be scope names for the org name and user name +respectively. They can optionally be prefixed with `@`. + +See also: [`PUT +/-/org/:scope/user`](https://github.com/npm/registry/blob/master/docs/orgs/memberships.md#org-membership-replace) + +##### Example + +```javascript +await org.set('@myorg', '@myuser', 'admin', {token: 'deadbeef'}) +=> +MembershipDetail { + org: { + name: 'myorg', + size: 15 + }, + user: 'myuser', + role: 'admin' +} +``` + +#### `> org.rm(org, user, [opts]) -> Promise` + +The Promise resolves to `null` on success. 
+ +`org` and `user` must be scope names for the org name and user name +respectively. They can optionally be prefixed with `@`. + +See also: [`DELETE +/-/org/:scope/user`](https://github.com/npm/registry/blob/master/docs/orgs/memberships.md#org-membership-delete) + +##### Example + +```javascript +await org.rm('myorg', 'myuser', {token: 'deadbeef'}) +``` + +#### `> org.ls(org, [opts]) -> Promise` + +The Promise resolves to a +[Roster](https://github.com/npm/registry/blob/master/docs/orgs/memberships.md#roster) +object. + +`org` must be a scope name for an org, and can be optionally prefixed with `@`. + +See also: [`GET +/-/org/:scope/user`](https://github.com/npm/registry/blob/master/docs/orgs/memberships.md#org-roster) + +##### Example + +```javascript +await org.ls('myorg', {token: 'deadbeef'}) +=> +Roster { + zkat: 'developer', + iarna: 'admin', + isaacs: 'owner' +} +``` + +#### `> org.ls.stream(org, [opts]) -> Stream` + +Returns a stream of entries for a +[Roster](https://github.com/npm/registry/blob/master/docs/orgs/memberships.md#roster), +with each emitted entry in `[key, value]` format. + +`org` must be a scope name for an org, and can be optionally prefixed with `@`. + +The returned stream is a valid `Symbol.asyncIterator`. + +See also: [`GET +/-/org/:scope/user`](https://github.com/npm/registry/blob/master/docs/orgs/memberships.md#org-roster) + +##### Example + +```javascript +for await (let [user, role] of org.ls.stream('myorg', {token: 'deadbeef'})) { + console.log(`user: ${user} (${role})`) +} +=> +user: zkat (developer) +user: iarna (admin) +user: isaacs (owner) +``` diff --git a/node_modules/libnpmorg/appveyor.yml b/node_modules/libnpmorg/appveyor.yml new file mode 100644 index 0000000000000..9cc64c58e02f9 --- /dev/null +++ b/node_modules/libnpmorg/appveyor.yml @@ -0,0 +1,22 @@ +environment: + matrix: + - nodejs_version: "10" + - nodejs_version: "9" + - nodejs_version: "8" + - nodejs_version: "6" + +platform: + - x64 + +install: + - ps: Install-Product node $env:nodejs_version $env:platform + - npm config set spin false + - npm install + +test_script: + - npm test + +matrix: + fast_finish: true + +build: off diff --git a/node_modules/libnpmorg/index.js b/node_modules/libnpmorg/index.js new file mode 100644 index 0000000000000..cd3e51e6ad172 --- /dev/null +++ b/node_modules/libnpmorg/index.js @@ -0,0 +1,71 @@ +'use strict' + +const eu = encodeURIComponent +const fetch = require('npm-registry-fetch') +const figgyPudding = require('figgy-pudding') +const getStream = require('get-stream') +const validate = require('aproba') + +const OrgConfig = figgyPudding({ + Promise: { default: () => Promise } +}) + +// From https://github.com/npm/registry/blob/master/docs/orgs/memberships.md +const cmd = module.exports = {} + +class MembershipDetail {} +cmd.set = (org, user, role, opts) => { + if (typeof role === 'object' && !opts) { + opts = role + role = undefined + } + opts = OrgConfig(opts) + return new opts.Promise((resolve, reject) => { + validate('SSSO|SSZO', [org, user, role, opts]) + user = user.replace(/^@?/, '') + org = org.replace(/^@?/, '') + fetch.json(`/-/org/${eu(org)}/user`, opts.concat({ + method: 'PUT', + body: { user, role } + })).then(resolve, reject) + }).then(ret => Object.assign(new MembershipDetail(), ret)) +} + +cmd.rm = (org, user, opts) => { + opts = OrgConfig(opts) + return new opts.Promise((resolve, reject) => { + validate('SSO', [org, user, opts]) + user = user.replace(/^@?/, '') + org = org.replace(/^@?/, '') + fetch(`/-/org/${eu(org)}/user`, opts.concat({ + 
method: 'DELETE', + body: { user }, + ignoreBody: true + })).then(resolve, reject) + }).then(() => null) +} + +class Roster {} +cmd.ls = (org, opts) => { + opts = OrgConfig(opts) + return new opts.Promise((resolve, reject) => { + getStream.array(cmd.ls.stream(org, opts)).then(entries => { + const obj = {} + for (let [key, val] of entries) { + obj[key] = val + } + return obj + }).then(resolve, reject) + }).then(ret => Object.assign(new Roster(), ret)) +} + +cmd.ls.stream = (org, opts) => { + opts = OrgConfig(opts) + validate('SO', [org, opts]) + org = org.replace(/^@?/, '') + return fetch.json.stream(`/-/org/${eu(org)}/user`, '*', opts.concat({ + mapJson (value, [key]) { + return [key, value] + } + })) +} diff --git a/node_modules/libnpmorg/package.json b/node_modules/libnpmorg/package.json new file mode 100644 index 0000000000000..e93fc52a05d57 --- /dev/null +++ b/node_modules/libnpmorg/package.json @@ -0,0 +1,75 @@ +{ + "_from": "libnpmorg@1.0.1", + "_id": "libnpmorg@1.0.1", + "_inBundle": false, + "_integrity": "sha512-0sRUXLh+PLBgZmARvthhYXQAWn0fOsa6T5l3JSe2n9vKG/lCVK4nuG7pDsa7uMq+uTt2epdPK+a2g6btcY11Ww==", + "_location": "/libnpmorg", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "libnpmorg@1.0.1", + "name": "libnpmorg", + "escapedName": "libnpmorg", + "rawSpec": "1.0.1", + "saveSpec": null, + "fetchSpec": "1.0.1" + }, + "_requiredBy": [ + "#USER", + "/" + ], + "_resolved": "https://registry.npmjs.org/libnpmorg/-/libnpmorg-1.0.1.tgz", + "_shasum": "5d2503f6ceb57f33dbdcc718e6698fea6d5ad087", + "_spec": "libnpmorg@1.0.1", + "_where": "/Users/isaacs/dev/npm/cli", + "author": { + "name": "Kat Marchán", + "email": "kzm@zkat.tech" + }, + "bugs": { + "url": "https://github.com/npm/libnpmorg/issues" + }, + "bundleDependencies": false, + "dependencies": { + "aproba": "^2.0.0", + "figgy-pudding": "^3.4.1", + "get-stream": "^4.0.0", + "npm-registry-fetch": "^4.0.0" + }, + "deprecated": false, + "description": "Programmatic api for `npm org` commands", + "devDependencies": { + "nock": "^9.6.1", + "standard": "^12.0.0", + "standard-version": "*", + "tap": "*", + "weallbehave": "*", + "weallcontribute": "*" + }, + "homepage": "https://npmjs.com/package/libnpmorg", + "keywords": [ + "libnpm", + "npm", + "package manager", + "api", + "orgs", + "teams" + ], + "license": "ISC", + "name": "libnpmorg", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/libnpmorg.git" + }, + "scripts": { + "postrelease": "npm publish && git push --follow-tags", + "prerelease": "npm t", + "pretest": "standard", + "release": "standard-version -s", + "test": "tap -J --100 test/*.js", + "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", + "update-contrib": "weallcontribute -o . 
&& git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" + }, + "version": "1.0.1" +} diff --git a/node_modules/libnpmorg/test/index.js b/node_modules/libnpmorg/test/index.js new file mode 100644 index 0000000000000..e6ec33ab8a6c4 --- /dev/null +++ b/node_modules/libnpmorg/test/index.js @@ -0,0 +1,81 @@ +'use strict' + +const figgyPudding = require('figgy-pudding') +const getStream = require('get-stream') +const test = require('tap').test +const tnock = require('./util/tnock.js') + +const org = require('../index.js') + +const OPTS = figgyPudding({ registry: {} })({ + registry: 'https://mock.reg/' +}) + +test('set', t => { + const memDeets = { + org: { + name: 'myorg', + size: 15 + }, + user: 'myuser', + role: 'admin' + } + tnock(t, OPTS.registry).put('/-/org/myorg/user', { + user: 'myuser', + role: 'admin' + }).reply(201, memDeets) + return org.set('myorg', 'myuser', 'admin', OPTS).then(res => { + t.deepEqual(res, memDeets, 'got a membership details object back') + }) +}) + +test('optional role for set', t => { + const memDeets = { + org: { + name: 'myorg', + size: 15 + }, + user: 'myuser', + role: 'developer' + } + tnock(t, OPTS.registry).put('/-/org/myorg/user', { + user: 'myuser' + }).reply(201, memDeets) + return org.set('myorg', 'myuser', OPTS).then(res => { + t.deepEqual(res, memDeets, 'got a membership details object back') + }) +}) + +test('rm', t => { + tnock(t, OPTS.registry).delete('/-/org/myorg/user', { + user: 'myuser' + }).reply(204) + return org.rm('myorg', 'myuser', OPTS).then(() => { + t.ok(true, 'request succeeded') + }) +}) + +test('ls', t => { + const roster = { + 'zkat': 'developer', + 'iarna': 'admin', + 'isaacs': 'owner' + } + tnock(t, OPTS.registry).get('/-/org/myorg/user').reply(200, roster) + return org.ls('myorg', OPTS).then(res => { + t.deepEqual(res, roster, 'got back a roster') + }) +}) + +test('ls stream', t => { + const roster = { + 'zkat': 'developer', + 'iarna': 'admin', + 'isaacs': 'owner' + } + const rosterArr = Object.keys(roster).map(k => [k, roster[k]]) + tnock(t, OPTS.registry).get('/-/org/myorg/user').reply(200, roster) + return getStream.array(org.ls.stream('myorg', OPTS)).then(res => { + t.deepEqual(res, rosterArr, 'got back a roster, in entries format') + }) +}) diff --git a/node_modules/libnpmorg/test/util/tnock.js b/node_modules/libnpmorg/test/util/tnock.js new file mode 100644 index 0000000000000..00b6e160e1019 --- /dev/null +++ b/node_modules/libnpmorg/test/util/tnock.js @@ -0,0 +1,12 @@ +'use strict' + +const nock = require('nock') + +module.exports = tnock +function tnock (t, host) { + const server = nock(host) + t.tearDown(function () { + server.done() + }) + return server +} diff --git a/node_modules/libnpmpublish/.travis.yml b/node_modules/libnpmpublish/.travis.yml new file mode 100644 index 0000000000000..db5ea8b018640 --- /dev/null +++ b/node_modules/libnpmpublish/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +sudo: false +node_js: + - "10" + - "9" + - "8" + - "6" diff --git a/node_modules/libnpmpublish/CHANGELOG.md b/node_modules/libnpmpublish/CHANGELOG.md new file mode 100644 index 0000000000000..974b3fd308a6b --- /dev/null +++ b/node_modules/libnpmpublish/CHANGELOG.md @@ -0,0 +1,55 @@ +# Change Log + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
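One pattern from the libnpmorg code and tests above is worth spelling out before the diff goes deeper into libnpmpublish: option objects are built with `figgy-pudding`, so declared keys pick up defaults and `concat()` layers overrides on top without mutating the original. The block below is a hedged, self-contained sketch of that pattern under those assumptions; it is not code from either package.

```js
const figgyPudding = require('figgy-pudding')

// Declare the keys this consumer cares about, with optional defaults.
// A function default (like Promise here) is invoked to produce the value.
const OrgConfig = figgyPudding({
  registry: { default: 'https://registry.npmjs.org/' },
  Promise: { default: () => Promise }
})

const opts = OrgConfig({ registry: 'https://mock.reg/' })
console.log(opts.registry)            // 'https://mock.reg/'
console.log(opts.Promise === Promise) // true -- default applied

// concat() returns a new options object with the extra provider layered on
// top; the original `opts` is left untouched.
const moreOpts = opts.concat({ registry: 'https://other.reg/' })
console.log(moreOpts.registry)        // 'https://other.reg/'
console.log(opts.registry)            // still 'https://mock.reg/'
```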
+ + +## [1.1.2](https://github.com/npm/libnpmpublish/compare/v1.1.1...v1.1.2) (2019-07-16) + + + + +## [1.1.1](https://github.com/npm/libnpmpublish/compare/v1.1.0...v1.1.1) (2019-01-22) + + +### Bug Fixes + +* **auth:** send username in correct key ([#3](https://github.com/npm/libnpmpublish/issues/3)) ([38422d0](https://github.com/npm/libnpmpublish/commit/38422d0)) + + + + +# [1.1.0](https://github.com/npm/libnpmpublish/compare/v1.0.1...v1.1.0) (2018-08-31) + + +### Features + +* **publish:** add support for publishConfig on manifests ([161723b](https://github.com/npm/libnpmpublish/commit/161723b)) + + + + +## [1.0.1](https://github.com/npm/libnpmpublish/compare/v1.0.0...v1.0.1) (2018-08-31) + + +### Bug Fixes + +* **opts:** remove unused opts ([2837098](https://github.com/npm/libnpmpublish/commit/2837098)) + + + + +# 1.0.0 (2018-08-31) + + +### Bug Fixes + +* **api:** use opts.algorithms, return true on success ([80fe34b](https://github.com/npm/libnpmpublish/commit/80fe34b)) +* **publish:** first test pass w/ bugfixes ([74135c9](https://github.com/npm/libnpmpublish/commit/74135c9)) +* **publish:** full coverage test and related fixes ([b5a3446](https://github.com/npm/libnpmpublish/commit/b5a3446)) + + +### Features + +* **docs:** add README with api docs ([553c13d](https://github.com/npm/libnpmpublish/commit/553c13d)) +* **publish:** add initial publish support. tests tbd ([5b3fe94](https://github.com/npm/libnpmpublish/commit/5b3fe94)) +* **unpublish:** add new api with unpublish support ([1c9d594](https://github.com/npm/libnpmpublish/commit/1c9d594)) diff --git a/node_modules/libnpmpublish/CODE_OF_CONDUCT.md b/node_modules/libnpmpublish/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000000..aeb72f598dcb4 --- /dev/null +++ b/node_modules/libnpmpublish/CODE_OF_CONDUCT.md @@ -0,0 +1,151 @@ +# Code of Conduct + +## When Something Happens + +If you see a Code of Conduct violation, follow these steps: + +1. Let the person know that what they did is not appropriate and ask them to stop and/or edit their message(s) or commits. +2. That person should immediately stop the behavior and correct the issue. +3. If this doesn’t happen, or if you're uncomfortable speaking up, [contact the maintainers](#contacting-maintainers). +4. As soon as available, a maintainer will look into the issue, and take [further action (see below)](#further-enforcement), starting with a warning, then temporary block, then long-term repo or organization ban. + +When reporting, please include any relevant details, links, screenshots, context, or other information that may be used to better understand and resolve the situation. + +**The maintainer team will prioritize the well-being and comfort of the recipients of the violation over the comfort of the violator.** See [some examples below](#enforcement-examples). + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as contributors and maintainers of this project pledge to making participation in our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, technical preferences, nationality, personal appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment include: + + * Using welcoming and inclusive language. + * Being respectful of differing viewpoints and experiences. + * Gracefully accepting constructive feedback. 
+ * Focusing on what is best for the community. + * Showing empathy and kindness towards other community members. + * Encouraging and raising up your peers in the project so you can all bask in hacks and glory. + +Examples of unacceptable behavior by participants include: + + * The use of sexualized language or imagery and unwelcome sexual attention or advances, including when simulated online. The only exception to sexual topics is channels/spaces specifically for topics of sexual identity. + * Casual mention of slavery or indentured servitude and/or false comparisons of one's occupation or situation to slavery. Please consider using or asking about alternate terminology when referring to such metaphors in technology. + * Making light of/making mocking comments about trigger warnings and content warnings. + * Trolling, insulting/derogatory comments, and personal or political attacks. + * Public or private harassment, deliberate intimidation, or threats. + * Publishing others' private information, such as a physical or electronic address, without explicit permission. This includes any sort of "outing" of any aspect of someone's identity without their consent. + * Publishing private screenshots or quotes of interactions in the context of this project without all quoted users' *explicit* consent. + * Publishing of private communication that doesn't have to do with reporting harassment. + * Any of the above even when [presented as "ironic" or "joking"](https://en.wikipedia.org/wiki/Hipster_racism). + * Any attempt to present "reverse-ism" versions of the above as violations. Examples of reverse-isms are "reverse racism", "reverse sexism", "heterophobia", and "cisphobia". + * Unsolicited explanations under the assumption that someone doesn't already know it. Ask before you teach! Don't assume what people's knowledge gaps are. + * [Feigning or exaggerating surprise](https://www.recurse.com/manual#no-feigned-surprise) when someone admits to not knowing something. + * "[Well-actuallies](https://www.recurse.com/manual#no-well-actuallys)" + * Other conduct which could reasonably be considered inappropriate in a professional or community setting. + +## Scope + +This Code of Conduct applies both within spaces involving this project and in other spaces involving community members. This includes the repository, its Pull Requests and Issue tracker, its Twitter community, private email communications in the context of the project, and any events where members of the project are participating, as well as adjacent communities and venues affecting the project's members. + +Depending on the violation, the maintainers may decide that violations of this code of conduct that have happened outside of the scope of the community make an individual unwelcome, and take appropriate action to maintain the comfort and safety of its members. + +### Other Community Standards + +As a project on GitHub, this project is additionally covered by the [GitHub Community Guidelines](https://help.github.com/articles/github-community-guidelines/). + +Additionally, as a project hosted on npm, it is covered by [npm, Inc's Code of Conduct](https://www.npmjs.com/policies/conduct). + +Enforcement of those guidelines after violations overlapping with the above is the responsibility of those entities, and enforcement may happen in any or all of the services/communities.
+ +## Maintainer Enforcement Process + +Once the maintainers get involved, they will follow a documented series of steps and do their best to preserve the well-being of project members. This section covers actual concrete steps. + +### Contacting Maintainers + +You may get in touch with the maintainer team through any of the following methods: + + * Through email: + * [kzm@zkat.tech](mailto:kzm@zkat.tech) (Kat Marchán) + + * Through Twitter: + * [@maybekatz](https://twitter.com/maybekatz) (Kat Marchán) + +### Further Enforcement + +If you've already followed the [initial enforcement steps](#enforcement), these are the steps maintainers will take for further enforcement, as needed: + + 1. Repeat the request to stop. + 2. If the person doubles down, they will have offending messages removed or edited by a maintainers given an official warning. The PR or Issue may be locked. + 3. If the behavior continues or is repeated later, the person will be blocked from participating for 24 hours. + 4. If the behavior continues or is repeated after the temporary block, a long-term (6-12mo) ban will be used. + +On top of this, maintainers may remove any offending messages, images, contributions, etc, as they deem necessary. + +Maintainers reserve full rights to skip any of these steps, at their discretion, if the violation is considered to be a serious and/or immediate threat to the health and well-being of members of the community. These include any threats, serious physical or verbal attacks, and other such behavior that would be completely unacceptable in any social setting that puts our members at risk. + +Members expelled from events or venues with any sort of paid attendance will not be refunded. + +### Who Watches the Watchers? + +Maintainers and other leaders who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. These may include anything from removal from the maintainer team to a permanent ban from the community. + +Additionally, as a project hosted on both GitHub and npm, [their own Codes of Conducts may be applied against maintainers of this project](#other-community-standards), externally of this project's procedures. + +### Enforcement Examples + +#### The Best Case + +The vast majority of situations work out like this. This interaction is common, and generally positive. + +> Alex: "Yeah I used X and it was really crazy!" + +> Patt (not a maintainer): "Hey, could you not use that word? What about 'ridiculous' instead?" + +> Alex: "oh sorry, sure." -> edits old comment to say "it was really confusing!" + +#### The Maintainer Case + +Sometimes, though, you need to get maintainers involved. Maintainers will do their best to resolve conflicts, but people who were harmed by something **will take priority**. + +> Patt: "Honestly, sometimes I just really hate using $library and anyone who uses it probably sucks at their job." + +> Alex: "Whoa there, could you dial it back a bit? There's a CoC thing about attacking folks' tech use like that." + +> Patt: "I'm not attacking anyone, what's your problem?" + +> Alex: "@maintainers hey uh. Can someone look at this issue? Patt is getting a bit aggro. I tried to nudge them about it, but nope." + +> KeeperOfCommitBits: (on issue) "Hey Patt, maintainer here. Could you tone it down? This sort of attack is really not okay in this space." + +> Patt: "Leave me alone I haven't said anything bad wtf is wrong with you." 
+ +> KeeperOfCommitBits: (deletes user's comment), "@patt I mean it. Please refer to the CoC over at (URL to this CoC) if you have questions, but you can consider this an actual warning. I'd appreciate it if you reworded your messages in this thread, since they made folks there uncomfortable. Let's try and be kind, yeah?" + +> Patt: "@keeperofbits Okay sorry. I'm just frustrated and I'm kinda burnt out and I guess I got carried away. I'll DM Alex a note apologizing and edit my messages. Sorry for the trouble." + +> KeeperOfCommitBits: "@patt Thanks for that. I hear you on the stress. Burnout sucks :/. Have a good one!" + +#### The Nope Case + +> PepeTheFrog🐸: "Hi, I am a literal actual nazi and I think white supremacists are quite fashionable." + +> Patt: "NOOOOPE. OH NOPE NOPE." + +> Alex: "JFC NO. NOPE. @keeperofbits NOPE NOPE LOOK HERE" + +> KeeperOfCommitBits: "👀 Nope. NOPE NOPE NOPE. 🔥" + +> PepeTheFrog🐸 has been banned from all organization or user repositories belonging to KeeperOfCommitBits. + +## Attribution + +This Code of Conduct was generated using [WeAllJS Code of Conduct Generator](https://npm.im/weallbehave), which is based on the [WeAllJS Code of +Conduct](https://wealljs.org/code-of-conduct), which is itself based on +[Contributor Covenant](http://contributor-covenant.org), version 1.4, available +at +[http://contributor-covenant.org/version/1/4](http://contributor-covenant.org/version/1/4), +and the LGBTQ in Technology Slack [Code of +Conduct](http://lgbtq.technology/coc.html). diff --git a/node_modules/libnpmpublish/CONTRIBUTING.md b/node_modules/libnpmpublish/CONTRIBUTING.md new file mode 100644 index 0000000000000..780044ffcc0f3 --- /dev/null +++ b/node_modules/libnpmpublish/CONTRIBUTING.md @@ -0,0 +1,256 @@ +# Contributing + +## How do I... + +* [Use This Guide](#introduction)? +* Ask or Say Something? 🤔🐛😱 + * [Request Support](#request-support) + * [Report an Error or Bug](#report-an-error-or-bug) + * [Request a Feature](#request-a-feature) +* Make Something? 🤓👩🏽‍💻📜🍳 + * [Project Setup](#project-setup) + * [Contribute Documentation](#contribute-documentation) + * [Contribute Code](#contribute-code) +* Manage Something ✅🙆🏼💃👔 + * [Provide Support on Issues](#provide-support-on-issues) + * [Label Issues](#label-issues) + * [Clean Up Issues and PRs](#clean-up-issues-and-prs) + * [Review Pull Requests](#review-pull-requests) + * [Merge Pull Requests](#merge-pull-requests) + * [Tag a Release](#tag-a-release) + * [Join the Project Team](#join-the-project-team) +* Add a Guide Like This One [To My Project](#attribution)? 🤖😻👻 + +## Introduction + +Thank you so much for your interest in contributing!. All types of contributions are encouraged and valued. See the [table of contents](#toc) for different ways to help and details about how this project handles them!📝 + +Please make sure to read the relevant section before making your contribution! It will make it a lot easier for us maintainers to make the most of it and smooth out the experience for all involved. 💚 + +The [Project Team](#join-the-project-team) looks forward to your contributions. 🙌🏾✨ + +## Request Support + +If you have a question about this project, how to use it, or just need clarification about something: + +* Open an Issue at https://github.com/npm/libnpmpublish/issues +* Provide as much context as you can about what you're running into. +* Provide project and platform versions (nodejs, npm, etc), depending on what seems relevant. If not, please be ready to provide that information if maintainers ask for it. 
+ +Once it's filed: + +* The project team will [label the issue](#label-issues). +* Someone will try to have a response soon. +* If you or the maintainers don't respond to an issue for 30 days, the [issue will be closed](#clean-up-issues-and-prs). If you want to come back to it, reply (once, please), and we'll reopen the existing issue. Please avoid filing new issues as extensions of one you already made. + +## Report an Error or Bug + +If you run into an error or bug with the project: + +* Open an Issue at https://github.com/npm/libnpmpublish/issues +* Include *reproduction steps* that someone else can follow to recreate the bug or error on their own. +* Provide project and platform versions (nodejs, npm, etc), depending on what seems relevant. If not, please be ready to provide that information if maintainers ask for it. + +Once it's filed: + +* The project team will [label the issue](#label-issues). +* A team member will try to reproduce the issue with your provided steps. If there are no repro steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as `needs-repro`. Bugs with the `needs-repro` tag will not be addressed until they are reproduced. +* If the team is able to reproduce the issue, it will be marked `needs-fix`, as well as possibly other tags (such as `critical`), and the issue will be left to be [implemented by someone](#contribute-code). +* If you or the maintainers don't respond to an issue for 30 days, the [issue will be closed](#clean-up-issues-and-prs). If you want to come back to it, reply (once, please), and we'll reopen the existing issue. Please avoid filing new issues as extensions of one you already made. +* `critical` issues may be left open, depending on perceived immediacy and severity, even past the 30 day deadline. + +## Request a Feature + +If the project doesn't do something you need or want it to do: + +* Open an Issue at https://github.com/npm/libnpmpublish/issues +* Provide as much context as you can about what you're running into. +* Please try and be clear about why existing features and alternatives would not work for you. + +Once it's filed: + +* The project team will [label the issue](#label-issues). +* The project team will evaluate the feature request, possibly asking you more questions to understand its purpose and any relevant requirements. If the issue is closed, the team will convey their reasoning and suggest an alternative path forward. +* If the feature request is accepted, it will be marked for implementation with `feature-accepted`, which can then be done by either by a core team member or by anyone in the community who wants to [contribute code](#contribute-code). + +Note: The team is unlikely to be able to accept every single feature request that is filed. Please understand if they need to say no. + +## Project Setup + +So you wanna contribute some code! That's great! This project uses GitHub Pull Requests to manage contributions, so [read up on how to fork a GitHub project and file a PR](https://guides.github.com/activities/forking) if you've never done it before. + +If this seems like a lot or you aren't able to do all this setup, you might also be able to [edit the files directly](https://help.github.com/articles/editing-files-in-another-user-s-repository/) without having to do any of this setup. Yes, [even code](#contribute-code). 
+ +If you want to go the usual route and run the project locally, though: + +* [Install Node.js](https://nodejs.org/en/download/) +* [Fork the project](https://guides.github.com/activities/forking/#fork) + +Then in your terminal: +* `cd path/to/your/clone` +* `npm install` +* `npm test` + +And you should be ready to go! + +## Contribute Documentation + +Documentation is a super important, critical part of this project. Docs are how we keep track of what we're doing, how, and why. It's how we stay on the same page about our policies. And it's how we tell others everything they need in order to be able to use this project -- or contribute to it. So thank you in advance. + +Documentation contributions of any size are welcome! Feel free to file a PR even if you're just rewording a sentence to be more clear, or fixing a spelling mistake! + +To contribute documentation: + +* [Set up the project](#project-setup). +* Edit or add any relevant documentation. +* Make sure your changes are formatted correctly and consistently with the rest of the documentation. +* Re-read what you wrote, and run a spellchecker on it to make sure you didn't miss anything. +* In your commit message(s), begin the first line with `docs: `. For example: `docs: Adding a doc contrib section to CONTRIBUTING.md`. +* Write clear, concise commit message(s) using [conventional-changelog format](https://github.com/conventional-changelog/conventional-changelog-angular/blob/master/convention.md). Documentation commits should use `docs(<component>): <message>`. +* Go to https://github.com/npm/libnpmpublish/pulls and open a new pull request with your changes. +* If your PR is connected to an open issue, add a line in your PR's description that says `Fixes: #123`, where `#123` is the number of the issue you're fixing. + +Once you've filed the PR: + +* One or more maintainers will use GitHub's review feature to review your PR. +* If the maintainer asks for any changes, edit your changes, push, and ask for another review. +* If the maintainer decides to pass on your PR, they will thank you for the contribution and explain why they won't be accepting the changes. That's ok! We still really appreciate you taking the time to do it, and we don't take that lightly. 💚 +* If your PR gets accepted, it will be marked as such, and merged into the `latest` branch soon after. Your contribution will be distributed to the masses next time the maintainers [tag a release](#tag-a-release). + +## Contribute Code + +We like code commits a lot! They're super handy, and they keep the project going and doing the work it needs to do to be useful to others. + +Code contributions of just about any size are acceptable! + +The main difference between code contributions and documentation contributions is that contributing code requires inclusion of relevant tests for the code being added or changed. Contributions without accompanying tests will be held off until a test is added, unless the maintainers consider the specific tests to be either impossible, or way too much of a burden for such a contribution. + +To contribute code: + +* [Set up the project](#project-setup). +* Make any necessary changes to the source code. +* Include any [additional documentation](#contribute-documentation) the changes might need. +* Write tests that verify that your contribution works as expected. +* Write clear, concise commit message(s) using [conventional-changelog format](https://github.com/conventional-changelog/conventional-changelog-angular/blob/master/convention.md).
+* Dependency updates, additions, or removals must be in individual commits, and the message must use the format: `<prefix>(deps): PKG@VERSION`, where `<prefix>` is any of the usual `conventional-changelog` prefixes, at your discretion. +* Go to https://github.com/npm/libnpmpublish/pulls and open a new pull request with your changes. +* If your PR is connected to an open issue, add a line in your PR's description that says `Fixes: #123`, where `#123` is the number of the issue you're fixing. + +Once you've filed the PR: + +* Barring special circumstances, maintainers will not review PRs until all checks pass (Travis, AppVeyor, etc.). +* One or more maintainers will use GitHub's review feature to review your PR. +* If the maintainer asks for any changes, edit your changes, push, and ask for another review. Additional tags (such as `needs-tests`) will be added depending on the review. +* If the maintainer decides to pass on your PR, they will thank you for the contribution and explain why they won't be accepting the changes. That's ok! We still really appreciate you taking the time to do it, and we don't take that lightly. 💚 +* If your PR gets accepted, it will be marked as such, and merged into the `latest` branch soon after. Your contribution will be distributed to the masses next time the maintainers [tag a release](#tag-a-release). + +## Provide Support on Issues + +[Needs Collaborator](#join-the-project-team): none + +Helping out other users with their questions is a really awesome way of contributing to any community. It's not uncommon for most of the issues on an open source project to be support-related questions from users trying to understand something they ran into, or find their way around a known bug. + +Sometimes, the `support` label will be added to things that turn out to actually be other things, like bugs or feature requests. In that case, suss out the details with the person who filed the original issue, add a comment explaining what the bug is, and change the label from `support` to `bug` or `feature`. If you can't do this yourself, @mention a maintainer so they can do it. + +In order to help other folks out with their questions: + +* Go to the issue tracker and [filter open issues by the `support` label](https://github.com/npm/libnpmpublish/issues?q=is%3Aopen+is%3Aissue+label%3Asupport). +* Read through the list until you find something that you're familiar enough with to give an answer to. +* Respond to the issue with whatever details are needed to clarify the question, or get more details about what's going on. +* Once the discussion wraps up and things are clarified, either close the issue, or ask the original issue filer (or a maintainer) to close it for you. + +Some notes on picking up support issues: + +* Avoid responding to issues you don't know you can answer accurately. +* As much as possible, try to refer to past issues with accepted answers. Link to them from your replies with the `#123` format. +* Be kind and patient with users -- often, folks who have run into confusing things might be upset or impatient. This is ok. Try to understand where they're coming from, and if you're too uncomfortable with the tone, feel free to stay away or withdraw from the issue. (note: if the user is outright hostile or is violating the CoC, [refer to the Code of Conduct](CODE_OF_CONDUCT.md) to resolve the conflict). + +## Label Issues + +[Needs Collaborator](#join-the-project-team): Issue Tracker + +One of the most important tasks in handling issues is labeling them usefully and accurately.
All other tasks involving issues ultimately rely on the issue being classified in such a way that relevant parties looking to do their own tasks can find them quickly and easily. + +In order to label issues, [open up the list of unlabeled issues](https://github.com/npm/libnpmpublish/issues?q=is%3Aopen+is%3Aissue+no%3Alabel) and, **from newest to oldest**, read through each one and apply issue labels according to the table below. If you're unsure about what label to apply, skip the issue and try the next one: don't feel obligated to label each and every issue yourself! + +Label | Apply When | Notes +--- | --- | --- +`bug` | Cases where the code (or documentation) is behaving in a way it wasn't intended to. | If something is happening that surprises the *user* but does not go against the way the code is designed, it should use the `enhancement` label. +`critical` | Added to `bug` issues if the problem described makes the code completely unusable in a common situation. | +`documentation` | Added to issues or pull requests that affect any of the documentation for the project. | Can be combined with other labels, such as `bug` or `enhancement`. +`duplicate` | Added to issues or PRs that refer to the exact same issue as another one that's been previously labeled. | Duplicate issues should be marked and closed right away, with a message referencing the issue it's a duplicate of (with `#123`) +`enhancement` | Added to [feature requests](#request-a-feature), PRs, or documentation issues that are purely additive: the code or docs currently work as expected, but a change is being requested or suggested. | +`help wanted` | Applied by [Committers](#join-the-project-team) to issues and PRs that they would like to get outside help for. Generally, this means it's lower priority for the maintainer team to itself implement, but that the community is encouraged to pick up if they so desire | Never applied on first-pass labeling. +`in-progress` | Applied by [Committers](#join-the-project-team) to PRs that are pending some work before they're ready for review. | The original PR submitter should @mention the team member that applied the label once the PR is complete. +`performance` | This issue or PR is directly related to improving performance. | +`refactor` | Added to issues or PRs that deal with cleaning up or modifying the project for the betterment of it. | +`starter` | Applied by [Committers](#join-the-project-team) to issues that they consider good introductions to the project for people who have not contributed before. These are not necessarily "easy", but rather focused around how much context is necessary in order to understand what needs to be done for this project in particular. | Existing project members are expected to stay away from these unless they increase in priority. +`support` | This issue is either asking a question about how to use the project, clarifying the reason for unexpected behavior, or possibly reporting a `bug` but does not have enough detail yet to determine whether it would count as such. | The label should be switched to `bug` if reliable reproduction steps are provided. Issues primarily with unintended configurations of a user's environment are not considered bugs, even if they cause crashes. +`tests` | This issue or PR either requests or adds primarily tests to the project. 
| If a PR is pending tests, that will be handled through the [PR review process](#review-pull-requests) +`wontfix` | Labelers may apply this label to issues that clearly have nothing at all to do with the project or are otherwise entirely outside of its scope/sphere of influence. [Committers](#join-the-project-team) may apply this label and close an issue or PR if they decide to pass on an otherwise relevant issue. | The issue or PR should be closed as soon as the label is applied, and a clear explanation provided of why the label was used. Contributors are free to contest the labeling, but the decision ultimately falls on committers as to whether to accept something or not. + +## Clean Up Issues and PRs + +[Needs Collaborator](#join-the-project-team): Issue Tracker + +Issues and PRs can go stale after a while. Maybe they're abandoned. Maybe the team will just plain not have time to address them any time soon. + +In these cases, they should be closed until they're brought up again or the interaction starts over. + +To clean up issues and PRs: + +* Search the issue tracker for issues or PRs, and add the term `updated:<=YYYY-MM-DD`, where the date is 30 days before today. +* Go through each issue *from oldest to newest*, and close them if **all of the following are true**: + * not opened by a maintainer + * not marked as `critical` + * not marked as `starter` or `help wanted` (these might stick around for a while, in general, as they're intended to be available) + * no explicit messages in the comments asking for it to be left open + * does not belong to a milestone +* Leave a message when closing saying "Cleaning up stale issue. Please reopen or ping us if and when you're ready to resume this. See https://github.com/npm/libnpmpublish/blob/latest/CONTRIBUTING.md#clean-up-issues-and-prs for more details." + +## Review Pull Requests + +[Needs Collaborator](#join-the-project-team): Issue Tracker + +While anyone can comment on a PR, add feedback, etc, PRs are only *approved* by team members with Issue Tracker or higher permissions. + +PR reviews use [GitHub's own review feature](https://help.github.com/articles/about-pull-request-reviews/), which manages comments, approval, and review iteration. + +Some notes: + +* You may ask for minor changes ("nitpicks"), but consider whether they are really blockers to merging: try to err on the side of "approve, with comments". +* *ALL PULL REQUESTS* should be covered by a test: either by a previously-failing test, an existing test that covers the entire functionality of the submitted code, or new tests to verify any new/changed behavior. All tests must also pass and follow established conventions. Test coverage should not drop, unless the specific case is considered reasonable by maintainers. +* Please make sure you're familiar with the code or documentation being updated, unless it's a minor change (spellchecking, minor formatting, etc). You may @mention another project member who you think is better suited for the review, but still provide a non-approving review of your own. +* Be extra kind: people who submit code/doc contributions are putting themselves in a pretty vulnerable position, and have put time and care into what they've done (even if that's not obvious to you!) -- always respond with respect, be understanding, but don't feel like you need to sacrifice your standards for their sake, either. Just don't be a jerk about it? 
+ +## Merge Pull Requests + +[Needs Collaborator](#join-the-project-team): Committer + +TBD - need to hash out a bit more of this process. + +## Tag A Release + +[Needs Collaborator](#join-the-project-team): Committer + +TBD - need to hash out a bit more of this process. The most important bit here is probably that all tests must pass, and tags must use [semver](https://semver.org). + +## Join the Project Team + +### Ways to Join + +There are many ways to contribute! Most of them don't require any official status unless otherwise noted. That said, there's a couple of positions that grant special repository abilities, and this section describes how they're granted and what they do. + +All of the below positions are granted based on the project team's needs, as well as their consensus opinion about whether they would like to work with the person and think that they would fit well into that position. The process is relatively informal, and it's likely that people who express interest in participating can just be granted the permissions they'd like. + +You can spot a collaborator on the repo by looking for the `[Collaborator]` or `[Owner]` tags next to their names. + +Permission | Description +--- | --- +Issue Tracker | Granted to contributors who express a strong interest in spending time on the project's issue tracker. These tasks are mainly [labeling issues](#label-issues), [cleaning up old ones](#clean-up-issues-and-prs), and [reviewing pull requests](#review-pull-requests), as well as all the usual things non-team-member contributors can do. Issue handlers should not merge pull requests, tag releases, or directly commit code themselves: that should still be done through the usual pull request process. Becoming an Issue Handler means the project team trusts you to understand enough of the team's process and context to implement it on the issue tracker. +Committer | Granted to contributors who want to handle the actual pull request merges, tagging new versions, etc. Committers should have a good level of familiarity with the codebase, and enough context to understand the implications of various changes, as well as a good sense of the will and expectations of the project team. +Admin/Owner | Granted to people ultimately responsible for the project, its community, etc. + +## Attribution + +This guide was generated using the WeAllJS `CONTRIBUTING.md` generator. [Make your own](https://npm.im/weallcontribute)! diff --git a/node_modules/libnpmpublish/LICENSE b/node_modules/libnpmpublish/LICENSE new file mode 100644 index 0000000000000..209e4477f39c1 --- /dev/null +++ b/node_modules/libnpmpublish/LICENSE @@ -0,0 +1,13 @@ +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/libnpmpublish/PULL_REQUEST_TEMPLATE b/node_modules/libnpmpublish/PULL_REQUEST_TEMPLATE new file mode 100644 index 0000000000000..9471c6d325f7e --- /dev/null +++ b/node_modules/libnpmpublish/PULL_REQUEST_TEMPLATE @@ -0,0 +1,7 @@ + diff --git a/node_modules/libnpmpublish/README.md b/node_modules/libnpmpublish/README.md new file mode 100644 index 0000000000000..1511b7c14e926 --- /dev/null +++ b/node_modules/libnpmpublish/README.md @@ -0,0 +1,111 @@ +# libnpmpublish [![npm version](https://img.shields.io/npm/v/libnpmpublish.svg)](https://npm.im/libnpmpublish) [![license](https://img.shields.io/npm/l/libnpmpublish.svg)](https://npm.im/libnpmpublish) [![Travis](https://img.shields.io/travis/npm/libnpmpublish.svg)](https://travis-ci.org/npm/libnpmpublish) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/libnpmpublish?svg=true)](https://ci.appveyor.com/project/zkat/libnpmpublish) [![Coverage Status](https://coveralls.io/repos/github/npm/libnpmpublish/badge.svg?branch=latest)](https://coveralls.io/github/npm/libnpmpublish?branch=latest) + +[`libnpmpublish`](https://github.com/npm/libnpmpublish) is a Node.js library for +programmatically publishing and unpublishing npm packages. It does not take care +of packing tarballs from source code, but once you have a tarball, it can take +care of putting it up on a nice registry for you. + +## Example + +```js +const { publish, unpublish } = require('libnpmpublish') + +``` + +## Install + +`$ npm install libnpmpublish` + +## Table of Contents + +* [Example](#example) +* [Install](#install) +* [API](#api) + * [publish/unpublish opts](#opts) + * [`publish()`](#publish) + * [`unpublish()`](#unpublish) + +### API + +#### `opts` for `libnpmpublish` commands + +`libnpmpublish` uses [`npm-registry-fetch`](https://npm.im/npm-registry-fetch). +Most options are passed through directly to that library, so please refer to +[its own `opts` +documentation](https://www.npmjs.com/package/npm-registry-fetch#fetch-options) +for options that can be passed in. + +A couple of options of note for those in a hurry: + +* `opts.token` - can be passed in and will be used as the authentication token for the registry. For other ways to pass in auth details, see the n-r-f docs. +* `opts.Promise` - If you pass this in, the Promises returned by `libnpmpublish` commands will use this Promise class instead. For example: `{Promise: require('bluebird')}` + +#### `> libpub.publish(pkgJson, tarData, [opts]) -> Promise` + +Publishes `tarData` to the appropriate configured registry. `pkgJson` should be +the parsed `package.json` for the package that is being published. + +`tarData` can be a Buffer, a base64-encoded string, or a binary stream of data. +Note that publishing itself can't be streamed, so the entire stream will be +consumed into RAM before publishing (and are thus limited in how big they can +be). + +Since `libnpmpublish` does not generate tarballs itself, one way to build your +own tarball for publishing is to do `npm pack` in the directory you wish to +pack. You can then `fs.createReadStream('my-proj-1.0.0.tgz')` and pass that to +`libnpmpublish`, along with `require('./package.json')`. + +`publish()` does its best to emulate legacy publish logic in the standard npm +client, and so should generally be compatible with any registry the npm CLI has +been able to publish to in the past. + +If `opts.npmVersion` is passed in, it will be used as the `_npmVersion` field in +the outgoing packument. 
It's recommended you add your own user agent string in +there! + +If `opts.algorithms` is passed in, it should be an array of hashing algorithms +to generate `integrity` hashes for. The default is `['sha512']`, which means you +end up with `dist.integrity = 'sha512-deadbeefbadc0ffee'`. Any algorithm +supported by your current node version is allowed -- npm clients that do not +support those algorithms will simply ignore the unsupported hashes. + +If `opts.access` is passed in, it must be one of `public` or `restricted`. +Unscoped packages cannot be `restricted`, and the registry may agree or disagree +with whether you're allowed to publish a restricted package. + +##### Example + +```javascript +const pkg = require('./dist/package.json') +const tarball = fs.createReadStream('./dist/pkg-1.0.1.tgz') +await libpub.publish(pkg, tarball, { + npmVersion: 'my-pub-script@1.0.2', + token: 'my-auth-token-here' +}) +// Package has been published to the npm registry. +``` + +#### `> libpub.unpublish(spec, [opts]) -> Promise` + +Unpublishes `spec` from the appropriate registry. The registry in question may +have its own limitations on unpublishing. + +`spec` should be either a string, or a valid +[`npm-package-arg`](https://npm.im/npm-package-arg) parsed spec object. For +legacy compatibility reasons, only `tag` and `version` specs will work as +expected. `range` specs will fail silently in most cases. + +##### Example + +```javascript +await libpub.unpublish('lodash', { token: 'i-am-the-worst'}) +// +// `lodash` has now been unpublished, along with all its versions, and the world +// devolves into utter chaos. +// +// That, or we all go home to our friends and/or family and have a nice time +// doing nothing having to do with programming or JavaScript and realize our +// lives are just so much happier now, and we just can't understand why we ever +// got so into this JavaScript thing but damn did it pay well. I guess you'll +// settle for gardening or something. 
+``` diff --git a/node_modules/libnpmpublish/appveyor.yml b/node_modules/libnpmpublish/appveyor.yml new file mode 100644 index 0000000000000..9cc64c58e02f9 --- /dev/null +++ b/node_modules/libnpmpublish/appveyor.yml @@ -0,0 +1,22 @@ +environment: + matrix: + - nodejs_version: "10" + - nodejs_version: "9" + - nodejs_version: "8" + - nodejs_version: "6" + +platform: + - x64 + +install: + - ps: Install-Product node $env:nodejs_version $env:platform + - npm config set spin false + - npm install + +test_script: + - npm test + +matrix: + fast_finish: true + +build: off diff --git a/node_modules/libnpmpublish/index.js b/node_modules/libnpmpublish/index.js new file mode 100644 index 0000000000000..38de9ece14e4e --- /dev/null +++ b/node_modules/libnpmpublish/index.js @@ -0,0 +1,4 @@ +module.exports = { + publish: require('./publish.js'), + unpublish: require('./unpublish.js') +} diff --git a/node_modules/libnpmpublish/package.json b/node_modules/libnpmpublish/package.json new file mode 100644 index 0000000000000..58eda1a508928 --- /dev/null +++ b/node_modules/libnpmpublish/package.json @@ -0,0 +1,73 @@ +{ + "_from": "libnpmpublish@^1.1.2", + "_id": "libnpmpublish@1.1.2", + "_inBundle": false, + "_integrity": "sha512-2yIwaXrhTTcF7bkJKIKmaCV9wZOALf/gsTDxVSu/Gu/6wiG3fA8ce8YKstiWKTxSFNC0R7isPUb6tXTVFZHt2g==", + "_location": "/libnpmpublish", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "libnpmpublish@^1.1.2", + "name": "libnpmpublish", + "escapedName": "libnpmpublish", + "rawSpec": "^1.1.2", + "saveSpec": null, + "fetchSpec": "^1.1.2" + }, + "_requiredBy": [ + "/libnpm" + ], + "_resolved": "https://registry.npmjs.org/libnpmpublish/-/libnpmpublish-1.1.2.tgz", + "_shasum": "4201cfc4a69c44e6f454ec548fa1cd90f10df0a0", + "_spec": "libnpmpublish@^1.1.2", + "_where": "/Users/isaacs/dev/npm/cli/node_modules/libnpm", + "author": { + "name": "Kat Marchán", + "email": "kzm@zkat.tech" + }, + "bugs": { + "url": "https://github.com/npm/libnpmpublish/issues" + }, + "bundleDependencies": false, + "dependencies": { + "aproba": "^2.0.0", + "figgy-pudding": "^3.5.1", + "get-stream": "^4.0.0", + "lodash.clonedeep": "^4.5.0", + "normalize-package-data": "^2.4.0", + "npm-package-arg": "^6.1.0", + "npm-registry-fetch": "^4.0.0", + "semver": "^5.5.1", + "ssri": "^6.0.1" + }, + "deprecated": false, + "description": "Programmatic API for the bits behind npm publish and unpublish", + "devDependencies": { + "bluebird": "^3.5.1", + "nock": "^9.6.1", + "standard": "*", + "standard-version": "*", + "tap": "*", + "tar-stream": "^1.6.1", + "weallbehave": "*", + "weallcontribute": "*" + }, + "homepage": "https://npmjs.com/package/libnpmpublish", + "license": "ISC", + "name": "libnpmpublish", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/libnpmpublish.git" + }, + "scripts": { + "postrelease": "npm publish && git push --follow-tags", + "prerelease": "npm t", + "pretest": "standard", + "release": "standard-version -s", + "test": "tap -J --100 test/*.js", + "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", + "update-contrib": "weallcontribute -o . 
&& git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" + }, + "version": "1.1.2" +} diff --git a/node_modules/libnpmpublish/publish.js b/node_modules/libnpmpublish/publish.js new file mode 100644 index 0000000000000..de5af4f5d3d6b --- /dev/null +++ b/node_modules/libnpmpublish/publish.js @@ -0,0 +1,218 @@ +'use strict' + +const cloneDeep = require('lodash.clonedeep') +const figgyPudding = require('figgy-pudding') +const { fixer } = require('normalize-package-data') +const getStream = require('get-stream') +const npa = require('npm-package-arg') +const npmAuth = require('npm-registry-fetch/auth.js') +const npmFetch = require('npm-registry-fetch') +const semver = require('semver') +const ssri = require('ssri') +const url = require('url') +const validate = require('aproba') + +const PublishConfig = figgyPudding({ + access: {}, + algorithms: { default: ['sha512'] }, + npmVersion: {}, + tag: { default: 'latest' }, + Promise: { default: () => Promise } +}) + +module.exports = publish +function publish (manifest, tarball, opts) { + opts = PublishConfig(opts) + return new opts.Promise(resolve => resolve()).then(() => { + validate('OSO|OOO', [manifest, tarball, opts]) + if (manifest.private) { + throw Object.assign(new Error( + 'This package has been marked as private\n' + + "Remove the 'private' field from the package.json to publish it." + ), { code: 'EPRIVATE' }) + } + const spec = npa.resolve(manifest.name, manifest.version) + // NOTE: spec is used to pick the appropriate registry/auth combo. + opts = opts.concat(manifest.publishConfig, { spec }) + const reg = npmFetch.pickRegistry(spec, opts) + const auth = npmAuth(reg, opts) + const pubManifest = patchedManifest(spec, auth, manifest, opts) + + // registry-frontdoor cares about the access level, which is only + // configurable for scoped packages + if (!spec.scope && opts.access === 'restricted') { + throw Object.assign( + new Error("Can't restrict access to unscoped packages."), + { code: 'EUNSCOPED' } + ) + } + + return slurpTarball(tarball, opts).then(tardata => { + const metadata = buildMetadata( + spec, auth, reg, pubManifest, tardata, opts + ) + return npmFetch(spec.escapedName, opts.concat({ + method: 'PUT', + body: metadata, + ignoreBody: true + })).catch(err => { + if (err.code !== 'E409') { throw err } + return npmFetch.json(spec.escapedName, opts.concat({ + query: { write: true } + })).then( + current => patchMetadata(current, metadata, opts) + ).then(newMetadata => { + return npmFetch(spec.escapedName, opts.concat({ + method: 'PUT', + body: newMetadata, + ignoreBody: true + })) + }) + }) + }) + }).then(() => true) +} + +function patchedManifest (spec, auth, base, opts) { + const manifest = cloneDeep(base) + manifest._nodeVersion = process.versions.node + if (opts.npmVersion) { + manifest._npmVersion = opts.npmVersion + } + if (auth.username || auth.email) { + // NOTE: This is basically pointless, but reproduced because it's what + // legacy does: tl;dr `auth.username` and `auth.email` are going to be + // undefined in any auth situation that uses tokens instead of plain + // auth. I can only assume some registries out there decided that + // _npmUser would be of any use to them, but _npmUser in packuments + // currently gets filled in by the npm registry itself, based on auth + // information. 
+ manifest._npmUser = { + name: auth.username, + email: auth.email + } + } + + fixer.fixNameField(manifest, { strict: true, allowLegacyCase: true }) + const version = semver.clean(manifest.version) + if (!version) { + throw Object.assign( + new Error('invalid semver: ' + manifest.version), + { code: 'EBADSEMVER' } + ) + } + manifest.version = version + return manifest +} + +function buildMetadata (spec, auth, registry, manifest, tardata, opts) { + const root = { + _id: manifest.name, + name: manifest.name, + description: manifest.description, + 'dist-tags': {}, + versions: {}, + readme: manifest.readme || '' + } + + if (opts.access) root.access = opts.access + + if (!auth.token) { + root.maintainers = [{ name: auth.username, email: auth.email }] + manifest.maintainers = JSON.parse(JSON.stringify(root.maintainers)) + } + + root.versions[ manifest.version ] = manifest + const tag = manifest.tag || opts.tag + root['dist-tags'][tag] = manifest.version + + const tbName = manifest.name + '-' + manifest.version + '.tgz' + const tbURI = manifest.name + '/-/' + tbName + const integrity = ssri.fromData(tardata, { + algorithms: [...new Set(['sha1'].concat(opts.algorithms))] + }) + + manifest._id = manifest.name + '@' + manifest.version + manifest.dist = manifest.dist || {} + // Don't bother having sha1 in the actual integrity field + manifest.dist.integrity = integrity['sha512'][0].toString() + // Legacy shasum support + manifest.dist.shasum = integrity['sha1'][0].hexDigest() + manifest.dist.tarball = url.resolve(registry, tbURI) + .replace(/^https:\/\//, 'http://') + + root._attachments = {} + root._attachments[ tbName ] = { + 'content_type': 'application/octet-stream', + 'data': tardata.toString('base64'), + 'length': tardata.length + } + + return root +} + +function patchMetadata (current, newData, opts) { + const curVers = Object.keys(current.versions || {}).map(v => { + return semver.clean(v, true) + }).concat(Object.keys(current.time || {}).map(v => { + if (semver.valid(v, true)) { return semver.clean(v, true) } + })).filter(v => v) + + const newVersion = Object.keys(newData.versions)[0] + + if (curVers.indexOf(newVersion) !== -1) { + throw ConflictError(newData.name, newData.version) + } + + current.versions = current.versions || {} + current.versions[newVersion] = newData.versions[newVersion] + for (var i in newData) { + switch (i) { + // objects that copy over the new stuffs + case 'dist-tags': + case 'versions': + case '_attachments': + for (var j in newData[i]) { + current[i] = current[i] || {} + current[i][j] = newData[i][j] + } + break + + // ignore these + case 'maintainers': + break + + // copy + default: + current[i] = newData[i] + } + } + const maint = newData.maintainers && JSON.parse(JSON.stringify(newData.maintainers)) + newData.versions[newVersion].maintainers = maint + return current +} + +function slurpTarball (tarSrc, opts) { + if (Buffer.isBuffer(tarSrc)) { + return opts.Promise.resolve(tarSrc) + } else if (typeof tarSrc === 'string') { + return opts.Promise.resolve(Buffer.from(tarSrc, 'base64')) + } else if (typeof tarSrc.pipe === 'function') { + return getStream.buffer(tarSrc) + } else { + return opts.Promise.reject(Object.assign( + new Error('invalid tarball argument. 
Must be a Buffer, a base64 string, or a binary stream'), { + code: 'EBADTAR' + })) + } +} + +function ConflictError (pkgid, version) { + return Object.assign(new Error( + `Cannot publish ${pkgid}@${version} over existing version.` + ), { + code: 'EPUBLISHCONFLICT', + pkgid, + version + }) +} diff --git a/node_modules/libnpmpublish/test/publish.js b/node_modules/libnpmpublish/test/publish.js new file mode 100644 index 0000000000000..23eef2181f1fb --- /dev/null +++ b/node_modules/libnpmpublish/test/publish.js @@ -0,0 +1,1048 @@ +'use strict' + +const crypto = require('crypto') +const cloneDeep = require('lodash.clonedeep') +const figgyPudding = require('figgy-pudding') +const mockTar = require('./util/mock-tarball.js') +const { PassThrough } = require('stream') +const ssri = require('ssri') +const { test } = require('tap') +const tnock = require('./util/tnock.js') + +const publish = require('../publish.js') + +const OPTS = figgyPudding({ registry: {} })({ + registry: 'https://mock.reg/' +}) + +const REG = OPTS.registry + +test('basic publish', t => { + const manifest = { + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff' + } + return mockTar({ + 'package.json': JSON.stringify(manifest), + 'index.js': 'console.log("hello world")' + }).then(tarData => { + const shasum = crypto.createHash('sha1').update(tarData).digest('hex') + const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] }) + const packument = { + name: 'libnpmpublish', + description: 'some stuff', + readme: '', + _id: 'libnpmpublish', + 'dist-tags': { + latest: '1.0.0' + }, + versions: { + '1.0.0': { + _id: 'libnpmpublish@1.0.0', + _nodeVersion: process.versions.node, + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff', + dist: { + shasum, + integrity: integrity.toString(), + tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz` + } + } + }, + _attachments: { + 'libnpmpublish-1.0.0.tgz': { + 'content_type': 'application/octet-stream', + data: tarData.toString('base64'), + length: tarData.length + } + } + } + const srv = tnock(t, REG) + srv.put('/libnpmpublish', body => { + t.deepEqual(body, packument, 'posted packument matches expectations') + return true + }, { + authorization: 'Bearer deadbeef' + }).reply(201, {}) + + return publish(manifest, tarData, OPTS.concat({ + token: 'deadbeef' + })).then(ret => { + t.ok(ret, 'publish succeeded') + }) + }) +}) + +test('scoped publish', t => { + const manifest = { + name: '@zkat/libnpmpublish', + version: '1.0.0', + description: 'some stuff' + } + return mockTar({ + 'package.json': JSON.stringify(manifest), + 'index.js': 'console.log("hello world")' + }).then(tarData => { + const shasum = crypto.createHash('sha1').update(tarData).digest('hex') + const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] }) + const packument = { + name: '@zkat/libnpmpublish', + description: 'some stuff', + readme: '', + _id: '@zkat/libnpmpublish', + 'dist-tags': { + latest: '1.0.0' + }, + versions: { + '1.0.0': { + _id: '@zkat/libnpmpublish@1.0.0', + _nodeVersion: process.versions.node, + _npmVersion: '6.9.0', + name: '@zkat/libnpmpublish', + version: '1.0.0', + description: 'some stuff', + dist: { + shasum, + integrity: integrity.toString(), + tarball: `http://mock.reg/@zkat/libnpmpublish/-/@zkat/libnpmpublish-1.0.0.tgz` + } + } + }, + _attachments: { + '@zkat/libnpmpublish-1.0.0.tgz': { + 'content_type': 'application/octet-stream', + data: tarData.toString('base64'), + length: tarData.length + } + } + } + const srv = tnock(t, REG) + 
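+    // NOTE: scoped package names are URL-encoded when used as a registry path -- publish() PUTs to spec.escapedName -- so this mock intercepts '/@zkat%2flibnpmpublish' rather than '/@zkat/libnpmpublish'.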
srv.put('/@zkat%2flibnpmpublish', body => { + t.deepEqual(body, packument, 'posted packument matches expectations') + return true + }, { + authorization: 'Bearer deadbeef' + }).reply(201, {}) + + return publish(manifest, tarData, OPTS.concat({ + npmVersion: '6.9.0', + token: 'deadbeef' + })).then(() => { + t.ok(true, 'publish succeeded') + }) + }) +}) + +test('retry after a conflict', t => { + const REV = '72-47f2986bfd8e8b55068b204588bbf484' + const manifest = { + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff' + } + return mockTar({ + 'package.json': JSON.stringify(manifest), + 'index.js': 'console.log("hello world")' + }).then(tarData => { + const shasum = crypto.createHash('sha1').update(tarData).digest('hex') + const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] }) + const basePackument = { + name: 'libnpmpublish', + description: 'some stuff', + readme: '', + _id: 'libnpmpublish', + 'dist-tags': {}, + versions: {}, + _attachments: {} + } + const currentPackument = cloneDeep(Object.assign({}, basePackument, { + time: { + modified: new Date().toISOString(), + created: new Date().toISOString(), + '1.0.1': new Date().toISOString() + }, + 'dist-tags': { latest: '1.0.1' }, + maintainers: [{ name: 'zkat', email: 'idk@idk.tech' }], + versions: { + '1.0.1': { + _id: 'libnpmpublish@1.0.1', + _nodeVersion: process.versions.node, + _npmVersion: '6.9.0', + name: 'libnpmpublish', + version: '1.0.1', + description: 'some stuff', + dist: { + shasum, + integrity: integrity.toString(), + tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.1.tgz` + } + } + }, + _attachments: { + 'libnpmpublish-1.0.1.tgz': { + 'content_type': 'application/octet-stream', + data: tarData.toString('base64'), + length: tarData.length + } + } + })) + const newPackument = cloneDeep(Object.assign({}, basePackument, { + 'dist-tags': { latest: '1.0.0' }, + maintainers: [{ name: 'other', email: 'other@idk.tech' }], + versions: { + '1.0.0': { + _id: 'libnpmpublish@1.0.0', + _nodeVersion: process.versions.node, + _npmVersion: '6.9.0', + _npmUser: { + name: 'other', + email: 'other@idk.tech' + }, + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff', + maintainers: [{ name: 'other', email: 'other@idk.tech' }], + dist: { + shasum, + integrity: integrity.toString(), + tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz` + } + } + }, + _attachments: { + 'libnpmpublish-1.0.0.tgz': { + 'content_type': 'application/octet-stream', + data: tarData.toString('base64'), + length: tarData.length + } + } + })) + const mergedPackument = cloneDeep(Object.assign({}, basePackument, { + time: currentPackument.time, + 'dist-tags': { latest: '1.0.0' }, + maintainers: currentPackument.maintainers, + versions: Object.assign({}, currentPackument.versions, newPackument.versions), + _attachments: Object.assign({}, currentPackument._attachments, newPackument._attachments) + })) + const srv = tnock(t, REG) + srv.put('/libnpmpublish', body => { + t.notOk(body._rev, 'no _rev in initial post') + t.deepEqual(body, newPackument, 'got conflicting packument') + return true + }).reply(409, { error: 'gimme _rev plz' }) + srv.get('/libnpmpublish?write=true').reply(200, Object.assign({ + _rev: REV + }, currentPackument)) + srv.put('/libnpmpublish', body => { + t.deepEqual(body, Object.assign({ + _rev: REV + }, mergedPackument), 'posted packument includes _rev and a merged version') + return true + }).reply(201, {}) + return publish(manifest, tarData, OPTS.concat({ + npmVersion: '6.9.0', + 
username: 'other', + email: 'other@idk.tech' + })).then(() => { + t.ok(true, 'publish succeeded') + }) + }) +}) + +test('retry after a conflict -- no versions on remote', t => { + const REV = '72-47f2986bfd8e8b55068b204588bbf484' + const manifest = { + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff' + } + return mockTar({ + 'package.json': JSON.stringify(manifest), + 'index.js': 'console.log("hello world")' + }).then(tarData => { + const shasum = crypto.createHash('sha1').update(tarData).digest('hex') + const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] }) + const basePackument = { + name: 'libnpmpublish', + description: 'some stuff', + readme: '', + _id: 'libnpmpublish' + } + const currentPackument = cloneDeep(Object.assign({}, basePackument, { + maintainers: [{ name: 'zkat', email: 'idk@idk.tech' }] + })) + const newPackument = cloneDeep(Object.assign({}, basePackument, { + 'dist-tags': { latest: '1.0.0' }, + maintainers: [{ name: 'other', email: 'other@idk.tech' }], + versions: { + '1.0.0': { + _id: 'libnpmpublish@1.0.0', + _nodeVersion: process.versions.node, + _npmVersion: '6.9.0', + _npmUser: { + name: 'other', + email: 'other@idk.tech' + }, + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff', + maintainers: [{ name: 'other', email: 'other@idk.tech' }], + dist: { + shasum, + integrity: integrity.toString(), + tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz` + } + } + }, + _attachments: { + 'libnpmpublish-1.0.0.tgz': { + 'content_type': 'application/octet-stream', + data: tarData.toString('base64'), + length: tarData.length + } + } + })) + const mergedPackument = cloneDeep(Object.assign({}, basePackument, { + 'dist-tags': { latest: '1.0.0' }, + maintainers: currentPackument.maintainers, + versions: Object.assign({}, currentPackument.versions, newPackument.versions), + _attachments: Object.assign({}, currentPackument._attachments, newPackument._attachments) + })) + const srv = tnock(t, REG) + srv.put('/libnpmpublish', body => { + t.notOk(body._rev, 'no _rev in initial post') + t.deepEqual(body, newPackument, 'got conflicting packument') + return true + }).reply(409, { error: 'gimme _rev plz' }) + srv.get('/libnpmpublish?write=true').reply(200, Object.assign({ + _rev: REV + }, currentPackument)) + srv.put('/libnpmpublish', body => { + t.deepEqual(body, Object.assign({ + _rev: REV + }, mergedPackument), 'posted packument includes _rev and a merged version') + return true + }).reply(201, {}) + return publish(manifest, tarData, OPTS.concat({ + npmVersion: '6.9.0', + username: 'other', + email: 'other@idk.tech' + })).then(() => { + t.ok(true, 'publish succeeded') + }) + }) +}) +test('version conflict', t => { + const REV = '72-47f2986bfd8e8b55068b204588bbf484' + const manifest = { + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff' + } + return mockTar({ + 'package.json': JSON.stringify(manifest), + 'index.js': 'console.log("hello world")' + }).then(tarData => { + const shasum = crypto.createHash('sha1').update(tarData).digest('hex') + const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] }) + const basePackument = { + name: 'libnpmpublish', + description: 'some stuff', + readme: '', + _id: 'libnpmpublish', + 'dist-tags': {}, + versions: {}, + _attachments: {} + } + const newPackument = cloneDeep(Object.assign({}, basePackument, { + 'dist-tags': { latest: '1.0.0' }, + versions: { + '1.0.0': { + _id: 'libnpmpublish@1.0.0', + _nodeVersion: process.versions.node, + _npmVersion: 
'6.9.0', + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff', + dist: { + shasum, + integrity: integrity.toString(), + tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz` + } + } + }, + _attachments: { + 'libnpmpublish-1.0.0.tgz': { + 'content_type': 'application/octet-stream', + data: tarData.toString('base64'), + length: tarData.length + } + } + })) + const srv = tnock(t, REG) + srv.put('/libnpmpublish', body => { + t.notOk(body._rev, 'no _rev in initial post') + t.deepEqual(body, newPackument, 'got conflicting packument') + return true + }).reply(409, { error: 'gimme _rev plz' }) + srv.get('/libnpmpublish?write=true').reply(200, Object.assign({ + _rev: REV + }, newPackument)) + return publish(manifest, tarData, OPTS.concat({ + npmVersion: '6.9.0', + token: 'deadbeef' + })).then( + () => { throw new Error('should not succeed') }, + err => { + t.equal(err.code, 'EPUBLISHCONFLICT', 'got publish conflict code') + } + ) + }) +}) + +test('publish with basic auth', t => { + const manifest = { + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff' + } + return mockTar({ + 'package.json': JSON.stringify(manifest), + 'index.js': 'console.log("hello world")' + }).then(tarData => { + const shasum = crypto.createHash('sha1').update(tarData).digest('hex') + const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] }) + const packument = { + name: 'libnpmpublish', + description: 'some stuff', + readme: '', + _id: 'libnpmpublish', + 'dist-tags': { + latest: '1.0.0' + }, + maintainers: [{ + name: 'zkat', + email: 'kat@example.tech' + }], + versions: { + '1.0.0': { + _id: 'libnpmpublish@1.0.0', + _nodeVersion: process.versions.node, + _npmVersion: '6.9.0', + _npmUser: { + name: 'zkat', + email: 'kat@example.tech' + }, + maintainers: [{ + name: 'zkat', + email: 'kat@example.tech' + }], + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff', + dist: { + shasum, + integrity: integrity.toString(), + tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz` + } + } + }, + _attachments: { + 'libnpmpublish-1.0.0.tgz': { + 'content_type': 'application/octet-stream', + data: tarData.toString('base64'), + length: tarData.length + } + } + } + const srv = tnock(t, REG) + srv.put('/libnpmpublish', body => { + t.deepEqual(body, packument, 'posted packument matches expectations') + return true + }, { + authorization: /^Basic / + }).reply(201, {}) + + return publish(manifest, tarData, OPTS.concat({ + npmVersion: '6.9.0', + username: 'zkat', + email: 'kat@example.tech' + })).then(() => { + t.ok(true, 'publish succeeded') + }) + }) +}) + +test('publish base64 string', t => { + const manifest = { + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff' + } + return mockTar({ + 'package.json': JSON.stringify(manifest), + 'index.js': 'console.log("hello world")' + }).then(tarData => { + const shasum = crypto.createHash('sha1').update(tarData).digest('hex') + const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] }) + const packument = { + name: 'libnpmpublish', + description: 'some stuff', + readme: '', + _id: 'libnpmpublish', + 'dist-tags': { + latest: '1.0.0' + }, + versions: { + '1.0.0': { + _id: 'libnpmpublish@1.0.0', + _nodeVersion: process.versions.node, + _npmVersion: '6.9.0', + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff', + dist: { + shasum, + integrity: integrity.toString(), + tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz` + } + } + }, + _attachments: { + 
'libnpmpublish-1.0.0.tgz': { + 'content_type': 'application/octet-stream', + data: tarData.toString('base64'), + length: tarData.length + } + } + } + const srv = tnock(t, REG) + srv.put('/libnpmpublish', body => { + t.deepEqual(body, packument, 'posted packument matches expectations') + return true + }, { + authorization: 'Bearer deadbeef' + }).reply(201, {}) + + return publish(manifest, tarData.toString('base64'), OPTS.concat({ + npmVersion: '6.9.0', + token: 'deadbeef' + })).then(() => { + t.ok(true, 'publish succeeded') + }) + }) +}) + +test('publish tar stream', t => { + const manifest = { + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff' + } + return mockTar({ + 'package.json': JSON.stringify(manifest), + 'index.js': 'console.log("hello world")' + }).then(tarData => { + const shasum = crypto.createHash('sha1').update(tarData).digest('hex') + const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] }) + const packument = { + name: 'libnpmpublish', + description: 'some stuff', + readme: '', + _id: 'libnpmpublish', + 'dist-tags': { + latest: '1.0.0' + }, + versions: { + '1.0.0': { + _id: 'libnpmpublish@1.0.0', + _nodeVersion: process.versions.node, + _npmVersion: '6.9.0', + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff', + dist: { + shasum, + integrity: integrity.toString(), + tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz` + } + } + }, + _attachments: { + 'libnpmpublish-1.0.0.tgz': { + 'content_type': 'application/octet-stream', + data: tarData.toString('base64'), + length: tarData.length + } + } + } + const srv = tnock(t, REG) + srv.put('/libnpmpublish', body => { + t.deepEqual(body, packument, 'posted packument matches expectations') + return true + }, { + authorization: 'Bearer deadbeef' + }).reply(201, {}) + + const stream = new PassThrough() + setTimeout(() => stream.end(tarData), 0) + return publish(manifest, stream, OPTS.concat({ + npmVersion: '6.9.0', + token: 'deadbeef' + })).then(() => { + t.ok(true, 'publish succeeded') + }) + }) +}) + +test('refuse if package marked private', t => { + const manifest = { + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff', + private: true + } + return mockTar({ + 'package.json': JSON.stringify(manifest), + 'index.js': 'console.log("hello world")' + }).then(tarData => { + return publish(manifest, tarData, OPTS.concat({ + npmVersion: '6.9.0', + token: 'deadbeef' + })).then( + () => { throw new Error('should not have succeeded') }, + err => { + t.equal(err.code, 'EPRIVATE', 'got correct error code') + } + ) + }) +}) + +test('publish includes access', t => { + const manifest = { + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff' + } + return mockTar({ + 'package.json': JSON.stringify(manifest), + 'index.js': 'console.log("hello world")' + }).then(tarData => { + const shasum = crypto.createHash('sha1').update(tarData).digest('hex') + const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] }) + const packument = { + name: 'libnpmpublish', + description: 'some stuff', + readme: '', + access: 'public', + _id: 'libnpmpublish', + 'dist-tags': { + latest: '1.0.0' + }, + versions: { + '1.0.0': { + _id: 'libnpmpublish@1.0.0', + _nodeVersion: process.versions.node, + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff', + dist: { + shasum, + integrity: integrity.toString(), + tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz` + } + } + }, + _attachments: { + 'libnpmpublish-1.0.0.tgz': { + 
'content_type': 'application/octet-stream', + data: tarData.toString('base64'), + length: tarData.length + } + } + } + const srv = tnock(t, REG) + srv.put('/libnpmpublish', body => { + t.deepEqual(body, packument, 'posted packument matches expectations') + return true + }, { + authorization: 'Bearer deadbeef' + }).reply(201, {}) + + return publish(manifest, tarData, OPTS.concat({ + token: 'deadbeef', + access: 'public' + })).then(() => { + t.ok(true, 'publish succeeded') + }) + }) +}) + +test('refuse if package is unscoped plus `restricted` access', t => { + const manifest = { + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff' + } + return mockTar({ + 'package.json': JSON.stringify(manifest), + 'index.js': 'console.log("hello world")' + }).then(tarData => { + return publish(manifest, tarData, OPTS.concat({ + npmVersion: '6.9.0', + access: 'restricted' + })).then( + () => { throw new Error('should not have succeeded') }, + err => { + t.equal(err.code, 'EUNSCOPED', 'got correct error code') + } + ) + }) +}) + +test('refuse if tarball is wrong type', t => { + const manifest = { + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff' + } + return publish(manifest, { data: 42 }, OPTS.concat({ + npmVersion: '6.9.0', + token: 'deadbeef' + })).then( + () => { throw new Error('should not have succeeded') }, + err => { + t.equal(err.code, 'EBADTAR', 'got correct error code') + } + ) +}) + +test('refuse if bad semver on manifest', t => { + const manifest = { + name: 'libnpmpublish', + version: 'lmao', + description: 'some stuff' + } + return publish(manifest, 'deadbeef', OPTS).then( + () => { throw new Error('should not have succeeded') }, + err => { + t.equal(err.code, 'EBADSEMVER', 'got correct error code') + } + ) +}) + +test('other error code', t => { + const manifest = { + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff' + } + return mockTar({ + 'package.json': JSON.stringify(manifest), + 'index.js': 'console.log("hello world")' + }).then(tarData => { + const shasum = crypto.createHash('sha1').update(tarData).digest('hex') + const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] }) + const packument = { + name: 'libnpmpublish', + description: 'some stuff', + readme: '', + _id: 'libnpmpublish', + 'dist-tags': { + latest: '1.0.0' + }, + versions: { + '1.0.0': { + _id: 'libnpmpublish@1.0.0', + _nodeVersion: process.versions.node, + _npmVersion: '6.9.0', + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff', + dist: { + shasum, + integrity: integrity.toString(), + tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz` + } + } + }, + _attachments: { + 'libnpmpublish-1.0.0.tgz': { + 'content_type': 'application/octet-stream', + data: tarData.toString('base64'), + length: tarData.length + } + } + } + const srv = tnock(t, REG) + srv.put('/libnpmpublish', body => { + t.deepEqual(body, packument, 'posted packument matches expectations') + return true + }, { + authorization: 'Bearer deadbeef' + }).reply(500, { error: 'go away' }) + + return publish(manifest, tarData, OPTS.concat({ + npmVersion: '6.9.0', + token: 'deadbeef' + })).then( + () => { throw new Error('should not succeed') }, + err => { + t.match(err.message, /go away/, 'no retry on non-409') + } + ) + }) +}) + +test('publish includes access', t => { + const manifest = { + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff' + } + return mockTar({ + 'package.json': JSON.stringify(manifest), + 'index.js': 'console.log("hello 
world")' + }).then(tarData => { + const shasum = crypto.createHash('sha1').update(tarData).digest('hex') + const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] }) + const packument = { + name: 'libnpmpublish', + description: 'some stuff', + readme: '', + access: 'public', + _id: 'libnpmpublish', + 'dist-tags': { + latest: '1.0.0' + }, + versions: { + '1.0.0': { + _id: 'libnpmpublish@1.0.0', + _nodeVersion: process.versions.node, + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff', + dist: { + shasum, + integrity: integrity.toString(), + tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz` + } + } + }, + _attachments: { + 'libnpmpublish-1.0.0.tgz': { + 'content_type': 'application/octet-stream', + data: tarData.toString('base64'), + length: tarData.length + } + } + } + const srv = tnock(t, REG) + srv.put('/libnpmpublish', body => { + t.deepEqual(body, packument, 'posted packument matches expectations') + return true + }, { + authorization: 'Bearer deadbeef' + }).reply(201, {}) + + return publish(manifest, tarData, OPTS.concat({ + token: 'deadbeef', + access: 'public' + })).then(() => { + t.ok(true, 'publish succeeded') + }) + }) +}) + +test('publishConfig on manifest', t => { + const manifest = { + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff', + publishConfig: { + registry: REG + } + } + return mockTar({ + 'package.json': JSON.stringify(manifest), + 'index.js': 'console.log("hello world")' + }).then(tarData => { + const shasum = crypto.createHash('sha1').update(tarData).digest('hex') + const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] }) + const packument = { + name: 'libnpmpublish', + description: 'some stuff', + readme: '', + _id: 'libnpmpublish', + 'dist-tags': { + latest: '1.0.0' + }, + versions: { + '1.0.0': { + _id: 'libnpmpublish@1.0.0', + _nodeVersion: process.versions.node, + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff', + dist: { + shasum, + integrity: integrity.toString(), + tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz` + }, + publishConfig: { + registry: REG + } + } + }, + _attachments: { + 'libnpmpublish-1.0.0.tgz': { + 'content_type': 'application/octet-stream', + data: tarData.toString('base64'), + length: tarData.length + } + } + } + const srv = tnock(t, REG) + srv.put('/libnpmpublish', body => { + t.deepEqual(body, packument, 'posted packument matches expectations') + return true + }, { + authorization: 'Bearer deadbeef' + }).reply(201, {}) + + return publish(manifest, tarData, { token: 'deadbeef' }).then(ret => { + t.ok(ret, 'publish succeeded') + }) + }) +}) + +test('publish with encoded _auth', t => { + const manifest = { + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff' + } + return mockTar({ + 'package.json': JSON.stringify(manifest), + 'index.js': 'console.log("hello world")' + }).then(tarData => { + const shasum = crypto.createHash('sha1').update(tarData).digest('hex') + const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] }) + const packument = { + name: 'libnpmpublish', + description: 'some stuff', + readme: '', + _id: 'libnpmpublish', + 'dist-tags': { + latest: '1.0.0' + }, + maintainers: [ + { name: 'myuser', email: 'my@ema.il' } + ], + versions: { + '1.0.0': { + _id: 'libnpmpublish@1.0.0', + _npmUser: { + name: 'myuser', + email: 'my@ema.il' + }, + maintainers: [ + { name: 'myuser', email: 'my@ema.il' } + ], + _nodeVersion: process.versions.node, + name: 'libnpmpublish', + version: '1.0.0', + 
description: 'some stuff', + dist: { + shasum, + integrity: integrity.toString(), + tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz` + } + } + }, + _attachments: { + 'libnpmpublish-1.0.0.tgz': { + 'content_type': 'application/octet-stream', + data: tarData.toString('base64'), + length: tarData.length + } + } + } + const srv = tnock(t, REG) + srv.put('/libnpmpublish', body => { + t.deepEqual(body, packument, 'posted packument matches expectations') + return true + }, { + authorization: 'Bearer deadbeef' + }).reply(201, {}) + + return publish(manifest, tarData, OPTS.concat({ + _auth: Buffer.from('myuser:mypassword', 'utf8').toString('base64'), + email: 'my@ema.il' + })).then(ret => { + t.ok(ret, 'publish succeeded using _auth') + }) + }) +}) + +test('publish with 302 redirect', t => { + const manifest = { + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff' + } + return mockTar({ + 'package.json': JSON.stringify(manifest), + 'index.js': 'console.log("hello world")' + }).then(tarData => { + const shasum = crypto.createHash('sha1').update(tarData).digest('hex') + const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] }) + const packument = { + name: 'libnpmpublish', + description: 'some stuff', + readme: '', + _id: 'libnpmpublish', + 'dist-tags': { + latest: '1.0.0' + }, + versions: { + '1.0.0': { + _id: 'libnpmpublish@1.0.0', + _nodeVersion: process.versions.node, + name: 'libnpmpublish', + version: '1.0.0', + description: 'some stuff', + dist: { + shasum, + integrity: integrity.toString(), + tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz` + } + } + }, + _attachments: { + 'libnpmpublish-1.0.0.tgz': { + 'content_type': 'application/octet-stream', + data: tarData.toString('base64'), + length: tarData.length + } + } + } + tnock(t, REG).put('/libnpmpublish').reply(302, '', { + location: 'http://blah.net/libnpmpublish' + }) + tnock(t, 'http://blah.net').put('/libnpmpublish', body => { + t.deepEqual(body, packument, 'posted packument matches expectations') + return true + }, { + authorization: 'Bearer deadbeef' + }).reply(201, {}) + + return publish(manifest, tarData, OPTS.concat({ + token: 'deadbeef' + })).then(ret => { + t.ok(ret, 'publish succeeded') + }) + }) +}) diff --git a/node_modules/libnpmpublish/test/unpublish.js b/node_modules/libnpmpublish/test/unpublish.js new file mode 100644 index 0000000000000..19ac464a3b732 --- /dev/null +++ b/node_modules/libnpmpublish/test/unpublish.js @@ -0,0 +1,249 @@ +'use strict' + +const figgyPudding = require('figgy-pudding') +const test = require('tap').test +const tnock = require('./util/tnock.js') + +const OPTS = figgyPudding({ registry: {} })({ + registry: 'https://mock.reg/' +}) + +const REG = OPTS.registry +const REV = '72-47f2986bfd8e8b55068b204588bbf484' +const unpub = require('../unpublish.js') + +test('basic test', t => { + const doc = { + _id: 'foo', + _rev: REV, + name: 'foo', + 'dist-tags': { + latest: '1.0.0' + }, + versions: { + '1.0.0': { + name: 'foo', + dist: { + tarball: `${REG}/foo/-/foo-1.0.0.tgz` + } + } + } + } + const srv = tnock(t, REG) + srv.get('/foo?write=true').reply(200, doc) + srv.delete(`/foo/-rev/${REV}`).reply(201) + return unpub('foo', OPTS).then(ret => { + t.ok(ret, 'foo was unpublished') + }) +}) + +test('scoped basic test', t => { + const doc = { + _id: '@foo/bar', + _rev: REV, + name: '@foo/bar', + 'dist-tags': { + latest: '1.0.0' + }, + versions: { + '1.0.0': { + name: '@foo/bar', + dist: { + tarball: `${REG}/@foo/bar/-/foo-1.0.0.tgz` + } + } + } + } 
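+  // Unpublishing an entire package is a two-step exchange: GET the packument with ?write=true to learn its _rev, then DELETE <escaped-name>/-rev/<rev>; the mocks below cover exactly those two requests.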
+ const srv = tnock(t, REG) + srv.get('/@foo%2fbar?write=true').reply(200, doc) + srv.delete(`/@foo%2fbar/-rev/${REV}`).reply(201) + return unpub('@foo/bar', OPTS).then(() => { + t.ok(true, 'foo was unpublished') + }) +}) + +test('unpublish specific, last version', t => { + const doc = { + _id: 'foo', + _rev: REV, + name: 'foo', + 'dist-tags': { + latest: '1.0.0' + }, + versions: { + '1.0.0': { + name: 'foo', + dist: { + tarball: `${REG}/foo/-/foo-1.0.0.tgz` + } + } + } + } + const srv = tnock(t, REG) + srv.get('/foo?write=true').reply(200, doc) + srv.delete(`/foo/-rev/${REV}`).reply(201) + return unpub('foo@1.0.0', OPTS).then(() => { + t.ok(true, 'foo was unpublished') + }) +}) + +test('unpublish specific version', t => { + const doc = { + _id: 'foo', + _rev: REV, + _revisions: [1, 2, 3], + _attachments: [1, 2, 3], + name: 'foo', + 'dist-tags': { + latest: '1.0.1' + }, + versions: { + '1.0.0': { + name: 'foo', + dist: { + tarball: `${REG}/foo/-/foo-1.0.0.tgz` + } + }, + '1.0.1': { + name: 'foo', + dist: { + tarball: `${REG}/foo/-/foo-1.0.1.tgz` + } + } + } + } + const postEdit = { + _id: 'foo', + _rev: REV, + name: 'foo', + 'dist-tags': { + latest: '1.0.0' + }, + versions: { + '1.0.0': { + name: 'foo', + dist: { + tarball: `${REG}/foo/-/foo-1.0.0.tgz` + } + } + } + } + + const srv = tnock(t, REG) + srv.get('/foo?write=true').reply(200, doc) + srv.put(`/foo/-rev/${REV}`, postEdit).reply(200) + srv.get('/foo?write=true').reply(200, postEdit) + srv.delete(`/foo/-/foo-1.0.1.tgz/-rev/${REV}`).reply(200) + return unpub('foo@1.0.1', OPTS).then(() => { + t.ok(true, 'foo was unpublished') + }) +}) + +test('404 considered a success', t => { + const srv = tnock(t, REG) + srv.get('/foo?write=true').reply(404) + return unpub('foo', OPTS).then(() => { + t.ok(true, 'foo was unpublished') + }) +}) + +test('non-404 errors', t => { + const srv = tnock(t, REG) + srv.get('/foo?write=true').reply(500) + return unpub('foo', OPTS).then( + () => { throw new Error('should not have succeeded') }, + err => { t.equal(err.code, 'E500', 'got right error from server') } + ) +}) + +test('packument with missing versions unpublishes whole thing', t => { + const doc = { + _id: 'foo', + _rev: REV, + name: 'foo', + 'dist-tags': { + latest: '1.0.0' + } + } + const srv = tnock(t, REG) + srv.get('/foo?write=true').reply(200, doc) + srv.delete(`/foo/-rev/${REV}`).reply(201) + return unpub('foo@1.0.0', OPTS).then(() => { + t.ok(true, 'foo was unpublished') + }) +}) + +test('packument with missing specific version assumed unpublished', t => { + const doc = { + _id: 'foo', + _rev: REV, + name: 'foo', + 'dist-tags': { + latest: '1.0.0' + }, + versions: { + '1.0.0': { + name: 'foo', + dist: { + tarball: `${REG}/foo/-/foo-1.0.0.tgz` + } + } + } + } + const srv = tnock(t, REG) + srv.get('/foo?write=true').reply(200, doc) + return unpub('foo@1.0.1', OPTS).then(() => { + t.ok(true, 'foo was unpublished') + }) +}) + +test('unpublish specific version without dist-tag update', t => { + const doc = { + _id: 'foo', + _rev: REV, + _revisions: [1, 2, 3], + _attachments: [1, 2, 3], + name: 'foo', + 'dist-tags': { + latest: '1.0.0' + }, + versions: { + '1.0.0': { + name: 'foo', + dist: { + tarball: `${REG}/foo/-/foo-1.0.0.tgz` + } + }, + '1.0.1': { + name: 'foo', + dist: { + tarball: `${REG}/foo/-/foo-1.0.1.tgz` + } + } + } + } + const postEdit = { + _id: 'foo', + _rev: REV, + name: 'foo', + 'dist-tags': { + latest: '1.0.0' + }, + versions: { + '1.0.0': { + name: 'foo', + dist: { + tarball: `${REG}/foo/-/foo-1.0.0.tgz` + } + } + } + } + const srv 
= tnock(t, REG) + srv.get('/foo?write=true').reply(200, doc) + srv.put(`/foo/-rev/${REV}`, postEdit).reply(200) + srv.get('/foo?write=true').reply(200, postEdit) + srv.delete(`/foo/-/foo-1.0.1.tgz/-rev/${REV}`).reply(200) + return unpub('foo@1.0.1', OPTS).then(() => { + t.ok(true, 'foo was unpublished') + }) +}) diff --git a/node_modules/libnpmpublish/test/util/mock-tarball.js b/node_modules/libnpmpublish/test/util/mock-tarball.js new file mode 100644 index 0000000000000..c6253cd218f5b --- /dev/null +++ b/node_modules/libnpmpublish/test/util/mock-tarball.js @@ -0,0 +1,47 @@ +'use strict' + +const BB = require('bluebird') + +const getStream = require('get-stream') +const tar = require('tar-stream') +const zlib = require('zlib') + +module.exports = makeTarball +function makeTarball (files, opts) { + opts = opts || {} + const pack = tar.pack() + Object.keys(files).forEach(function (filename) { + const entry = files[filename] + pack.entry({ + name: (opts.noPrefix ? '' : 'package/') + filename, + type: entry.type, + size: entry.size, + mode: entry.mode, + mtime: entry.mtime || new Date(0), + linkname: entry.linkname, + uid: entry.uid, + gid: entry.gid, + uname: entry.uname, + gname: entry.gname + }, typeof files[filename] === 'string' + ? files[filename] + : files[filename].data) + }) + pack.finalize() + return BB.try(() => { + if (opts.stream && opts.gzip) { + const gz = zlib.createGzip() + pack.on('error', err => gz.emit('error', err)).pipe(gz) + } else if (opts.stream) { + return pack + } else { + return getStream.buffer(pack).then(ret => { + if (opts.gzip) { + return BB.fromNode(cb => zlib.gzip(ret, cb)) + } else { + return ret + } + }) + } + }) +} diff --git a/node_modules/libnpmpublish/test/util/tnock.js b/node_modules/libnpmpublish/test/util/tnock.js new file mode 100644 index 0000000000000..00b6e160e1019 --- /dev/null +++ b/node_modules/libnpmpublish/test/util/tnock.js @@ -0,0 +1,12 @@ +'use strict' + +const nock = require('nock') + +module.exports = tnock +function tnock (t, host) { + const server = nock(host) + t.tearDown(function () { + server.done() + }) + return server +} diff --git a/node_modules/libnpmpublish/unpublish.js b/node_modules/libnpmpublish/unpublish.js new file mode 100644 index 0000000000000..d7d98243c6b09 --- /dev/null +++ b/node_modules/libnpmpublish/unpublish.js @@ -0,0 +1,86 @@ +'use strict' + +const figgyPudding = require('figgy-pudding') +const npa = require('npm-package-arg') +const npmFetch = require('npm-registry-fetch') +const semver = require('semver') +const url = require('url') + +const UnpublishConfig = figgyPudding({ + force: { default: false }, + Promise: { default: () => Promise } +}) + +module.exports = unpublish +function unpublish (spec, opts) { + opts = UnpublishConfig(opts) + return new opts.Promise(resolve => resolve()).then(() => { + spec = npa(spec) + // NOTE: spec is used to pick the appropriate registry/auth combo. + opts = opts.concat({ spec }) + const pkgUri = spec.escapedName + return npmFetch.json(pkgUri, opts.concat({ + query: { write: true } + })).then(pkg => { + if (!spec.rawSpec || spec.rawSpec === '*') { + return npmFetch(`${pkgUri}/-rev/${pkg._rev}`, opts.concat({ + method: 'DELETE', + ignoreBody: true + })) + } else { + const version = spec.rawSpec + const allVersions = pkg.versions || {} + const versionPublic = allVersions[version] + let dist + if (versionPublic) { + dist = allVersions[version].dist + } + delete allVersions[version] + // if it was the only version, then delete the whole package. 
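+        // Otherwise: drop dist-tags pointing at the removed version (re-pointing 'latest' at the highest remaining version), strip _revisions and _attachments, PUT the updated packument, then DELETE the now-orphaned tarball at its own /-rev/<rev> URL.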
+ if (!Object.keys(allVersions).length) { + return npmFetch(`${pkgUri}/-rev/${pkg._rev}`, opts.concat({ + method: 'DELETE', + ignoreBody: true + })) + } else if (versionPublic) { + const latestVer = pkg['dist-tags'].latest + Object.keys(pkg['dist-tags']).forEach(tag => { + if (pkg['dist-tags'][tag] === version) { + delete pkg['dist-tags'][tag] + } + }) + + if (latestVer === version) { + pkg['dist-tags'].latest = Object.keys( + allVersions + ).sort(semver.compareLoose).pop() + } + + delete pkg._revisions + delete pkg._attachments + // Update packument with removed versions + return npmFetch(`${pkgUri}/-rev/${pkg._rev}`, opts.concat({ + method: 'PUT', + body: pkg, + ignoreBody: true + })).then(() => { + // Remove the tarball itself + return npmFetch.json(pkgUri, opts.concat({ + query: { write: true } + })).then(({ _rev, _id }) => { + const tarballUrl = url.parse(dist.tarball).pathname.substr(1) + return npmFetch(`${tarballUrl}/-rev/${_rev}`, opts.concat({ + method: 'DELETE', + ignoreBody: true + })) + }) + }) + } + } + }, err => { + if (err.code !== 'E404') { + throw err + } + }) + }).then(() => true) +} diff --git a/node_modules/libnpmsearch/.travis.yml b/node_modules/libnpmsearch/.travis.yml new file mode 100644 index 0000000000000..db5ea8b018640 --- /dev/null +++ b/node_modules/libnpmsearch/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +sudo: false +node_js: + - "10" + - "9" + - "8" + - "6" diff --git a/node_modules/libnpmsearch/CHANGELOG.md b/node_modules/libnpmsearch/CHANGELOG.md new file mode 100644 index 0000000000000..1adf2d0db52ce --- /dev/null +++ b/node_modules/libnpmsearch/CHANGELOG.md @@ -0,0 +1,42 @@ +# Change Log + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + + +## [2.0.2](https://github.com/npm/libnpmsearch/compare/v2.0.1...v2.0.2) (2019-07-16) + + + + +## [2.0.1](https://github.com/npm/libnpmsearch/compare/v2.0.0...v2.0.1) (2019-06-10) + + +### Bug Fixes + +* **opts:** support `opts.from` properly ([#2](https://github.com/npm/libnpmsearch/issues/2)) ([da6636c](https://github.com/npm/libnpmsearch/commit/da6636c)) +* **standard:** standard --fix ([beca19c](https://github.com/npm/libnpmsearch/commit/beca19c)) + + + + +# [2.0.0](https://github.com/npm/libnpmsearch/compare/v1.0.0...v2.0.0) (2018-08-28) + + +### Features + +* **opts:** added options for pagination, details, and sorting weights ([ff97eb5](https://github.com/npm/libnpmsearch/commit/ff97eb5)) + + +### BREAKING CHANGES + +* **opts:** this changes default requests and makes libnpmsearch return more complete data for individual packages, without null-defaulting + + + + +# 1.0.0 (2018-08-27) + + +### Features + +* **api:** got API working ([fe90008](https://github.com/npm/libnpmsearch/commit/fe90008)) diff --git a/node_modules/libnpmsearch/CODE_OF_CONDUCT.md b/node_modules/libnpmsearch/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000000..aeb72f598dcb4 --- /dev/null +++ b/node_modules/libnpmsearch/CODE_OF_CONDUCT.md @@ -0,0 +1,151 @@ +# Code of Conduct + +## When Something Happens + +If you see a Code of Conduct violation, follow these steps: + +1. Let the person know that what they did is not appropriate and ask them to stop and/or edit their message(s) or commits. +2. That person should immediately stop the behavior and correct the issue. +3. If this doesn’t happen, or if you're uncomfortable speaking up, [contact the maintainers](#contacting-maintainers). +4. 
As soon as available, a maintainer will look into the issue, and take [further action (see below)](#further-enforcement), starting with a warning, then temporary block, then long-term repo or organization ban. + +When reporting, please include any relevant details, links, screenshots, context, or other information that may be used to better understand and resolve the situation. + +**The maintainer team will prioritize the well-being and comfort of the recipients of the violation over the comfort of the violator.** See [some examples below](#enforcement-examples). + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as contributors and maintainers of this project pledge to making participation in our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, technical preferences, nationality, personal appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment include: + + * Using welcoming and inclusive language. + * Being respectful of differing viewpoints and experiences. + * Gracefully accepting constructive feedback. + * Focusing on what is best for the community. + * Showing empathy and kindness towards other community members. + * Encouraging and raising up your peers in the project so you can all bask in hacks and glory. + +Examples of unacceptable behavior by participants include: + + * The use of sexualized language or imagery and unwelcome sexual attention or advances, including when simulated online. The only exception to sexual topics is channels/spaces specifically for topics of sexual identity. + * Casual mention of slavery or indentured servitude and/or false comparisons of one's occupation or situation to slavery. Please consider using or asking about alternate terminology when referring to such metaphors in technology. + * Making light of/making mocking comments about trigger warnings and content warnings. + * Trolling, insulting/derogatory comments, and personal or political attacks. + * Public or private harassment, deliberate intimidation, or threats. + * Publishing others' private information, such as a physical or electronic address, without explicit permission. This includes any sort of "outing" of any aspect of someone's identity without their consent. + * Publishing private screenshots or quotes of interactions in the context of this project without all quoted users' *explicit* consent. + * Publishing of private communication that doesn't have to do with reporting harassment. + * Any of the above even when [presented as "ironic" or "joking"](https://en.wikipedia.org/wiki/Hipster_racism). + * Any attempt to present "reverse-ism" versions of the above as violations. Examples of reverse-isms are "reverse racism", "reverse sexism", "heterophobia", and "cisphobia". + * Unsolicited explanations under the assumption that someone doesn't already know it. Ask before you teach! Don't assume what people's knowledge gaps are. + * [Feigning or exaggerating surprise](https://www.recurse.com/manual#no-feigned-surprise) when someone admits to not knowing something. + * "[Well-actuallies](https://www.recurse.com/manual#no-well-actuallys)" + * Other conduct which could reasonably be considered inappropriate in a professional or community setting.
+ +## Scope + +This Code of Conduct applies both within spaces involving this project and in other spaces involving community members. This includes the repository, its Pull Requests and Issue tracker, its Twitter community, private email communications in the context of the project, and any events where members of the project are participating, as well as adjacent communities and venues affecting the project's members. + +Depending on the violation, the maintainers may decide that violations of this code of conduct that have happened outside of the scope of the community make an individual unwelcome, and take appropriate action to maintain the comfort and safety of its members. + +### Other Community Standards + +As a project on GitHub, this project is additionally covered by the [GitHub Community Guidelines](https://help.github.com/articles/github-community-guidelines/). + +Additionally, as a project hosted on npm, it is covered by [npm, Inc's Code of Conduct](https://www.npmjs.com/policies/conduct). + +Enforcement of those guidelines after violations overlapping with the above is the responsibility of the entities, and enforcement may happen in any or all of the services/communities. + +## Maintainer Enforcement Process + +Once the maintainers get involved, they will follow a documented series of steps and do their best to preserve the well-being of project members. This section covers actual concrete steps. + +### Contacting Maintainers + +You may get in touch with the maintainer team through any of the following methods: + + * Through email: + * [kzm@zkat.tech](mailto:kzm@zkat.tech) (Kat Marchán) + + * Through Twitter: + * [@maybekatz](https://twitter.com/maybekatz) (Kat Marchán) + +### Further Enforcement + +If you've already followed the [initial enforcement steps](#enforcement), these are the steps maintainers will take for further enforcement, as needed: + + 1. Repeat the request to stop. + 2. If the person doubles down, they will have offending messages removed or edited by a maintainer and be given an official warning. The PR or Issue may be locked. + 3. If the behavior continues or is repeated later, the person will be blocked from participating for 24 hours. + 4. If the behavior continues or is repeated after the temporary block, a long-term (6-12mo) ban will be used. + +On top of this, maintainers may remove any offending messages, images, contributions, etc, as they deem necessary. + +Maintainers reserve full rights to skip any of these steps, at their discretion, if the violation is considered to be a serious and/or immediate threat to the health and well-being of members of the community. These include any threats, serious physical or verbal attacks, and other such behavior that would be completely unacceptable in any social setting that puts our members at risk. + +Members expelled from events or venues with any sort of paid attendance will not be refunded. + +### Who Watches the Watchers? + +Maintainers and other leaders who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. These may include anything from removal from the maintainer team to a permanent ban from the community. + +Additionally, as a project hosted on both GitHub and npm, [their own Codes of Conduct may be applied against maintainers of this project](#other-community-standards), externally of this project's procedures.
+ +### Enforcement Examples + +#### The Best Case + +The vast majority of situations work out like this. This interaction is common, and generally positive. + +> Alex: "Yeah I used X and it was really crazy!" + +> Patt (not a maintainer): "Hey, could you not use that word? What about 'ridiculous' instead?" + +> Alex: "oh sorry, sure." -> edits old comment to say "it was really confusing!" + +#### The Maintainer Case + +Sometimes, though, you need to get maintainers involved. Maintainers will do their best to resolve conflicts, but people who were harmed by something **will take priority**. + +> Patt: "Honestly, sometimes I just really hate using $library and anyone who uses it probably sucks at their job." + +> Alex: "Whoa there, could you dial it back a bit? There's a CoC thing about attacking folks' tech use like that." + +> Patt: "I'm not attacking anyone, what's your problem?" + +> Alex: "@maintainers hey uh. Can someone look at this issue? Patt is getting a bit aggro. I tried to nudge them about it, but nope." + +> KeeperOfCommitBits: (on issue) "Hey Patt, maintainer here. Could you tone it down? This sort of attack is really not okay in this space." + +> Patt: "Leave me alone I haven't said anything bad wtf is wrong with you." + +> KeeperOfCommitBits: (deletes user's comment), "@patt I mean it. Please refer to the CoC over at (URL to this CoC) if you have questions, but you can consider this an actual warning. I'd appreciate it if you reworded your messages in this thread, since they made folks there uncomfortable. Let's try and be kind, yeah?" + +> Patt: "@keeperofbits Okay sorry. I'm just frustrated and I'm kinda burnt out and I guess I got carried away. I'll DM Alex a note apologizing and edit my messages. Sorry for the trouble." + +> KeeperOfCommitBits: "@patt Thanks for that. I hear you on the stress. Burnout sucks :/. Have a good one!" + +#### The Nope Case + +> PepeTheFrog🐸: "Hi, I am a literal actual nazi and I think white supremacists are quite fashionable." + +> Patt: "NOOOOPE. OH NOPE NOPE." + +> Alex: "JFC NO. NOPE. @keeperofbits NOPE NOPE LOOK HERE" + +> KeeperOfCommitBits: "👀 Nope. NOPE NOPE NOPE. 🔥" + +> PepeTheFrog🐸 has been banned from all organization or user repositories belonging to KeeperOfCommitBits. + +## Attribution + +This Code of Conduct was generated using [WeAllJS Code of Conduct Generator](https://npm.im/weallbehave), which is based on the [WeAllJS Code of +Conduct](https://wealljs.org/code-of-conduct), which is itself based on +[Contributor Covenant](http://contributor-covenant.org), version 1.4, available +at +[http://contributor-covenant.org/version/1/4](http://contributor-covenant.org/version/1/4), +and the LGBTQ in Technology Slack [Code of +Conduct](http://lgbtq.technology/coc.html). diff --git a/node_modules/libnpmsearch/CONTRIBUTING.md b/node_modules/libnpmsearch/CONTRIBUTING.md new file mode 100644 index 0000000000000..1a61601a16dba --- /dev/null +++ b/node_modules/libnpmsearch/CONTRIBUTING.md @@ -0,0 +1,256 @@ +# Contributing + +## How do I... + +* [Use This Guide](#introduction)? +* Ask or Say Something? 🤔🐛😱 + * [Request Support](#request-support) + * [Report an Error or Bug](#report-an-error-or-bug) + * [Request a Feature](#request-a-feature) +* Make Something? 
🤓👩🏽‍💻📜🍳 + * [Project Setup](#project-setup) + * [Contribute Documentation](#contribute-documentation) + * [Contribute Code](#contribute-code) +* Manage Something ✅🙆🏼💃👔 + * [Provide Support on Issues](#provide-support-on-issues) + * [Label Issues](#label-issues) + * [Clean Up Issues and PRs](#clean-up-issues-and-prs) + * [Review Pull Requests](#review-pull-requests) + * [Merge Pull Requests](#merge-pull-requests) + * [Tag a Release](#tag-a-release) + * [Join the Project Team](#join-the-project-team) +* Add a Guide Like This One [To My Project](#attribution)? 🤖😻👻 + +## Introduction + +Thank you so much for your interest in contributing! All types of contributions are encouraged and valued. See the [table of contents](#toc) for different ways to help and details about how this project handles them!📝 + +Please make sure to read the relevant section before making your contribution! It will make it a lot easier for us maintainers to make the most of it and smooth out the experience for all involved. 💚 + +The [Project Team](#join-the-project-team) looks forward to your contributions. 🙌🏾✨ + +## Request Support + +If you have a question about this project, how to use it, or just need clarification about something: + +* Open an Issue at https://github.com/npm/libnpmsearch/issues +* Provide as much context as you can about what you're running into. +* Provide project and platform versions (nodejs, npm, etc), depending on what seems relevant. If not, please be ready to provide that information if maintainers ask for it. + +Once it's filed: + +* The project team will [label the issue](#label-issues). +* Someone will try to have a response soon. +* If you or the maintainers don't respond to an issue for 30 days, the [issue will be closed](#clean-up-issues-and-prs). If you want to come back to it, reply (once, please), and we'll reopen the existing issue. Please avoid filing new issues as extensions of one you already made. + +## Report an Error or Bug + +If you run into an error or bug with the project: + +* Open an Issue at https://github.com/npm/libnpmsearch/issues +* Include *reproduction steps* that someone else can follow to recreate the bug or error on their own. +* Provide project and platform versions (nodejs, npm, etc), depending on what seems relevant. If not, please be ready to provide that information if maintainers ask for it. + +Once it's filed: + +* The project team will [label the issue](#label-issues). +* A team member will try to reproduce the issue with your provided steps. If there are no repro steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as `needs-repro`. Bugs with the `needs-repro` tag will not be addressed until they are reproduced. +* If the team is able to reproduce the issue, it will be marked `needs-fix`, as well as possibly other tags (such as `critical`), and the issue will be left to be [implemented by someone](#contribute-code). +* If you or the maintainers don't respond to an issue for 30 days, the [issue will be closed](#clean-up-issues-and-prs). If you want to come back to it, reply (once, please), and we'll reopen the existing issue. Please avoid filing new issues as extensions of one you already made. +* `critical` issues may be left open, depending on perceived immediacy and severity, even past the 30 day deadline.
+ +## Request a Feature + +If the project doesn't do something you need or want it to do: + +* Open an Issue at https://github.com/npm/libnpmsearch/issues +* Provide as much context as you can about what you're running into. +* Please try and be clear about why existing features and alternatives would not work for you. + +Once it's filed: + +* The project team will [label the issue](#label-issues). +* The project team will evaluate the feature request, possibly asking you more questions to understand its purpose and any relevant requirements. If the issue is closed, the team will convey their reasoning and suggest an alternative path forward. +* If the feature request is accepted, it will be marked for implementation with `feature-accepted`, which can then be done either by a core team member or by anyone in the community who wants to [contribute code](#contribute-code). + +Note: The team is unlikely to be able to accept every single feature request that is filed. Please understand if they need to say no. + +## Project Setup + +So you wanna contribute some code! That's great! This project uses GitHub Pull Requests to manage contributions, so [read up on how to fork a GitHub project and file a PR](https://guides.github.com/activities/forking) if you've never done it before. + +If this seems like a lot or you aren't able to do all this setup, you might also be able to [edit the files directly](https://help.github.com/articles/editing-files-in-another-user-s-repository/) without having to do any of this setup. Yes, [even code](#contribute-code). + +If you want to go the usual route and run the project locally, though: + +* [Install Node.js](https://nodejs.org/en/download/) +* [Fork the project](https://guides.github.com/activities/forking/#fork) + +Then in your terminal: +* `cd path/to/your/clone` +* `npm install` +* `npm test` + +And you should be ready to go! + +## Contribute Documentation + +Documentation is a super important, critical part of this project. Docs are how we keep track of what we're doing, how, and why. It's how we stay on the same page about our policies. And it's how we tell others everything they need in order to be able to use this project -- or contribute to it. So thank you in advance. + +Documentation contributions of any size are welcome! Feel free to file a PR even if you're just rewording a sentence to be more clear, or fixing a spelling mistake! + +To contribute documentation: + +* [Set up the project](#project-setup). +* Edit or add any relevant documentation. +* Make sure your changes are formatted correctly and consistently with the rest of the documentation. +* Re-read what you wrote, and run a spellchecker on it to make sure you didn't miss anything. +* In your commit message(s), begin the first line with `docs: `. For example: `docs: Adding a doc contrib section to CONTRIBUTING.md`. +* Write clear, concise commit message(s) using [conventional-changelog format](https://github.com/conventional-changelog/conventional-changelog-angular/blob/master/convention.md). Documentation commits should use `docs(<component>): <message>`. +* Go to https://github.com/npm/libnpmsearch/pulls and open a new pull request with your changes. +* If your PR is connected to an open issue, add a line in your PR's description that says `Fixes: #123`, where `#123` is the number of the issue you're fixing. + +Once you've filed the PR: + +* One or more maintainers will use GitHub's review feature to review your PR.
+* If the maintainer asks for any changes, edit your changes, push, and ask for another review. +* If the maintainer decides to pass on your PR, they will thank you for the contribution and explain why they won't be accepting the changes. That's ok! We still really appreciate you taking the time to do it, and we don't take that lightly. 💚 +* If your PR gets accepted, it will be marked as such, and merged into the `latest` branch soon after. Your contribution will be distributed to the masses next time the maintainers [tag a release](#tag-a-release) + +## Contribute Code + +We like code commits a lot! They're super handy, and they keep the project going and doing the work it needs to do to be useful to others. + +Code contributions of just about any size are acceptable! + +The main difference between code contributions and documentation contributions is that contributing code requires inclusion of relevant tests for the code being added or changed. Contributions without accompanying tests will be held off until a test is added, unless the maintainers consider the specific tests to be either impossible, or way too much of a burden for such a contribution. + +To contribute code: + +* [Set up the project](#project-setup). +* Make any necessary changes to the source code. +* Include any [additional documentation](#contribute-documentation) the changes might need. +* Write tests that verify that your contribution works as expected. +* Write clear, concise commit message(s) using [conventional-changelog format](https://github.com/conventional-changelog/conventional-changelog-angular/blob/master/convention.md). +* Dependency updates, additions, or removals must be in individual commits, and the message must use the format: `<prefix>(deps): PKG@VERSION`, where `<prefix>` is any of the usual `conventional-changelog` prefixes, at your discretion. +* Go to https://github.com/npm/libnpmsearch/pulls and open a new pull request with your changes. +* If your PR is connected to an open issue, add a line in your PR's description that says `Fixes: #123`, where `#123` is the number of the issue you're fixing. + +Once you've filed the PR: + +* Barring special circumstances, maintainers will not review PRs until all checks pass (Travis, AppVeyor, etc). +* One or more maintainers will use GitHub's review feature to review your PR. +* If the maintainer asks for any changes, edit your changes, push, and ask for another review. Additional tags (such as `needs-tests`) will be added depending on the review. +* If the maintainer decides to pass on your PR, they will thank you for the contribution and explain why they won't be accepting the changes. That's ok! We still really appreciate you taking the time to do it, and we don't take that lightly. 💚 +* If your PR gets accepted, it will be marked as such, and merged into the `latest` branch soon after. Your contribution will be distributed to the masses next time the maintainers [tag a release](#tag-a-release) + +## Provide Support on Issues + +[Needs Collaborator](#join-the-project-team): none + +Helping out other users with their questions is a really awesome way of contributing to any community. It's not uncommon for most of the issues on an open source project to be support-related questions from users trying to understand something they ran into, or find their way around a known bug. + +Sometimes, the `support` label will be added to things that turn out to actually be other things, like bugs or feature requests.
In that case, suss out the details with the person who filed the original issue, add a comment explaining what the bug is, and change the label from `support` to `bug` or `feature`. If you can't do this yourself, @mention a maintainer so they can do it. + +In order to help other folks out with their questions: + +* Go to the issue tracker and [filter open issues by the `support` label](https://github.com/npm/libnpmsearch/issues?q=is%3Aopen+is%3Aissue+label%3Asupport). +* Read through the list until you find something that you're familiar enough with to give an answer to. +* Respond to the issue with whatever details are needed to clarify the question, or get more details about what's going on. +* Once the discussion wraps up and things are clarified, either close the issue, or ask the original issue filer (or a maintainer) to close it for you. + +Some notes on picking up support issues: + +* Avoid responding to issues you don't know you can answer accurately. +* As much as possible, try to refer to past issues with accepted answers. Link to them from your replies with the `#123` format. +* Be kind and patient with users -- often, folks who have run into confusing things might be upset or impatient. This is ok. Try to understand where they're coming from, and if you're too uncomfortable with the tone, feel free to stay away or withdraw from the issue. (note: if the user is outright hostile or is violating the CoC, [refer to the Code of Conduct](CODE_OF_CONDUCT.md) to resolve the conflict). + +## Label Issues + +[Needs Collaborator](#join-the-project-team): Issue Tracker + +One of the most important tasks in handling issues is labeling them usefully and accurately. All other tasks involving issues ultimately rely on the issue being classified in such a way that relevant parties looking to do their own tasks can find them quickly and easily. + +In order to label issues, [open up the list of unlabeled issues](https://github.com/npm/libnpmsearch/issues?q=is%3Aopen+is%3Aissue+no%3Alabel) and, **from newest to oldest**, read through each one and apply issue labels according to the table below. If you're unsure about what label to apply, skip the issue and try the next one: don't feel obligated to label each and every issue yourself! + +Label | Apply When | Notes +--- | --- | --- +`bug` | Cases where the code (or documentation) is behaving in a way it wasn't intended to. | If something is happening that surprises the *user* but does not go against the way the code is designed, it should use the `enhancement` label. +`critical` | Added to `bug` issues if the problem described makes the code completely unusable in a common situation. | +`documentation` | Added to issues or pull requests that affect any of the documentation for the project. | Can be combined with other labels, such as `bug` or `enhancement`. +`duplicate` | Added to issues or PRs that refer to the exact same issue as another one that's been previously labeled. | Duplicate issues should be marked and closed right away, with a message referencing the issue it's a duplicate of (with `#123`) +`enhancement` | Added to [feature requests](#request-a-feature), PRs, or documentation issues that are purely additive: the code or docs currently work as expected, but a change is being requested or suggested. | +`help wanted` | Applied by [Committers](#join-the-project-team) to issues and PRs that they would like to get outside help for. 
Generally, this means it's lower priority for the maintainer team to itself implement, but that the community is encouraged to pick up if they so desire | Never applied on first-pass labeling. +`in-progress` | Applied by [Committers](#join-the-project-team) to PRs that are pending some work before they're ready for review. | The original PR submitter should @mention the team member that applied the label once the PR is complete. +`performance` | This issue or PR is directly related to improving performance. | +`refactor` | Added to issues or PRs that deal with cleaning up or modifying the project for the betterment of it. | +`starter` | Applied by [Committers](#join-the-project-team) to issues that they consider good introductions to the project for people who have not contributed before. These are not necessarily "easy", but rather focused around how much context is necessary in order to understand what needs to be done for this project in particular. | Existing project members are expected to stay away from these unless they increase in priority. +`support` | This issue is either asking a question about how to use the project, clarifying the reason for unexpected behavior, or possibly reporting a `bug` but does not have enough detail yet to determine whether it would count as such. | The label should be switched to `bug` if reliable reproduction steps are provided. Issues primarily with unintended configurations of a user's environment are not considered bugs, even if they cause crashes. +`tests` | This issue or PR either requests or adds primarily tests to the project. | If a PR is pending tests, that will be handled through the [PR review process](#review-pull-requests) +`wontfix` | Labelers may apply this label to issues that clearly have nothing at all to do with the project or are otherwise entirely outside of its scope/sphere of influence. [Committers](#join-the-project-team) may apply this label and close an issue or PR if they decide to pass on an otherwise relevant issue. | The issue or PR should be closed as soon as the label is applied, and a clear explanation provided of why the label was used. Contributors are free to contest the labeling, but the decision ultimately falls on committers as to whether to accept something or not. + +## Clean Up Issues and PRs + +[Needs Collaborator](#join-the-project-team): Issue Tracker + +Issues and PRs can go stale after a while. Maybe they're abandoned. Maybe the team will just plain not have time to address them any time soon. + +In these cases, they should be closed until they're brought up again or the interaction starts over. + +To clean up issues and PRs: + +* Search the issue tracker for issues or PRs, and add the term `updated:<=YYYY-MM-DD`, where the date is 30 days before today. +* Go through each issue *from oldest to newest*, and close them if **all of the following are true**: + * not opened by a maintainer + * not marked as `critical` + * not marked as `starter` or `help wanted` (these might stick around for a while, in general, as they're intended to be available) + * no explicit messages in the comments asking for it to be left open + * does not belong to a milestone +* Leave a message when closing saying "Cleaning up stale issue. Please reopen or ping us if and when you're ready to resume this. See https://github.com/npm/libnpmsearch/blob/latest/CONTRIBUTING.md#clean-up-issues-and-prs for more details." 
+ +## Review Pull Requests + +[Needs Collaborator](#join-the-project-team): Issue Tracker + +While anyone can comment on a PR, add feedback, etc, PRs are only *approved* by team members with Issue Tracker or higher permissions. + +PR reviews use [GitHub's own review feature](https://help.github.com/articles/about-pull-request-reviews/), which manages comments, approval, and review iteration. + +Some notes: + +* You may ask for minor changes ("nitpicks"), but consider whether they are really blockers to merging: try to err on the side of "approve, with comments". +* *ALL PULL REQUESTS* should be covered by a test: either by a previously-failing test, an existing test that covers the entire functionality of the submitted code, or new tests to verify any new/changed behavior. All tests must also pass and follow established conventions. Test coverage should not drop, unless the specific case is considered reasonable by maintainers. +* Please make sure you're familiar with the code or documentation being updated, unless it's a minor change (spellchecking, minor formatting, etc). You may @mention another project member who you think is better suited for the review, but still provide a non-approving review of your own. +* Be extra kind: people who submit code/doc contributions are putting themselves in a pretty vulnerable position, and have put time and care into what they've done (even if that's not obvious to you!) -- always respond with respect, be understanding, but don't feel like you need to sacrifice your standards for their sake, either. Just don't be a jerk about it? + +## Merge Pull Requests + +[Needs Collaborator](#join-the-project-team): Committer + +TBD - need to hash out a bit more of this process. + +## Tag A Release + +[Needs Collaborator](#join-the-project-team): Committer + +TBD - need to hash out a bit more of this process. The most important bit here is probably that all tests must pass, and tags must use [semver](https://semver.org). + +## Join the Project Team + +### Ways to Join + +There are many ways to contribute! Most of them don't require any official status unless otherwise noted. That said, there's a couple of positions that grant special repository abilities, and this section describes how they're granted and what they do. + +All of the below positions are granted based on the project team's needs, as well as their consensus opinion about whether they would like to work with the person and think that they would fit well into that position. The process is relatively informal, and it's likely that people who express interest in participating can just be granted the permissions they'd like. + +You can spot a collaborator on the repo by looking for the `[Collaborator]` or `[Owner]` tags next to their names. + +Permission | Description +--- | --- +Issue Tracker | Granted to contributors who express a strong interest in spending time on the project's issue tracker. These tasks are mainly [labeling issues](#label-issues), [cleaning up old ones](#clean-up-issues-and-prs), and [reviewing pull requests](#review-pull-requests), as well as all the usual things non-team-member contributors can do. Issue handlers should not merge pull requests, tag releases, or directly commit code themselves: that should still be done through the usual pull request process. Becoming an Issue Handler means the project team trusts you to understand enough of the team's process and context to implement it on the issue tracker. 
+Committer | Granted to contributors who want to handle the actual pull request merges, tagging new versions, etc. Committers should have a good level of familiarity with the codebase, and enough context to understand the implications of various changes, as well as a good sense of the will and expectations of the project team. +Admin/Owner | Granted to people ultimately responsible for the project, its community, etc. + +## Attribution + +This guide was generated using the WeAllJS `CONTRIBUTING.md` generator. [Make your own](https://npm.im/weallcontribute)! diff --git a/node_modules/libnpmsearch/LICENSE b/node_modules/libnpmsearch/LICENSE new file mode 100644 index 0000000000000..209e4477f39c1 --- /dev/null +++ b/node_modules/libnpmsearch/LICENSE @@ -0,0 +1,13 @@ +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/libnpmsearch/PULL_REQUEST_TEMPLATE b/node_modules/libnpmsearch/PULL_REQUEST_TEMPLATE new file mode 100644 index 0000000000000..9471c6d325f7e --- /dev/null +++ b/node_modules/libnpmsearch/PULL_REQUEST_TEMPLATE @@ -0,0 +1,7 @@ + diff --git a/node_modules/libnpmsearch/README.md b/node_modules/libnpmsearch/README.md new file mode 100644 index 0000000000000..a83988cc2867d --- /dev/null +++ b/node_modules/libnpmsearch/README.md @@ -0,0 +1,169 @@ +# libnpmsearch [![npm version](https://img.shields.io/npm/v/libnpmsearch.svg)](https://npm.im/libnpmsearch) [![license](https://img.shields.io/npm/l/libnpmsearch.svg)](https://npm.im/libnpmsearch) [![Travis](https://img.shields.io/travis/npm/libnpmsearch.svg)](https://travis-ci.org/npm/libnpmsearch) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/libnpmsearch?svg=true)](https://ci.appveyor.com/project/zkat/libnpmsearch) [![Coverage Status](https://coveralls.io/repos/github/npm/libnpmsearch/badge.svg?branch=latest)](https://coveralls.io/github/npm/libnpmsearch?branch=latest) + +[`libnpmsearch`](https://github.com/npm/libnpmsearch) is a Node.js library for +programmatically accessing the npm search endpoint. It does **not** support +legacy search through `/-/all`. + +## Example + +```js +const search = require('libnpmsearch') + +console.log(await search('libnpm')) +=> +[ + { + name: 'libnpm', + description: 'programmatic npm API', + ...etc + }, + { + name: 'libnpmsearch', + description: 'Programmatic API for searching in npm and compatible registries', + ...etc + }, + ...more +] +``` + +## Install + +`$ npm install libnpmsearch` + +## Table of Contents + +* [Example](#example) +* [Install](#install) +* [API](#api) + * [search opts](#opts) + * [`search()`](#search) + * [`search.stream()`](#search-stream) + +### API + +#### `opts` for `libnpmsearch` commands + +The following opts are used directly by `libnpmsearch` itself: + +* `opts.limit` - Number of results to limit the query to. 
Default: 20
+* `opts.from` - Offset number for results. Used with `opts.limit` for pagination. Default: 0
+* `opts.detailed` - If true, returns an object with `package`, `score`, and `searchScore` fields, with `package` being what would usually be returned, and the other two containing details about how that package scored. Useful for UIs. Default: false
+* `opts.sortBy` - Used as a shorthand to set `opts.quality`, `opts.maintenance`, and `opts.popularity` with values that prioritize each one. Should be one of `'optimal'`, `'quality'`, `'maintenance'`, or `'popularity'`. Default: `'optimal'`
+* `opts.maintenance` - Decimal number between `0` and `1` that defines the weight of `maintenance` metrics when scoring and sorting packages. Default: `0.5` (same as `opts.sortBy: 'optimal'`)
+* `opts.popularity` - Decimal number between `0` and `1` that defines the weight of `popularity` metrics when scoring and sorting packages. Default: `0.98` (same as `opts.sortBy: 'optimal'`)
+* `opts.quality` - Decimal number between `0` and `1` that defines the weight of `quality` metrics when scoring and sorting packages. Default: `0.65` (same as `opts.sortBy: 'optimal'`)
+
+`libnpmsearch` uses [`npm-registry-fetch`](https://npm.im/npm-registry-fetch).
+Most options are passed through directly to that library, so please refer to
+[its own `opts`
+documentation](https://www.npmjs.com/package/npm-registry-fetch#fetch-options)
+for options that can be passed in.
+
+A couple of options of note for those in a hurry:
+
+* `opts.token` - can be passed in and will be used as the authentication token for the registry. For other ways to pass in auth details, see the n-r-f docs.
+* `opts.Promise` - If you pass this in, the Promises returned by `libnpmsearch` commands will use this Promise class instead. For example: `{Promise: require('bluebird')}`
+
+#### `> search(query, [opts]) -> Promise`
+
+`query` must be either a String or an Array of search terms.
+
+If `opts.limit` is provided, it will be sent to the API to constrain the number
+of returned results. You may receive more, or fewer results, at the endpoint's
+discretion.
+
+The returned Promise resolves to an Array of search results with the following
+format:
+
+```js
+{
+  name: String,
+  version: SemverString,
+  description: String || null,
+  maintainers: [
+    {
+      username: String,
+      email: String
+    },
+    ...etc
+  ] || null,
+  keywords: [String] || null,
+  date: Date || null
+}
+```
+
+For streamed results, see [`search.stream`](#search-stream).
+
+##### Example
+
+```javascript
+await search('libnpm')
+=>
+[
+  {
+    name: 'libnpm',
+    description: 'programmatic npm API',
+    ...etc
+  },
+  {
+    name: 'libnpmsearch',
+    description: 'Programmatic API for searching in npm and compatible registries',
+    ...etc
+  },
+  ...more
+]
+```
+
+#### `> search.stream(query, [opts]) -> Stream`
+
+`query` must be either a String or an Array of search terms.
+
+If `opts.limit` is provided, it will be sent to the API to constrain the number
+of returned results. You may receive more, or fewer results, at the endpoint's
+discretion.
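+
+For instance, the [opts documented above](#opts) apply to the stream variant just as they do to `search()`. The sketch below is illustrative only -- the query string, option values, and environment variable are placeholders, not part of the API:
+
+```js
+const search = require('libnpmsearch')
+
+// `limit` is forwarded to the registry as the `size` query parameter, but the
+// endpoint treats it as a hint, so more or fewer entries may be emitted.
+search.stream('logger', {
+  limit: 5,
+  sortBy: 'popularity',
+  detailed: true, // each entry then includes `package`, `score`, and `searchScore`
+  token: process.env.NPM_TOKEN // placeholder; forwarded to npm-registry-fetch for auth
+})
+  .on('data', entry => console.log(entry.package.name, entry.score.final))
+  .on('error', err => console.error('search failed:', err))
+```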
+ +The returned Stream emits one entry per search result, with each entry having +the following format: + +```js +{ + name: String, + version: SemverString, + description: String || null, + maintainers: [ + { + username: String, + email: String + }, + ...etc + ] || null, + keywords: [String] || null, + date: Date || null +} +``` + +For getting results in one chunk, see [`search`](#search-stream). + +##### Example + +```javascript +search.stream('libnpm').on('data', console.log) +=> +// entry 1 +{ + name: 'libnpm', + description: 'programmatic npm API', + ...etc +} +// entry 2 +{ + name: 'libnpmsearch', + description: 'Programmatic API for searching in npm and compatible registries', + ...etc +} +// etc +``` diff --git a/node_modules/libnpmsearch/appveyor.yml b/node_modules/libnpmsearch/appveyor.yml new file mode 100644 index 0000000000000..9cc64c58e02f9 --- /dev/null +++ b/node_modules/libnpmsearch/appveyor.yml @@ -0,0 +1,22 @@ +environment: + matrix: + - nodejs_version: "10" + - nodejs_version: "9" + - nodejs_version: "8" + - nodejs_version: "6" + +platform: + - x64 + +install: + - ps: Install-Product node $env:nodejs_version $env:platform + - npm config set spin false + - npm install + +test_script: + - npm test + +matrix: + fast_finish: true + +build: off diff --git a/node_modules/libnpmsearch/index.js b/node_modules/libnpmsearch/index.js new file mode 100644 index 0000000000000..995549aeee03d --- /dev/null +++ b/node_modules/libnpmsearch/index.js @@ -0,0 +1,80 @@ +'use strict' + +const figgyPudding = require('figgy-pudding') +const getStream = require('get-stream') +const npmFetch = require('npm-registry-fetch') + +const SearchOpts = figgyPudding({ + detailed: { default: false }, + limit: { default: 20 }, + from: { default: 0 }, + quality: { default: 0.65 }, + popularity: { default: 0.98 }, + maintenance: { default: 0.5 }, + sortBy: {} +}) + +module.exports = search +function search (query, opts) { + return getStream.array(search.stream(query, opts)) +} +search.stream = searchStream +function searchStream (query, opts) { + opts = SearchOpts(opts) + switch (opts.sortBy) { + case 'optimal': { + opts = opts.concat({ + quality: 0.65, + popularity: 0.98, + maintenance: 0.5 + }) + break + } + case 'quality': { + opts = opts.concat({ + quality: 1, + popularity: 0, + maintenance: 0 + }) + break + } + case 'popularity': { + opts = opts.concat({ + quality: 0, + popularity: 1, + maintenance: 0 + }) + break + } + case 'maintenance': { + opts = opts.concat({ + quality: 0, + popularity: 0, + maintenance: 1 + }) + break + } + } + return npmFetch.json.stream('/-/v1/search', 'objects.*', + opts.concat({ + query: { + text: Array.isArray(query) ? 
query.join(' ') : query, + size: opts.limit, + from: opts.from, + quality: opts.quality, + popularity: opts.popularity, + maintenance: opts.maintenance + }, + mapJson (obj) { + if (obj.package.date) { + obj.package.date = new Date(obj.package.date) + } + if (opts.detailed) { + return obj + } else { + return obj.package + } + } + }) + ) +} diff --git a/node_modules/libnpmsearch/package.json b/node_modules/libnpmsearch/package.json new file mode 100644 index 0000000000000..3ad48cb72f593 --- /dev/null +++ b/node_modules/libnpmsearch/package.json @@ -0,0 +1,72 @@ +{ + "_from": "libnpmsearch@2.0.2", + "_id": "libnpmsearch@2.0.2", + "_inBundle": false, + "_integrity": "sha512-VTBbV55Q6fRzTdzziYCr64+f8AopQ1YZ+BdPOv16UegIEaE8C0Kch01wo4s3kRTFV64P121WZJwgmBwrq68zYg==", + "_location": "/libnpmsearch", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "libnpmsearch@2.0.2", + "name": "libnpmsearch", + "escapedName": "libnpmsearch", + "rawSpec": "2.0.2", + "saveSpec": null, + "fetchSpec": "2.0.2" + }, + "_requiredBy": [ + "#USER", + "/" + ], + "_resolved": "https://registry.npmjs.org/libnpmsearch/-/libnpmsearch-2.0.2.tgz", + "_shasum": "9a4f059102d38e3dd44085bdbfe5095f2a5044cf", + "_spec": "libnpmsearch@2.0.2", + "_where": "/Users/isaacs/dev/npm/cli", + "author": { + "name": "Kat Marchán", + "email": "kzm@zkat.tech" + }, + "bugs": { + "url": "https://github.com/npm/libnpmsearch/issues" + }, + "bundleDependencies": false, + "dependencies": { + "figgy-pudding": "^3.5.1", + "get-stream": "^4.0.0", + "npm-registry-fetch": "^4.0.0" + }, + "deprecated": false, + "description": "Programmatic API for searching in npm and compatible registries.", + "devDependencies": { + "nock": "^9.6.1", + "standard": "^12.0.0", + "standard-version": "*", + "tap": "^12.0.1", + "weallbehave": "*", + "weallcontribute": "*" + }, + "homepage": "https://npmjs.com/package/libnpmsearch", + "keywords": [ + "npm", + "search", + "api", + "libnpm" + ], + "license": "ISC", + "name": "libnpmsearch", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/libnpmsearch.git" + }, + "scripts": { + "postrelease": "npm publish && git push --follow-tags", + "prerelease": "npm t", + "pretest": "standard", + "release": "standard-version -s", + "test": "tap -J --100 test/*.js", + "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", + "update-contrib": "weallcontribute -o . 
&& git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" + }, + "version": "2.0.2" +} diff --git a/node_modules/libnpmsearch/test/index.js b/node_modules/libnpmsearch/test/index.js new file mode 100644 index 0000000000000..bec5c70e46424 --- /dev/null +++ b/node_modules/libnpmsearch/test/index.js @@ -0,0 +1,300 @@ +'use strict' + +const figgyPudding = require('figgy-pudding') +const getStream = require('get-stream') +const qs = require('querystring') +const test = require('tap').test +const tnock = require('./util/tnock.js') + +const OPTS = figgyPudding({ registry: {} })({ + registry: 'https://mock.reg/' +}) + +const REG = OPTS.registry +const search = require('../index.js') + +test('basic test', t => { + const query = qs.stringify({ + text: 'oo', + size: 20, + from: 0, + quality: 0.65, + popularity: 0.98, + maintenance: 0.5 + }) + tnock(t, REG).get(`/-/v1/search?${query}`).once().reply(200, { + objects: [ + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'foo', version: '2.0.0' } } + ] + }) + return search('oo', OPTS).then(results => { + t.similar(results, [{ + name: 'cool', + version: '1.0.0' + }, { + name: 'foo', + version: '2.0.0' + }], 'got back an array of search results') + }) +}) + +test('search.stream', t => { + const query = qs.stringify({ + text: 'oo', + size: 20, + from: 0, + quality: 0.65, + popularity: 0.98, + maintenance: 0.5 + }) + tnock(t, REG).get(`/-/v1/search?${query}`).once().reply(200, { + objects: [ + { package: { name: 'cool', version: '1.0.0', date: new Date().toISOString() } }, + { package: { name: 'foo', version: '2.0.0' } } + ] + }) + return getStream.array( + search.stream('oo', OPTS) + ).then(results => { + t.similar(results, [{ + name: 'cool', + version: '1.0.0' + }, { + name: 'foo', + version: '2.0.0' + }], 'has a stream-based API function with identical results') + }) +}) + +test('accepts a limit option', t => { + const query = qs.stringify({ + text: 'oo', + size: 3, + from: 0, + quality: 0.65, + popularity: 0.98, + maintenance: 0.5 + }) + tnock(t, REG).get(`/-/v1/search?${query}`).once().reply(200, { + objects: [ + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } } + ] + }) + return search('oo', OPTS.concat({ limit: 3 })).then(results => { + t.equal(results.length, 4, 'returns more results if endpoint does so') + }) +}) + +test('accepts a from option', t => { + const query = qs.stringify({ + text: 'oo', + size: 20, + from: 1, + quality: 0.65, + popularity: 0.98, + maintenance: 0.5 + }) + tnock(t, REG).get(`/-/v1/search?${query}`).once().reply(200, { + objects: [ + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.1.0' } }, + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } } + ] + }) + return search('oo', OPTS.concat({ from: 1 })).then(results => { + t.equal(results.length, 4, 'returns more results if endpoint does so') + }) +}) + +test('accepts quality/mainenance/popularity options', t => { + const query = qs.stringify({ + text: 'oo', + size: 20, + from: 0, + quality: 1, + popularity: 2, + maintenance: 3 + }) + tnock(t, REG).get(`/-/v1/search?${query}`).once().reply(200, { + objects: [ + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', 
version: '1.0.0' } } + ] + }) + return search('oo', OPTS.concat({ + quality: 1, + popularity: 2, + maintenance: 3 + })).then(results => { + t.equal(results.length, 4, 'returns more results if endpoint does so') + }) +}) + +test('sortBy: quality', t => { + const query = qs.stringify({ + text: 'oo', + size: 20, + from: 0, + quality: 1, + popularity: 0, + maintenance: 0 + }) + tnock(t, REG).get(`/-/v1/search?${query}`).once().reply(200, { + objects: [ + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } } + ] + }) + return search('oo', OPTS.concat({ + sortBy: 'quality' + })).then(results => { + t.equal(results.length, 4, 'returns more results if endpoint does so') + }) +}) + +test('sortBy: popularity', t => { + const query = qs.stringify({ + text: 'oo', + size: 20, + from: 0, + quality: 0, + popularity: 1, + maintenance: 0 + }) + tnock(t, REG).get(`/-/v1/search?${query}`).once().reply(200, { + objects: [ + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } } + ] + }) + return search('oo', OPTS.concat({ + sortBy: 'popularity' + })).then(results => { + t.equal(results.length, 4, 'returns more results if endpoint does so') + }) +}) + +test('sortBy: maintenance', t => { + const query = qs.stringify({ + text: 'oo', + size: 20, + from: 0, + quality: 0, + popularity: 0, + maintenance: 1 + }) + tnock(t, REG).get(`/-/v1/search?${query}`).once().reply(200, { + objects: [ + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } } + ] + }) + return search('oo', OPTS.concat({ + sortBy: 'maintenance' + })).then(results => { + t.equal(results.length, 4, 'returns more results if endpoint does so') + }) +}) + +test('sortBy: optimal', t => { + const query = qs.stringify({ + text: 'oo', + size: 20, + from: 0, + quality: 0.65, + popularity: 0.98, + maintenance: 0.5 + }) + tnock(t, REG).get(`/-/v1/search?${query}`).once().reply(200, { + objects: [ + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } } + ] + }) + return search('oo', OPTS.concat({ + sortBy: 'optimal' + })).then(results => { + t.equal(results.length, 4, 'returns more results if endpoint does so') + }) +}) + +test('detailed format', t => { + const query = qs.stringify({ + text: 'oo', + size: 20, + from: 0, + quality: 0, + popularity: 0, + maintenance: 1 + }) + const results = [ + { + package: { name: 'cool', version: '1.0.0' }, + score: { + final: 0.9237841281241451, + detail: { + quality: 0.9270640902288084, + popularity: 0.8484861649808381, + maintenance: 0.9962706951777409 + } + }, + searchScore: 100000.914 + }, + { + package: { name: 'ok', version: '2.0.0' }, + score: { + final: 0.9237841281451, + detail: { + quality: 0.9270602288084, + popularity: 0.8461649808381, + maintenance: 0.9706951777409 + } + }, + searchScore: 1000.91 + } + ] + tnock(t, REG).get(`/-/v1/search?${query}`).once().reply(200, { + objects: results + }) + return search('oo', OPTS.concat({ + sortBy: 'maintenance', + detailed: true + })).then(res => { + t.deepEqual(res, results, 'return full-format 
results with opts.detailed') + }) +}) + +test('space-separates and URI-encodes multiple search params', t => { + const query = qs.stringify({ + text: 'foo bar:baz quux?=', + size: 1, + from: 0, + quality: 1, + popularity: 2, + maintenance: 3 + }) + tnock(t, REG).get(`/-/v1/search?${query}`).reply(200, { objects: [] }) + return search(['foo', 'bar:baz', 'quux?='], OPTS.concat({ + limit: 1, + quality: 1, + popularity: 2, + maintenance: 3 + })).then( + () => t.ok(true, 'sent parameters correctly urlencoded') + ) +}) diff --git a/node_modules/libnpmsearch/test/util/tnock.js b/node_modules/libnpmsearch/test/util/tnock.js new file mode 100644 index 0000000000000..00b6e160e1019 --- /dev/null +++ b/node_modules/libnpmsearch/test/util/tnock.js @@ -0,0 +1,12 @@ +'use strict' + +const nock = require('nock') + +module.exports = tnock +function tnock (t, host) { + const server = nock(host) + t.tearDown(function () { + server.done() + }) + return server +} diff --git a/node_modules/libnpmteam/.travis.yml b/node_modules/libnpmteam/.travis.yml new file mode 100644 index 0000000000000..db5ea8b018640 --- /dev/null +++ b/node_modules/libnpmteam/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +sudo: false +node_js: + - "10" + - "9" + - "8" + - "6" diff --git a/node_modules/libnpmteam/CHANGELOG.md b/node_modules/libnpmteam/CHANGELOG.md new file mode 100644 index 0000000000000..c5a8630a91164 --- /dev/null +++ b/node_modules/libnpmteam/CHANGELOG.md @@ -0,0 +1,28 @@ +# Change Log + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + + +## [1.0.2](https://github.com/npm/libnpmteam/compare/v1.0.1...v1.0.2) (2019-07-16) + + +### Bug Fixes + +* **standard:** standard --fix ([3dc9144](https://github.com/npm/libnpmteam/commit/3dc9144)) + + + + +## [1.0.1](https://github.com/npm/libnpmteam/compare/v1.0.0...v1.0.1) (2018-08-24) + + + + +# 1.0.0 (2018-08-22) + + +### Features + +* **api:** implement team api ([50dd0e1](https://github.com/npm/libnpmteam/commit/50dd0e1)) +* **docs:** add fully-documented readme ([b1370f3](https://github.com/npm/libnpmteam/commit/b1370f3)) +* **test:** test --100 ftw ([9d3bdc3](https://github.com/npm/libnpmteam/commit/9d3bdc3)) diff --git a/node_modules/libnpmteam/CODE_OF_CONDUCT.md b/node_modules/libnpmteam/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000000..aeb72f598dcb4 --- /dev/null +++ b/node_modules/libnpmteam/CODE_OF_CONDUCT.md @@ -0,0 +1,151 @@ +# Code of Conduct + +## When Something Happens + +If you see a Code of Conduct violation, follow these steps: + +1. Let the person know that what they did is not appropriate and ask them to stop and/or edit their message(s) or commits. +2. That person should immediately stop the behavior and correct the issue. +3. If this doesn’t happen, or if you're uncomfortable speaking up, [contact the maintainers](#contacting-maintainers). +4. As soon as available, a maintainer will look into the issue, and take [further action (see below)](#further-enforcement), starting with a warning, then temporary block, then long-term repo or organization ban. + +When reporting, please include any relevant details, links, screenshots, context, or other information that may be used to better understand and resolve the situation. + +**The maintainer team will prioritize the well-being and comfort of the recipients of the violation over the comfort of the violator.** See [some examples below](#enforcement-examples). 
+ +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as contributors and maintainers of this project pledge to making participation in our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, technical preferences, nationality, personal appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment include: + + * Using welcoming and inclusive language. + * Being respectful of differing viewpoints and experiences. + * Gracefully accepting constructive feedback. + * Focusing on what is best for the community. + * Showing empathy and kindness towards other community members. + * Encouraging and raising up your peers in the project so you can all bask in hacks and glory. + +Examples of unacceptable behavior by participants include: + + * The use of sexualized language or imagery and unwelcome sexual attention or advances, including when simulated online. The only exception to sexual topics is channels/spaces specifically for topics of sexual identity. + * Casual mention of slavery or indentured servitude and/or false comparisons of one's occupation or situation to slavery. Please consider using or asking about alternate terminology when referring to such metaphors in technology. + * Making light of/making mocking comments about trigger warnings and content warnings. + * Trolling, insulting/derogatory comments, and personal or political attacks. + * Public or private harassment, deliberate intimidation, or threats. + * Publishing others' private information, such as a physical or electronic address, without explicit permission. This includes any sort of "outing" of any aspect of someone's identity without their consent. + * Publishing private screenshots or quotes of interactions in the context of this project without all quoted users' *explicit* consent. + * Publishing of private communication that doesn't have to do with reporting harrassment. + * Any of the above even when [presented as "ironic" or "joking"](https://en.wikipedia.org/wiki/Hipster_racism). + * Any attempt to present "reverse-ism" versions of the above as violations. Examples of reverse-isms are "reverse racism", "reverse sexism", "heterophobia", and "cisphobia". + * Unsolicited explanations under the assumption that someone doesn't already know it. Ask before you teach! Don't assume what people's knowledge gaps are. + * [Feigning or exaggerating surprise](https://www.recurse.com/manual#no-feigned-surprise) when someone admits to not knowing something. + * "[Well-actuallies](https://www.recurse.com/manual#no-well-actuallys)" + * Other conduct which could reasonably be considered inappropriate in a professional or community setting. + +## Scope + +This Code of Conduct applies both within spaces involving this project and in other spaces involving community members. This includes the repository, its Pull Requests and Issue tracker, its Twitter community, private email communications in the context of the project, and any events where members of the project are participating, as well as adjacent communities and venues affecting the project's members. 
+
+Depending on the violation, the maintainers may decide that violations of this code of conduct that happened outside the scope of the community can make an individual unwelcome, and take appropriate action to maintain the comfort and safety of its members.
+
+### Other Community Standards
+
+As a project on GitHub, this project is additionally covered by the [GitHub Community Guidelines](https://help.github.com/articles/github-community-guidelines/).
+
+Additionally, as a project hosted on npm, it is covered by [npm, Inc's Code of Conduct](https://www.npmjs.com/policies/conduct).
+
+Enforcement of those guidelines after violations overlapping with the above is the responsibility of those entities, and enforcement may happen in any or all of the services/communities.
+
+## Maintainer Enforcement Process
+
+Once the maintainers get involved, they will follow a documented series of steps and do their best to preserve the well-being of project members. This section covers actual concrete steps.
+
+### Contacting Maintainers
+
+You may get in touch with the maintainer team through any of the following methods:
+
+ * Through email:
+   * [kzm@zkat.tech](mailto:kzm@zkat.tech) (Kat Marchán)
+
+ * Through Twitter:
+   * [@maybekatz](https://twitter.com/maybekatz) (Kat Marchán)
+
+### Further Enforcement
+
+If you've already followed the [initial enforcement steps](#enforcement), these are the steps maintainers will take for further enforcement, as needed:
+
+ 1. Repeat the request to stop.
+ 2. If the person doubles down, they will have offending messages removed or edited by a maintainer and be given an official warning. The PR or Issue may be locked.
+ 3. If the behavior continues or is repeated later, the person will be blocked from participating for 24 hours.
+ 4. If the behavior continues or is repeated after the temporary block, a long-term (6-12mo) ban will be used.
+
+On top of this, maintainers may remove any offending messages, images, contributions, etc, as they deem necessary.
+
+Maintainers reserve full rights to skip any of these steps, at their discretion, if the violation is considered to be a serious and/or immediate threat to the health and well-being of members of the community. These include any threats, serious physical or verbal attacks, and other such behavior that would be completely unacceptable in any social setting that puts our members at risk.
+
+Members expelled from events or venues with any sort of paid attendance will not be refunded.
+
+### Who Watches the Watchers?
+
+Maintainers and other leaders who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. These may include anything from removal from the maintainer team to a permanent ban from the community.
+
+Additionally, as a project hosted on both GitHub and npm, [their own Codes of Conduct may be applied against maintainers of this project](#other-community-standards), externally of this project's procedures.
+
+### Enforcement Examples
+
+#### The Best Case
+
+The vast majority of situations work out like this. This interaction is common, and generally positive.
+
+> Alex: "Yeah I used X and it was really crazy!"
+
+> Patt (not a maintainer): "Hey, could you not use that word? What about 'ridiculous' instead?"
+
+> Alex: "oh sorry, sure." -> edits old comment to say "it was really confusing!"
+
+#### The Maintainer Case
+
+Sometimes, though, you need to get maintainers involved.
Maintainers will do their best to resolve conflicts, but people who were harmed by something **will take priority**. + +> Patt: "Honestly, sometimes I just really hate using $library and anyone who uses it probably sucks at their job." + +> Alex: "Whoa there, could you dial it back a bit? There's a CoC thing about attacking folks' tech use like that." + +> Patt: "I'm not attacking anyone, what's your problem?" + +> Alex: "@maintainers hey uh. Can someone look at this issue? Patt is getting a bit aggro. I tried to nudge them about it, but nope." + +> KeeperOfCommitBits: (on issue) "Hey Patt, maintainer here. Could you tone it down? This sort of attack is really not okay in this space." + +> Patt: "Leave me alone I haven't said anything bad wtf is wrong with you." + +> KeeperOfCommitBits: (deletes user's comment), "@patt I mean it. Please refer to the CoC over at (URL to this CoC) if you have questions, but you can consider this an actual warning. I'd appreciate it if you reworded your messages in this thread, since they made folks there uncomfortable. Let's try and be kind, yeah?" + +> Patt: "@keeperofbits Okay sorry. I'm just frustrated and I'm kinda burnt out and I guess I got carried away. I'll DM Alex a note apologizing and edit my messages. Sorry for the trouble." + +> KeeperOfCommitBits: "@patt Thanks for that. I hear you on the stress. Burnout sucks :/. Have a good one!" + +#### The Nope Case + +> PepeTheFrog🐸: "Hi, I am a literal actual nazi and I think white supremacists are quite fashionable." + +> Patt: "NOOOOPE. OH NOPE NOPE." + +> Alex: "JFC NO. NOPE. @keeperofbits NOPE NOPE LOOK HERE" + +> KeeperOfCommitBits: "👀 Nope. NOPE NOPE NOPE. 🔥" + +> PepeTheFrog🐸 has been banned from all organization or user repositories belonging to KeeperOfCommitBits. + +## Attribution + +This Code of Conduct was generated using [WeAllJS Code of Conduct Generator](https://npm.im/weallbehave), which is based on the [WeAllJS Code of +Conduct](https://wealljs.org/code-of-conduct), which is itself based on +[Contributor Covenant](http://contributor-covenant.org), version 1.4, available +at +[http://contributor-covenant.org/version/1/4](http://contributor-covenant.org/version/1/4), +and the LGBTQ in Technology Slack [Code of +Conduct](http://lgbtq.technology/coc.html). diff --git a/node_modules/libnpmteam/CONTRIBUTING.md b/node_modules/libnpmteam/CONTRIBUTING.md new file mode 100644 index 0000000000000..3fd40076caae8 --- /dev/null +++ b/node_modules/libnpmteam/CONTRIBUTING.md @@ -0,0 +1,256 @@ +# Contributing + +## How do I... + +* [Use This Guide](#introduction)? +* Ask or Say Something? 🤔🐛😱 + * [Request Support](#request-support) + * [Report an Error or Bug](#report-an-error-or-bug) + * [Request a Feature](#request-a-feature) +* Make Something? 🤓👩🏽‍💻📜🍳 + * [Project Setup](#project-setup) + * [Contribute Documentation](#contribute-documentation) + * [Contribute Code](#contribute-code) +* Manage Something ✅🙆🏼💃👔 + * [Provide Support on Issues](#provide-support-on-issues) + * [Label Issues](#label-issues) + * [Clean Up Issues and PRs](#clean-up-issues-and-prs) + * [Review Pull Requests](#review-pull-requests) + * [Merge Pull Requests](#merge-pull-requests) + * [Tag a Release](#tag-a-release) + * [Join the Project Team](#join-the-project-team) +* Add a Guide Like This One [To My Project](#attribution)? 🤖😻👻 + +## Introduction + +Thank you so much for your interest in contributing!. All types of contributions are encouraged and valued. 
See the [table of contents](#toc) for different ways to help and details about how this project handles them! 📝
+
+Please make sure to read the relevant section before making your contribution! It will make it a lot easier for us maintainers to make the most of it and smooth out the experience for all involved. 💚
+
+The [Project Team](#join-the-project-team) looks forward to your contributions. 🙌🏾✨
+
+## Request Support
+
+If you have a question about this project, how to use it, or just need clarification about something:
+
+* Open an Issue at https://github.com/npm/libnpmteam/issues
+* Provide as much context as you can about what you're running into.
+* Provide project and platform versions (nodejs, npm, etc), depending on what seems relevant. If not, please be ready to provide that information if maintainers ask for it.
+
+Once it's filed:
+
+* The project team will [label the issue](#label-issues).
+* Someone will try to have a response soon.
+* If you or the maintainers don't respond to an issue for 30 days, the [issue will be closed](#clean-up-issues-and-prs). If you want to come back to it, reply (once, please), and we'll reopen the existing issue. Please avoid filing new issues as extensions of one you already made.
+
+## Report an Error or Bug
+
+If you run into an error or bug with the project:
+
+* Open an Issue at https://github.com/npm/libnpmteam/issues
+* Include *reproduction steps* that someone else can follow to recreate the bug or error on their own.
+* Provide project and platform versions (nodejs, npm, etc), depending on what seems relevant. If not, please be ready to provide that information if maintainers ask for it.
+
+Once it's filed:
+
+* The project team will [label the issue](#label-issues).
+* A team member will try to reproduce the issue with your provided steps. If there are no repro steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as `needs-repro`. Bugs with the `needs-repro` tag will not be addressed until they are reproduced.
+* If the team is able to reproduce the issue, it will be marked `needs-fix`, as well as possibly other tags (such as `critical`), and the issue will be left to be [implemented by someone](#contribute-code).
+* If you or the maintainers don't respond to an issue for 30 days, the [issue will be closed](#clean-up-issues-and-prs). If you want to come back to it, reply (once, please), and we'll reopen the existing issue. Please avoid filing new issues as extensions of one you already made.
+* `critical` issues may be left open, depending on perceived immediacy and severity, even past the 30 day deadline.
+
+## Request a Feature
+
+If the project doesn't do something you need or want it to do:
+
+* Open an Issue at https://github.com/npm/libnpmteam/issues
+* Provide as much context as you can about what you're running into.
+* Please try and be clear about why existing features and alternatives would not work for you.
+
+Once it's filed:
+
+* The project team will [label the issue](#label-issues).
+* The project team will evaluate the feature request, possibly asking you more questions to understand its purpose and any relevant requirements. If the issue is closed, the team will convey their reasoning and suggest an alternative path forward.
+* If the feature request is accepted, it will be marked for implementation with `feature-accepted`, which can then be done either by a core team member or by anyone in the community who wants to [contribute code](#contribute-code).
+
+Note: The team is unlikely to be able to accept every single feature request that is filed. Please understand if they need to say no.
+
+## Project Setup
+
+So you wanna contribute some code! That's great! This project uses GitHub Pull Requests to manage contributions, so [read up on how to fork a GitHub project and file a PR](https://guides.github.com/activities/forking) if you've never done it before.
+
+If this seems like a lot or you aren't able to do all this setup, you might also be able to [edit the files directly](https://help.github.com/articles/editing-files-in-another-user-s-repository/) without having to do any of this setup. Yes, [even code](#contribute-code).
+
+If you want to go the usual route and run the project locally, though:
+
+* [Install Node.js](https://nodejs.org/en/download/)
+* [Fork the project](https://guides.github.com/activities/forking/#fork)
+
+Then in your terminal:
+* `cd path/to/your/clone`
+* `npm install`
+* `npm test`
+
+And you should be ready to go!
+
+## Contribute Documentation
+
+Documentation is a super important, critical part of this project. Docs are how we keep track of what we're doing, how, and why. It's how we stay on the same page about our policies. And it's how we tell others everything they need in order to be able to use this project -- or contribute to it. So thank you in advance.
+
+Documentation contributions of any size are welcome! Feel free to file a PR even if you're just rewording a sentence to be more clear, or fixing a spelling mistake!
+
+To contribute documentation:
+
+* [Set up the project](#project-setup).
+* Edit or add any relevant documentation.
+* Make sure your changes are formatted correctly and consistently with the rest of the documentation.
+* Re-read what you wrote, and run a spellchecker on it to make sure you didn't miss anything.
+* In your commit message(s), begin the first line with `docs: `. For example: `docs: Adding a doc contrib section to CONTRIBUTING.md`.
+* Write clear, concise commit message(s) using [conventional-changelog format](https://github.com/conventional-changelog/conventional-changelog-angular/blob/master/convention.md). Documentation commits should use `docs(<component>): <message>`.
+* Go to https://github.com/npm/libnpmteam/pulls and open a new pull request with your changes.
+* If your PR is connected to an open issue, add a line in your PR's description that says `Fixes: #123`, where `#123` is the number of the issue you're fixing.
+
+Once you've filed the PR:
+
+* One or more maintainers will use GitHub's review feature to review your PR.
+* If the maintainer asks for any changes, edit your changes, push, and ask for another review.
+* If the maintainer decides to pass on your PR, they will thank you for the contribution and explain why they won't be accepting the changes. That's ok! We still really appreciate you taking the time to do it, and we don't take that lightly. 💚
+* If your PR gets accepted, it will be marked as such, and merged into the `latest` branch soon after. Your contribution will be distributed to the masses next time the maintainers [tag a release](#tag-a-release).
+
+## Contribute Code
+
+We like code commits a lot! They're super handy, and they keep the project going and doing the work it needs to do to be useful to others.
+
+Code contributions of just about any size are acceptable!
+
+The main difference between code contributions and documentation contributions is that contributing code requires inclusion of relevant tests for the code being added or changed.
Contributions without accompanying tests will be held off until a test is added, unless the maintainers consider the specific tests to be either impossible, or way too much of a burden for such a contribution.
+
+To contribute code:
+
+* [Set up the project](#project-setup).
+* Make any necessary changes to the source code.
+* Include any [additional documentation](#contribute-documentation) the changes might need.
+* Write tests that verify that your contribution works as expected.
+* Write clear, concise commit message(s) using [conventional-changelog format](https://github.com/conventional-changelog/conventional-changelog-angular/blob/master/convention.md).
+* Dependency updates, additions, or removals must be in individual commits, and the message must use the format: `<prefix>(deps): PKG@VERSION`, where `<prefix>` is any of the usual `conventional-changelog` prefixes, at your discretion.
+* Go to https://github.com/npm/libnpmteam/pulls and open a new pull request with your changes.
+* If your PR is connected to an open issue, add a line in your PR's description that says `Fixes: #123`, where `#123` is the number of the issue you're fixing.
+
+Once you've filed the PR:
+
+* Barring special circumstances, maintainers will not review PRs until all checks pass (Travis, AppVeyor, etc).
+* One or more maintainers will use GitHub's review feature to review your PR.
+* If the maintainer asks for any changes, edit your changes, push, and ask for another review. Additional tags (such as `needs-tests`) will be added depending on the review.
+* If the maintainer decides to pass on your PR, they will thank you for the contribution and explain why they won't be accepting the changes. That's ok! We still really appreciate you taking the time to do it, and we don't take that lightly. 💚
+* If your PR gets accepted, it will be marked as such, and merged into the `latest` branch soon after. Your contribution will be distributed to the masses next time the maintainers [tag a release](#tag-a-release).
+
+## Provide Support on Issues
+
+[Needs Collaborator](#join-the-project-team): none
+
+Helping out other users with their questions is a really awesome way of contributing to any community. It's not uncommon for most of the issues on an open source project to be support-related questions from users trying to understand something they ran into, or find their way around a known bug.
+
+Sometimes, the `support` label will be added to things that turn out to actually be other things, like bugs or feature requests. In that case, suss out the details with the person who filed the original issue, add a comment explaining what the bug is, and change the label from `support` to `bug` or `feature`. If you can't do this yourself, @mention a maintainer so they can do it.
+
+In order to help other folks out with their questions:
+
+* Go to the issue tracker and [filter open issues by the `support` label](https://github.com/npm/libnpmteam/issues?q=is%3Aopen+is%3Aissue+label%3Asupport).
+* Read through the list until you find something that you're familiar enough with to give an answer to.
+* Respond to the issue with whatever details are needed to clarify the question, or get more details about what's going on.
+* Once the discussion wraps up and things are clarified, either close the issue, or ask the original issue filer (or a maintainer) to close it for you.
+
+Some notes on picking up support issues:
+
+* Avoid responding to issues you don't know you can answer accurately.
+* As much as possible, try to refer to past issues with accepted answers. Link to them from your replies with the `#123` format. +* Be kind and patient with users -- often, folks who have run into confusing things might be upset or impatient. This is ok. Try to understand where they're coming from, and if you're too uncomfortable with the tone, feel free to stay away or withdraw from the issue. (note: if the user is outright hostile or is violating the CoC, [refer to the Code of Conduct](CODE_OF_CONDUCT.md) to resolve the conflict). + +## Label Issues + +[Needs Collaborator](#join-the-project-team): Issue Tracker + +One of the most important tasks in handling issues is labeling them usefully and accurately. All other tasks involving issues ultimately rely on the issue being classified in such a way that relevant parties looking to do their own tasks can find them quickly and easily. + +In order to label issues, [open up the list of unlabeled issues](https://github.com/npm/libnpmteam/issues?q=is%3Aopen+is%3Aissue+no%3Alabel) and, **from newest to oldest**, read through each one and apply issue labels according to the table below. If you're unsure about what label to apply, skip the issue and try the next one: don't feel obligated to label each and every issue yourself! + +Label | Apply When | Notes +--- | --- | --- +`bug` | Cases where the code (or documentation) is behaving in a way it wasn't intended to. | If something is happening that surprises the *user* but does not go against the way the code is designed, it should use the `enhancement` label. +`critical` | Added to `bug` issues if the problem described makes the code completely unusable in a common situation. | +`documentation` | Added to issues or pull requests that affect any of the documentation for the project. | Can be combined with other labels, such as `bug` or `enhancement`. +`duplicate` | Added to issues or PRs that refer to the exact same issue as another one that's been previously labeled. | Duplicate issues should be marked and closed right away, with a message referencing the issue it's a duplicate of (with `#123`) +`enhancement` | Added to [feature requests](#request-a-feature), PRs, or documentation issues that are purely additive: the code or docs currently work as expected, but a change is being requested or suggested. | +`help wanted` | Applied by [Committers](#join-the-project-team) to issues and PRs that they would like to get outside help for. Generally, this means it's lower priority for the maintainer team to itself implement, but that the community is encouraged to pick up if they so desire | Never applied on first-pass labeling. +`in-progress` | Applied by [Committers](#join-the-project-team) to PRs that are pending some work before they're ready for review. | The original PR submitter should @mention the team member that applied the label once the PR is complete. +`performance` | This issue or PR is directly related to improving performance. | +`refactor` | Added to issues or PRs that deal with cleaning up or modifying the project for the betterment of it. | +`starter` | Applied by [Committers](#join-the-project-team) to issues that they consider good introductions to the project for people who have not contributed before. These are not necessarily "easy", but rather focused around how much context is necessary in order to understand what needs to be done for this project in particular. | Existing project members are expected to stay away from these unless they increase in priority. 
+`support` | This issue is either asking a question about how to use the project, clarifying the reason for unexpected behavior, or possibly reporting a `bug` but does not have enough detail yet to determine whether it would count as such. | The label should be switched to `bug` if reliable reproduction steps are provided. Issues primarily with unintended configurations of a user's environment are not considered bugs, even if they cause crashes. +`tests` | This issue or PR either requests or adds primarily tests to the project. | If a PR is pending tests, that will be handled through the [PR review process](#review-pull-requests) +`wontfix` | Labelers may apply this label to issues that clearly have nothing at all to do with the project or are otherwise entirely outside of its scope/sphere of influence. [Committers](#join-the-project-team) may apply this label and close an issue or PR if they decide to pass on an otherwise relevant issue. | The issue or PR should be closed as soon as the label is applied, and a clear explanation provided of why the label was used. Contributors are free to contest the labeling, but the decision ultimately falls on committers as to whether to accept something or not. + +## Clean Up Issues and PRs + +[Needs Collaborator](#join-the-project-team): Issue Tracker + +Issues and PRs can go stale after a while. Maybe they're abandoned. Maybe the team will just plain not have time to address them any time soon. + +In these cases, they should be closed until they're brought up again or the interaction starts over. + +To clean up issues and PRs: + +* Search the issue tracker for issues or PRs, and add the term `updated:<=YYYY-MM-DD`, where the date is 30 days before today. +* Go through each issue *from oldest to newest*, and close them if **all of the following are true**: + * not opened by a maintainer + * not marked as `critical` + * not marked as `starter` or `help wanted` (these might stick around for a while, in general, as they're intended to be available) + * no explicit messages in the comments asking for it to be left open + * does not belong to a milestone +* Leave a message when closing saying "Cleaning up stale issue. Please reopen or ping us if and when you're ready to resume this. See https://github.com/npm/libnpmteam/blob/latest/CONTRIBUTING.md#clean-up-issues-and-prs for more details." + +## Review Pull Requests + +[Needs Collaborator](#join-the-project-team): Issue Tracker + +While anyone can comment on a PR, add feedback, etc, PRs are only *approved* by team members with Issue Tracker or higher permissions. + +PR reviews use [GitHub's own review feature](https://help.github.com/articles/about-pull-request-reviews/), which manages comments, approval, and review iteration. + +Some notes: + +* You may ask for minor changes ("nitpicks"), but consider whether they are really blockers to merging: try to err on the side of "approve, with comments". +* *ALL PULL REQUESTS* should be covered by a test: either by a previously-failing test, an existing test that covers the entire functionality of the submitted code, or new tests to verify any new/changed behavior. All tests must also pass and follow established conventions. Test coverage should not drop, unless the specific case is considered reasonable by maintainers. +* Please make sure you're familiar with the code or documentation being updated, unless it's a minor change (spellchecking, minor formatting, etc). 
You may @mention another project member who you think is better suited for the review, but still provide a non-approving review of your own. +* Be extra kind: people who submit code/doc contributions are putting themselves in a pretty vulnerable position, and have put time and care into what they've done (even if that's not obvious to you!) -- always respond with respect, be understanding, but don't feel like you need to sacrifice your standards for their sake, either. Just don't be a jerk about it? + +## Merge Pull Requests + +[Needs Collaborator](#join-the-project-team): Committer + +TBD - need to hash out a bit more of this process. + +## Tag A Release + +[Needs Collaborator](#join-the-project-team): Committer + +TBD - need to hash out a bit more of this process. The most important bit here is probably that all tests must pass, and tags must use [semver](https://semver.org). + +## Join the Project Team + +### Ways to Join + +There are many ways to contribute! Most of them don't require any official status unless otherwise noted. That said, there's a couple of positions that grant special repository abilities, and this section describes how they're granted and what they do. + +All of the below positions are granted based on the project team's needs, as well as their consensus opinion about whether they would like to work with the person and think that they would fit well into that position. The process is relatively informal, and it's likely that people who express interest in participating can just be granted the permissions they'd like. + +You can spot a collaborator on the repo by looking for the `[Collaborator]` or `[Owner]` tags next to their names. + +Permission | Description +--- | --- +Issue Tracker | Granted to contributors who express a strong interest in spending time on the project's issue tracker. These tasks are mainly [labeling issues](#label-issues), [cleaning up old ones](#clean-up-issues-and-prs), and [reviewing pull requests](#review-pull-requests), as well as all the usual things non-team-member contributors can do. Issue handlers should not merge pull requests, tag releases, or directly commit code themselves: that should still be done through the usual pull request process. Becoming an Issue Handler means the project team trusts you to understand enough of the team's process and context to implement it on the issue tracker. +Committer | Granted to contributors who want to handle the actual pull request merges, tagging new versions, etc. Committers should have a good level of familiarity with the codebase, and enough context to understand the implications of various changes, as well as a good sense of the will and expectations of the project team. +Admin/Owner | Granted to people ultimately responsible for the project, its community, etc. + +## Attribution + +This guide was generated using the WeAllJS `CONTRIBUTING.md` generator. [Make your own](https://npm.im/weallcontribute)! diff --git a/node_modules/libnpmteam/LICENSE b/node_modules/libnpmteam/LICENSE new file mode 100644 index 0000000000000..209e4477f39c1 --- /dev/null +++ b/node_modules/libnpmteam/LICENSE @@ -0,0 +1,13 @@ +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/libnpmteam/PULL_REQUEST_TEMPLATE b/node_modules/libnpmteam/PULL_REQUEST_TEMPLATE new file mode 100644 index 0000000000000..9471c6d325f7e --- /dev/null +++ b/node_modules/libnpmteam/PULL_REQUEST_TEMPLATE @@ -0,0 +1,7 @@ + diff --git a/node_modules/libnpmteam/README.md b/node_modules/libnpmteam/README.md new file mode 100644 index 0000000000000..e0e771c7fac86 --- /dev/null +++ b/node_modules/libnpmteam/README.md @@ -0,0 +1,185 @@ +# libnpmteam [![npm version](https://img.shields.io/npm/v/libnpmteam.svg)](https://npm.im/libnpmteam) [![license](https://img.shields.io/npm/l/libnpmteam.svg)](https://npm.im/libnpmteam) [![Travis](https://img.shields.io/travis/npm/libnpmteam/latest.svg)](https://travis-ci.org/npm/libnpmteam) [![AppVeyor](https://img.shields.io/appveyor/ci/zkat/libnpmteam/latest.svg)](https://ci.appveyor.com/project/zkat/libnpmteam) [![Coverage Status](https://coveralls.io/repos/github/npm/libnpmteam/badge.svg?branch=latest)](https://coveralls.io/github/npm/libnpmteam?branch=latest) + +[`libnpmteam`](https://github.com/npm/libnpmteam) is a Node.js +library that provides programmatic access to the guts of the npm CLI's `npm +team` command and its various subcommands. + +## Example + +```javascript +const team = require('libnpmteam') + +// List all teams for the @npm org. +console.log(await team.lsTeams('npm')) +``` + +## Table of Contents + +* [Installing](#install) +* [Example](#example) +* [Contributing](#contributing) +* [API](#api) + * [team opts](#opts) + * [`create()`](#create) + * [`destroy()`](#destroy) + * [`add()`](#add) + * [`rm()`](#rm) + * [`lsTeams()`](#ls-teams) + * [`lsTeams.stream()`](#ls-teams-stream) + * [`lsUsers()`](#ls-users) + * [`lsUsers.stream()`](#ls-users-stream) + +### Install + +`$ npm install libnpmteam` + +### Contributing + +The npm team enthusiastically welcomes contributions and project participation! +There's a bunch of things you can do if you want to contribute! The [Contributor +Guide](CONTRIBUTING.md) has all the information you need for everything from +reporting bugs to contributing entire new features. Please don't hesitate to +jump in if you'd like to, or even ask us questions if something isn't clear. + +All participants and maintainers in this project are expected to follow the [Code of +Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other. + +Please refer to the [Changelog](CHANGELOG.md) for project history details, too. + +Happy hacking! + +### API + +#### `opts` for `libnpmteam` commands + +`libnpmteam` uses [`npm-registry-fetch`](https://npm.im/npm-registry-fetch). +All options are passed through directly to that library, so please refer to [its +own `opts` +documentation](https://www.npmjs.com/package/npm-registry-fetch#fetch-options) +for options that can be passed in. + +A couple of options of note for those in a hurry: + +* `opts.token` - can be passed in and will be used as the authentication token for the registry. For other ways to pass in auth details, see the n-r-f docs.
+* `opts.otp` - certain operations will require an OTP token to be passed in. If a `libnpmteam` command fails with `err.code === 'EOTP'`, please retry the request with `{otp: <2fa token>}` +* `opts.Promise` - If you pass this in, the Promises returned by `libnpmteam` commands will use this Promise class instead. For example: `{Promise: require('bluebird')}` + +#### `> team.create(team, [opts]) -> Promise` + +Creates a team named `team`. Team names use the format `@<scope>:<name>`, with +the `@` being optional. + +Additionally, `opts.description` may be passed in to include a description. + +##### Example + +```javascript +await team.create('@npm:cli', {token: 'myregistrytoken'}) +// The @npm:cli team now exists. +``` + +#### `> team.destroy(team, [opts]) -> Promise` + +Destroys a team named `team`. Team names use the format `@<scope>:<name>`, with +the `@` being optional. + +##### Example + +```javascript +await team.destroy('@npm:cli', {token: 'myregistrytoken'}) +// The @npm:cli team has been destroyed. +``` + +#### `> team.add(user, team, [opts]) -> Promise` + +Adds `user` to `team`. + +##### Example + +```javascript +await team.add('zkat', '@npm:cli', {token: 'myregistrytoken'}) +// @zkat now belongs to the @npm:cli team. +``` + +#### `> team.rm(user, team, [opts]) -> Promise` + +Removes `user` from `team`. + +##### Example + +```javascript +await team.rm('zkat', '@npm:cli', {token: 'myregistrytoken'}) +// @zkat is no longer part of the @npm:cli team. +``` + +#### `> team.lsTeams(scope, [opts]) -> Promise` + +Resolves to an array of team names belonging to `scope`. + +##### Example + +```javascript +await team.lsTeams('@npm', {token: 'myregistrytoken'}) +=> +[ + 'npm:cli', + 'npm:web', + 'npm:registry', + 'npm:developers' +] +``` + +#### `> team.lsTeams.stream(scope, [opts]) -> Stream` + +Returns a stream of teams belonging to `scope`. + +For a Promise-based version of these results, see [`team.lsTeams()`](#ls-teams). + +##### Example + +```javascript +for await (let team of team.lsTeams.stream('@npm', {token: 'myregistrytoken'})) { + console.log(team) +} + +// outputs +// npm:cli +// npm:web +// npm:registry +// npm:developers +``` + +#### `> team.lsUsers(team, [opts]) -> Promise` + +Resolves to an array of usernames belonging to `team`. + +For a streamed version of these results, see [`team.lsUsers.stream()`](#ls-users-stream). + +##### Example + +```javascript +await team.lsUsers('@npm:cli', {token: 'myregistrytoken'}) +=> +[ + 'iarna', + 'zkat' +] +``` + +#### `> team.lsUsers.stream(team, [opts]) -> Stream` + +Returns a stream of usernames belonging to `team`. + +For a Promise-based version of these results, see [`team.lsUsers()`](#ls-users).
+ +##### Example + +```javascript +for await (let user of team.lsUsers.stream('@npm:cli', {token: 'myregistrytoken'})) { + console.log(user) +} + +// outputs +// iarna +// zkat +``` diff --git a/node_modules/libnpmteam/appveyor.yml b/node_modules/libnpmteam/appveyor.yml new file mode 100644 index 0000000000000..9cc64c58e02f9 --- /dev/null +++ b/node_modules/libnpmteam/appveyor.yml @@ -0,0 +1,22 @@ +environment: + matrix: + - nodejs_version: "10" + - nodejs_version: "9" + - nodejs_version: "8" + - nodejs_version: "6" + +platform: + - x64 + +install: + - ps: Install-Product node $env:nodejs_version $env:platform + - npm config set spin false + - npm install + +test_script: + - npm test + +matrix: + fast_finish: true + +build: off diff --git a/node_modules/libnpmteam/index.js b/node_modules/libnpmteam/index.js new file mode 100644 index 0000000000000..3d9c906791c87 --- /dev/null +++ b/node_modules/libnpmteam/index.js @@ -0,0 +1,106 @@ +'use strict' + +const eu = encodeURIComponent +const figgyPudding = require('figgy-pudding') +const getStream = require('get-stream') +const npmFetch = require('npm-registry-fetch') +const validate = require('aproba') + +const TeamConfig = figgyPudding({ + description: {}, + Promise: { default: () => Promise } +}) + +const cmd = module.exports = {} + +cmd.create = (entity, opts) => { + opts = TeamConfig(opts) + return pwrap(opts, () => { + const { scope, team } = splitEntity(entity) + validate('SSO', [scope, team, opts]) + return npmFetch.json(`/-/org/${eu(scope)}/team`, opts.concat({ + method: 'PUT', + scope, + body: { name: team, description: opts.description } + })) + }) +} + +cmd.destroy = (entity, opts) => { + opts = TeamConfig(opts) + return pwrap(opts, () => { + const { scope, team } = splitEntity(entity) + validate('SSO', [scope, team, opts]) + return npmFetch.json(`/-/team/${eu(scope)}/${eu(team)}`, opts.concat({ + method: 'DELETE', + scope + })) + }) +} + +cmd.add = (user, entity, opts) => { + opts = TeamConfig(opts) + return pwrap(opts, () => { + const { scope, team } = splitEntity(entity) + validate('SSO', [scope, team, opts]) + return npmFetch.json(`/-/team/${eu(scope)}/${eu(team)}/user`, opts.concat({ + method: 'PUT', + scope, + body: { user } + })) + }) +} + +cmd.rm = (user, entity, opts) => { + opts = TeamConfig(opts) + return pwrap(opts, () => { + const { scope, team } = splitEntity(entity) + validate('SSO', [scope, team, opts]) + return npmFetch.json(`/-/team/${eu(scope)}/${eu(team)}/user`, opts.concat({ + method: 'DELETE', + scope, + body: { user } + })) + }) +} + +cmd.lsTeams = (scope, opts) => { + opts = TeamConfig(opts) + return pwrap(opts, () => getStream.array(cmd.lsTeams.stream(scope, opts))) +} +cmd.lsTeams.stream = (scope, opts) => { + opts = TeamConfig(opts) + validate('SO', [scope, opts]) + return npmFetch.json.stream(`/-/org/${eu(scope)}/team`, '.*', opts.concat({ + query: { format: 'cli' } + })) +} + +cmd.lsUsers = (entity, opts) => { + opts = TeamConfig(opts) + return pwrap(opts, () => getStream.array(cmd.lsUsers.stream(entity, opts))) +} +cmd.lsUsers.stream = (entity, opts) => { + opts = TeamConfig(opts) + const { scope, team } = splitEntity(entity) + validate('SSO', [scope, team, opts]) + const uri = `/-/team/${eu(scope)}/${eu(team)}/user` + return npmFetch.json.stream(uri, '.*', opts.concat({ + query: { format: 'cli' } + })) +} + +cmd.edit = () => { + throw new Error('edit is not implemented yet') +} + +function splitEntity (entity = '') { + let [, scope, team] = entity.match(/^@?([^:]+):(.*)$/) || [] + return { scope, team } 
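+  // Illustrative note (not in the original source): '@npm:cli' and 'npm:cli'
+  // both split to { scope: 'npm', team: 'cli' }; a string with no ':' fails the
+  // match and yields undefined scope/team, which the callers' validate('SSO', ...)
+  // checks then reject with a validation error.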
+} + +function pwrap (opts, fn) { + return new opts.Promise((resolve, reject) => { + fn().then(resolve, reject) + }) +} diff --git a/node_modules/libnpmteam/package.json b/node_modules/libnpmteam/package.json new file mode 100644 index 0000000000000..47bcedb8dc676 --- /dev/null +++ b/node_modules/libnpmteam/package.json @@ -0,0 +1,67 @@ +{ + "_from": "libnpmteam@1.0.2", + "_id": "libnpmteam@1.0.2", + "_inBundle": false, + "_integrity": "sha512-p420vM28Us04NAcg1rzgGW63LMM6rwe+6rtZpfDxCcXxM0zUTLl7nPFEnRF3JfFBF5skF/yuZDUthTsHgde8QA==", + "_location": "/libnpmteam", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "libnpmteam@1.0.2", + "name": "libnpmteam", + "escapedName": "libnpmteam", + "rawSpec": "1.0.2", + "saveSpec": null, + "fetchSpec": "1.0.2" + }, + "_requiredBy": [ + "#USER", + "/" + ], + "_resolved": "https://registry.npmjs.org/libnpmteam/-/libnpmteam-1.0.2.tgz", + "_shasum": "8b48bcbb6ce70dd8150c950fcbdbf3feb6eec820", + "_spec": "libnpmteam@1.0.2", + "_where": "/Users/isaacs/dev/npm/cli", + "author": { + "name": "Kat Marchán", + "email": "kzm@zkat.tech" + }, + "bugs": { + "url": "https://github.com/npm/libnpmteam/issues" + }, + "bundleDependencies": false, + "dependencies": { + "aproba": "^2.0.0", + "figgy-pudding": "^3.4.1", + "get-stream": "^4.0.0", + "npm-registry-fetch": "^4.0.0" + }, + "deprecated": false, + "description": "npm Team management APIs", + "devDependencies": { + "nock": "^9.6.1", + "standard": "^12.0.0", + "standard-version": "*", + "tap": "*", + "weallbehave": "*", + "weallcontribute": "*" + }, + "homepage": "https://npmjs.com/package/libnpmteam", + "license": "ISC", + "name": "libnpmteam", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/libnpmteam.git" + }, + "scripts": { + "postrelease": "npm publish && git push --follow-tags", + "prerelease": "npm t", + "pretest": "standard", + "release": "standard-version -s", + "test": "tap -J --100 test/*.js", + "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", + "update-contrib": "weallcontribute -o . 
&& git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" + }, + "version": "1.0.2" +} diff --git a/node_modules/libnpmteam/test/index.js b/node_modules/libnpmteam/test/index.js new file mode 100644 index 0000000000000..7bc79d5181223 --- /dev/null +++ b/node_modules/libnpmteam/test/index.js @@ -0,0 +1,138 @@ +'use strict' + +const figgyPudding = require('figgy-pudding') +const getStream = require('get-stream') +const { test } = require('tap') +const tnock = require('./util/tnock.js') + +const team = require('../index.js') + +const REG = 'http://localhost:1337' +const OPTS = figgyPudding({})({ + registry: REG +}) + +test('create', t => { + tnock(t, REG).put( + '/-/org/foo/team', { name: 'cli' } + ).reply(201, { name: 'cli' }) + return team.create('@foo:cli', OPTS).then(ret => { + t.deepEqual(ret, { name: 'cli' }, 'request succeeded') + }) +}) + +test('create bad entity name', t => { + return team.create('go away', OPTS).then( + () => { throw new Error('should not succeed') }, + err => { t.ok(err, 'error on bad entity name') } + ) +}) + +test('create empty entity', t => { + return team.create(undefined, OPTS).then( + () => { throw new Error('should not succeed') }, + err => { t.ok(err, 'error on bad entity name') } + ) +}) + +test('create w/ description', t => { + tnock(t, REG).put('/-/org/foo/team', { + name: 'cli', + description: 'just some cool folx' + }).reply(201, { name: 'cli' }) + return team.create('@foo:cli', OPTS.concat({ + description: 'just some cool folx' + })).then(ret => { + t.deepEqual(ret, { name: 'cli' }, 'no desc in return') + }) +}) + +test('destroy', t => { + tnock(t, REG).delete( + '/-/team/foo/cli' + ).reply(204, {}) + return team.destroy('@foo:cli', OPTS).then(ret => { + t.deepEqual(ret, {}, 'request succeeded') + }) +}) + +test('add', t => { + tnock(t, REG).put( + '/-/team/foo/cli/user', { user: 'zkat' } + ).reply(201, {}) + return team.add('zkat', '@foo:cli', OPTS).then(ret => { + t.deepEqual(ret, {}, 'request succeeded') + }) +}) + +test('rm', t => { + tnock(t, REG).delete( + '/-/team/foo/cli/user', { user: 'zkat' } + ).reply(204, {}) + return team.rm('zkat', '@foo:cli', OPTS).then(ret => { + t.deepEqual(ret, {}, 'request succeeded') + }) +}) + +test('lsTeams', t => { + tnock(t, REG).get( + '/-/org/foo/team?format=cli' + ).reply(200, ['foo:bar', 'foo:cli']) + return team.lsTeams('foo', OPTS).then(ret => { + t.deepEqual(ret, ['foo:bar', 'foo:cli'], 'got teams') + }) +}) + +test('lsTeams error', t => { + tnock(t, REG).get( + '/-/org/foo/team?format=cli' + ).reply(500) + return team.lsTeams('foo', OPTS).then( + () => { throw new Error('should not succeed') }, + err => { t.equal(err.code, 'E500', 'got error code') } + ) +}) + +test('lsTeams.stream', t => { + tnock(t, REG).get( + '/-/org/foo/team?format=cli' + ).reply(200, ['foo:bar', 'foo:cli']) + return getStream.array(team.lsTeams.stream('foo', OPTS)).then(ret => { + t.deepEqual(ret, ['foo:bar', 'foo:cli'], 'got teams') + }) +}) + +test('lsUsers', t => { + tnock(t, REG).get( + '/-/team/foo/cli/user?format=cli' + ).reply(500) + return team.lsUsers('@foo:cli', OPTS).then( + () => { throw new Error('should not succeed') }, + err => { t.equal(err.code, 'E500', 'got error code') } + ) +}) + +test('lsUsers error', t => { + tnock(t, REG).get( + '/-/team/foo/cli/user?format=cli' + ).reply(200, ['iarna', 'zkat']) + return team.lsUsers('@foo:cli', OPTS).then(ret => { + t.deepEqual(ret, ['iarna', 'zkat'], 'got team members') + }) +}) + +test('lsUsers.stream', t => { + tnock(t, REG).get( + 
'/-/team/foo/cli/user?format=cli' + ).reply(200, ['iarna', 'zkat']) + return getStream.array(team.lsUsers.stream('@foo:cli', OPTS)).then(ret => { + t.deepEqual(ret, ['iarna', 'zkat'], 'got team members') + }) +}) + +test('edit', t => { + t.throws(() => { + team.edit() + }, /not implemented/) + t.done() +}) diff --git a/node_modules/libnpmteam/test/util/tnock.js b/node_modules/libnpmteam/test/util/tnock.js new file mode 100644 index 0000000000000..00b6e160e1019 --- /dev/null +++ b/node_modules/libnpmteam/test/util/tnock.js @@ -0,0 +1,12 @@ +'use strict' + +const nock = require('nock') + +module.exports = tnock +function tnock (t, host) { + const server = nock(host) + t.tearDown(function () { + server.done() + }) + return server +} diff --git a/node_modules/libnpx/CHANGELOG.md b/node_modules/libnpx/CHANGELOG.md index 5fa91fac96ae7..74da62235d696 100644 --- a/node_modules/libnpx/CHANGELOG.md +++ b/node_modules/libnpx/CHANGELOG.md @@ -2,61 +2,73 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [10.2.2](https://github.com/npm/npx/compare/v10.2.1...v10.2.2) (2020-01-28) + + +### Bug Fixes + +* correct Kat's github url ([9a23db1](https://github.com/npm/npx/commit/9a23db1)) +* install latest npm on travis for node 6 ([e0eb3cb](https://github.com/npm/npx/commit/e0eb3cb)) +* Update changelog to fix old issue links ([3733137](https://github.com/npm/npx/commit/3733137)) + + + -# [10.2.0](https://github.com/zkat/npx/compare/v10.1.1...v10.2.0) (2018-04-13) +# [10.2.0](https://github.com/npm/npx/compare/v10.1.1...v10.2.0) (2018-04-13) ### Bug Fixes -* **i18n:** fix korean; 쉘 -> 셸 ([#163](https://github.com/zkat/npx/issues/163)) ([11d9fe0](https://github.com/zkat/npx/commit/11d9fe0)) -* **spawn:** spawn child processes with node without relying on the shebang. ([#174](https://github.com/zkat/npx/issues/174)) ([cba97bb](https://github.com/zkat/npx/commit/cba97bb)) -* **windows:** Allow spaces in the node path when using --node-arg ([#173](https://github.com/zkat/npx/issues/173)) ([fe0d48a](https://github.com/zkat/npx/commit/fe0d48a)), closes [#170](https://github.com/zkat/npx/issues/170) +* **i18n:** fix korean; 쉘 -> 셸 ([#163](https://github.com/zkat/npx/issues/163)) ([11d9fe0](https://github.com/npm/npx/commit/11d9fe0)) +* **spawn:** spawn child processes with node without relying on the shebang. 
([#174](https://github.com/zkat/npx/issues/174)) ([cba97bb](https://github.com/npm/npx/commit/cba97bb)) +* **windows:** Allow spaces in the node path when using --node-arg ([#173](https://github.com/zkat/npx/issues/173)) ([fe0d48a](https://github.com/npm/npx/commit/fe0d48a)), closes [#170](https://github.com/zkat/npx/issues/170) ### Features -* **i18n:** add translation ([#159](https://github.com/zkat/npx/issues/159)) ([5da008b](https://github.com/zkat/npx/commit/5da008b)) +* **i18n:** add translation ([#159](https://github.com/zkat/npx/issues/159)) ([5da008b](https://github.com/npm/npx/commit/5da008b)) -## [10.1.1](https://github.com/zkat/npx/compare/v10.1.0...v10.1.1) (2018-04-12) +## [10.1.1](https://github.com/npm/npx/compare/v10.1.0...v10.1.1) (2018-04-12) -# [10.1.0](https://github.com/zkat/npx/compare/v10.0.1...v10.1.0) (2018-04-12) +# [10.1.0](https://github.com/npm/npx/compare/v10.0.1...v10.1.0) (2018-04-12) ### Features -* **spawn:** add --always-spawn to opt out of process takeover optimization feature ([#172](https://github.com/zkat/npx/issues/172)) ([c0d6abc](https://github.com/zkat/npx/commit/c0d6abc)) +* **spawn:** add --always-spawn to opt out of process takeover optimization feature ([#172](https://github.com/zkat/npx/issues/172)) ([c0d6abc](https://github.com/npm/npx/commit/c0d6abc)) -## [10.0.1](https://github.com/zkat/npx/compare/v10.0.0...v10.0.1) (2018-03-08) +## [10.0.1](https://github.com/npm/npx/compare/v10.0.0...v10.0.1) (2018-03-08) ### Bug Fixes -* **i18n:** Improve French localization ([#158](https://github.com/zkat/npx/issues/158)) ([c88823e](https://github.com/zkat/npx/commit/c88823e)) -* **windows:** on Windows, throw useful error when package contains no binaries([#142](https://github.com/zkat/npx/issues/142)) ([a69276e](https://github.com/zkat/npx/commit/a69276e)), closes [#137](https://github.com/zkat/npx/issues/137) +* **i18n:** Improve French localization ([#158](https://github.com/zkat/npx/issues/158)) ([c88823e](https://github.com/npm/npx/commit/c88823e)) +* **windows:** on Windows, throw useful error when package contains no binaries([#142](https://github.com/zkat/npx/issues/142)) ([a69276e](https://github.com/npm/npx/commit/a69276e)), closes [#137](https://github.com/zkat/npx/issues/137) -# [10.0.0](https://github.com/zkat/npx/compare/v9.7.1...v10.0.0) (2018-03-08) +# [10.0.0](https://github.com/npm/npx/compare/v9.7.1...v10.0.0) (2018-03-08) ### Bug Fixes -* **i18n:** Fix Korean locale ([#130](https://github.com/zkat/npx/issues/130)) ([752db48](https://github.com/zkat/npx/commit/752db48)) -* **index:** remove extraneous logging on Windows ([#136](https://github.com/zkat/npx/issues/136)) ([357e6ab](https://github.com/zkat/npx/commit/357e6ab)), closes [#131](https://github.com/zkat/npx/issues/131) -* **license:** change npx license to ISC ([a617d7b](https://github.com/zkat/npx/commit/a617d7b)) -* **parse-args:** fix version thing for yargs ([30677ed](https://github.com/zkat/npx/commit/30677ed)) -* **prefix:** Handle node_modules without package.json ([#128](https://github.com/zkat/npx/issues/128)) ([f64ae43](https://github.com/zkat/npx/commit/f64ae43)), closes [/github.com/babel/babel/issues/4066#issuecomment-336705199](https://github.com//github.com/babel/babel/issues/4066/issues/issuecomment-336705199) -* **standard:** get things in line with standard 11 ([6cf8e88](https://github.com/zkat/npx/commit/6cf8e88)) +* **i18n:** Fix Korean locale ([#130](https://github.com/zkat/npx/issues/130)) ([752db48](https://github.com/npm/npx/commit/752db48)) +* 
**index:** remove extraneous logging on Windows ([#136](https://github.com/zkat/npx/issues/136)) ([357e6ab](https://github.com/npm/npx/commit/357e6ab)), closes [#131](https://github.com/zkat/npx/issues/131) +* **license:** change npx license to ISC ([a617d7b](https://github.com/npm/npx/commit/a617d7b)) +* **parse-args:** fix version thing for yargs ([30677ed](https://github.com/npm/npx/commit/30677ed)) +* **prefix:** Handle node_modules without package.json ([#128](https://github.com/zkat/npx/issues/128)) ([f64ae43](https://github.com/npm/npx/commit/f64ae43)), closes [/github.com/babel/babel/issues/4066#issuecomment-336705199](https://github.com//github.com/babel/babel/issues/4066/issues/issuecomment-336705199) +* **standard:** get things in line with standard 11 ([6cf8e88](https://github.com/npm/npx/commit/6cf8e88)) ### BREAKING CHANGES @@ -66,271 +78,271 @@ All notable changes to this project will be documented in this file. See [standa -## [9.7.1](https://github.com/zkat/npx/compare/v9.7.0...v9.7.1) (2017-10-19) +## [9.7.1](https://github.com/npm/npx/compare/v9.7.0...v9.7.1) (2017-10-19) ### Bug Fixes -* **main:** err... oops? ([f24b4e3](https://github.com/zkat/npx/commit/f24b4e3)) +* **main:** err... oops? ([f24b4e3](https://github.com/npm/npx/commit/f24b4e3)) -# [9.7.0](https://github.com/zkat/npx/compare/v9.6.0...v9.7.0) (2017-10-19) +# [9.7.0](https://github.com/npm/npx/compare/v9.6.0...v9.7.0) (2017-10-19) ### Bug Fixes -* **exec:** fixed unix binary pathing issues (#120) ([f80a970](https://github.com/zkat/npx/commit/f80a970)), closes [#120](https://github.com/zkat/npx/issues/120) +* **exec:** fixed unix binary pathing issues (#120) ([f80a970](https://github.com/npm/npx/commit/f80a970)), closes [#120](https://github.com/zkat/npx/issues/120) ### Features -* **child:** add opts.installerStdio (#126) ([ade03f7](https://github.com/zkat/npx/commit/ade03f7)) +* **child:** add opts.installerStdio (#126) ([ade03f7](https://github.com/npm/npx/commit/ade03f7)) -# [9.6.0](https://github.com/zkat/npx/compare/v9.5.0...v9.6.0) (2017-08-17) +# [9.6.0](https://github.com/npm/npx/compare/v9.5.0...v9.6.0) (2017-08-17) ### Features -* **i18n:** add Arabic translation (#111) ([3c5b99a](https://github.com/zkat/npx/commit/3c5b99a)) -* **i18n:** add Dutch (#108) ([ed116fd](https://github.com/zkat/npx/commit/ed116fd)) +* **i18n:** add Arabic translation (#111) ([3c5b99a](https://github.com/npm/npx/commit/3c5b99a)) +* **i18n:** add Dutch (#108) ([ed116fd](https://github.com/npm/npx/commit/ed116fd)) -# [9.5.0](https://github.com/zkat/npx/compare/v9.4.1...v9.5.0) (2017-07-28) +# [9.5.0](https://github.com/npm/npx/compare/v9.4.1...v9.5.0) (2017-07-28) ### Features -* **i18n:** add Polish translations (#99) ([8442f59](https://github.com/zkat/npx/commit/8442f59)) +* **i18n:** add Polish translations (#99) ([8442f59](https://github.com/npm/npx/commit/8442f59)) -## [9.4.1](https://github.com/zkat/npx/compare/v9.4.0...v9.4.1) (2017-07-21) +## [9.4.1](https://github.com/npm/npx/compare/v9.4.0...v9.4.1) (2017-07-21) ### Bug Fixes -* **i18n:** fix filename for uk.json locale ([2c770e4](https://github.com/zkat/npx/commit/2c770e4)) +* **i18n:** fix filename for uk.json locale ([2c770e4](https://github.com/npm/npx/commit/2c770e4)) -# [9.4.0](https://github.com/zkat/npx/compare/v9.3.2...v9.4.0) (2017-07-21) +# [9.4.0](https://github.com/npm/npx/compare/v9.3.2...v9.4.0) (2017-07-21) ### Bug Fixes -* **i18n:** minor fixes to ru locale (#92) ([f4d5051](https://github.com/zkat/npx/commit/f4d5051)), closes 
[#92](https://github.com/zkat/npx/issues/92) +* **i18n:** minor fixes to ru locale (#92) ([f4d5051](https://github.com/npm/npx/commit/f4d5051)), closes [#92](https://github.com/zkat/npx/issues/92) ### Features -* **i18n:** `no` locale fallback for Norwegian bokmål ⚠️ In case of weird setups ⚠️ (#91) ([74f0e4c](https://github.com/zkat/npx/commit/74f0e4c)) -* **i18n:** add Bahasa Indonesia locale (#95) ([80dceeb](https://github.com/zkat/npx/commit/80dceeb)) -* **i18n:** add serbian translation (#96) ([040de7a](https://github.com/zkat/npx/commit/040de7a)) -* **i18n:** add Ukrainian locale (#93) ([9a3ef33](https://github.com/zkat/npx/commit/9a3ef33)) -* **i18n:** Added Norwegian (bokmål and nynorsk) translations (#90) ([6c5c733](https://github.com/zkat/npx/commit/6c5c733)) +* **i18n:** `no` locale fallback for Norwegian bokmål ⚠️ In case of weird setups ⚠️ (#91) ([74f0e4c](https://github.com/npm/npx/commit/74f0e4c)) +* **i18n:** add Bahasa Indonesia locale (#95) ([80dceeb](https://github.com/npm/npx/commit/80dceeb)) +* **i18n:** add serbian translation (#96) ([040de7a](https://github.com/npm/npx/commit/040de7a)) +* **i18n:** add Ukrainian locale (#93) ([9a3ef33](https://github.com/npm/npx/commit/9a3ef33)) +* **i18n:** Added Norwegian (bokmål and nynorsk) translations (#90) ([6c5c733](https://github.com/npm/npx/commit/6c5c733)) -## [9.3.2](https://github.com/zkat/npx/compare/v9.3.1...v9.3.2) (2017-07-17) +## [9.3.2](https://github.com/npm/npx/compare/v9.3.1...v9.3.2) (2017-07-17) ### Bug Fixes -* **exec:** detect a wider range of shebang lines for node scripts (#89) ([1841b6f](https://github.com/zkat/npx/commit/1841b6f)) -* **windows:** escape spawn args because windows is picky (#87) ([314e5eb](https://github.com/zkat/npx/commit/314e5eb)) -* **windows:** get magic shim detection working on Windows (#88) ([255aeeb](https://github.com/zkat/npx/commit/255aeeb)) +* **exec:** detect a wider range of shebang lines for node scripts (#89) ([1841b6f](https://github.com/npm/npx/commit/1841b6f)) +* **windows:** escape spawn args because windows is picky (#87) ([314e5eb](https://github.com/npm/npx/commit/314e5eb)) +* **windows:** get magic shim detection working on Windows (#88) ([255aeeb](https://github.com/npm/npx/commit/255aeeb)) -## [9.3.1](https://github.com/zkat/npx/compare/v9.3.0...v9.3.1) (2017-07-17) +## [9.3.1](https://github.com/npm/npx/compare/v9.3.0...v9.3.1) (2017-07-17) ### Bug Fixes -* **deps:** update to npm[@5](https://github.com/5).3.0 ([2b14de2](https://github.com/zkat/npx/commit/2b14de2)) +* **deps:** update to npm[@5](https://github.com/5).3.0 ([2b14de2](https://github.com/npm/npx/commit/2b14de2)) -# [9.3.0](https://github.com/zkat/npx/compare/v9.2.3...v9.3.0) (2017-07-17) +# [9.3.0](https://github.com/npm/npx/compare/v9.2.3...v9.3.0) (2017-07-17) ### Features -* **i18n:** add Korean locale (#86) ([3655314](https://github.com/zkat/npx/commit/3655314)) +* **i18n:** add Korean locale (#86) ([3655314](https://github.com/npm/npx/commit/3655314)) -## [9.2.3](https://github.com/zkat/npx/compare/v9.2.2...v9.2.3) (2017-07-17) +## [9.2.3](https://github.com/npm/npx/compare/v9.2.2...v9.2.3) (2017-07-17) ### Bug Fixes -* **paths:** support npm/npx paths with spaces in them ([8f3b829](https://github.com/zkat/npx/commit/8f3b829)) +* **paths:** support npm/npx paths with spaces in them ([8f3b829](https://github.com/npm/npx/commit/8f3b829)) -## [9.2.2](https://github.com/zkat/npx/compare/v9.2.1...v9.2.2) (2017-07-15) +## [9.2.2](https://github.com/npm/npx/compare/v9.2.1...v9.2.2) (2017-07-15) ### 
Bug Fixes -* **npm:** escape path to npm, too ([333d2ff](https://github.com/zkat/npx/commit/333d2ff)) +* **npm:** escape path to npm, too ([333d2ff](https://github.com/npm/npx/commit/333d2ff)) -## [9.2.1](https://github.com/zkat/npx/compare/v9.2.0...v9.2.1) (2017-07-14) +## [9.2.1](https://github.com/npm/npx/compare/v9.2.0...v9.2.1) (2017-07-14) ### Bug Fixes -* **windows:** fixed windows binary pathing issues ([761dfe9](https://github.com/zkat/npx/commit/761dfe9)) +* **windows:** fixed windows binary pathing issues ([761dfe9](https://github.com/npm/npx/commit/761dfe9)) -# [9.2.0](https://github.com/zkat/npx/compare/v9.1.0...v9.2.0) (2017-07-14) +# [9.2.0](https://github.com/npm/npx/compare/v9.1.0...v9.2.0) (2017-07-14) ### Bug Fixes -* **binpath:** fix calling binaries from subdirectories ([f185d0d](https://github.com/zkat/npx/commit/f185d0d)) -* **i18n:** Fix typos in french locale (#78) ([f277fc7](https://github.com/zkat/npx/commit/f277fc7)), closes [#78](https://github.com/zkat/npx/issues/78) +* **binpath:** fix calling binaries from subdirectories ([f185d0d](https://github.com/npm/npx/commit/f185d0d)) +* **i18n:** Fix typos in french locale (#78) ([f277fc7](https://github.com/npm/npx/commit/f277fc7)), closes [#78](https://github.com/zkat/npx/issues/78) ### Features -* **i18n:** Add German translations (#79) ([c81e26d](https://github.com/zkat/npx/commit/c81e26d)) -* **i18n:** add zh_TW translation (#80) ([98288d8](https://github.com/zkat/npx/commit/98288d8)) +* **i18n:** Add German translations (#79) ([c81e26d](https://github.com/npm/npx/commit/c81e26d)) +* **i18n:** add zh_TW translation (#80) ([98288d8](https://github.com/npm/npx/commit/98288d8)) -# [9.1.0](https://github.com/zkat/npx/compare/v9.0.7...v9.1.0) (2017-07-12) +# [9.1.0](https://github.com/npm/npx/compare/v9.0.7...v9.1.0) (2017-07-12) ### Bug Fixes -* **call:** only npm run env if package.json exists ([370f395](https://github.com/zkat/npx/commit/370f395)) -* **i18n:** Fix grammar and spelling for de.json (#63) ([b14020f](https://github.com/zkat/npx/commit/b14020f)), closes [#63](https://github.com/zkat/npx/issues/63) -* **i18n:** wording revisions for Brazilian Portuguese (#75) ([b5dc536](https://github.com/zkat/npx/commit/b5dc536)) -* **npm:** path directly to the npm-cli.js script ([d531206](https://github.com/zkat/npx/commit/d531206)) -* **rimraf:** fix rimraf.sync is not a function issue ([d2ecba3](https://github.com/zkat/npx/commit/d2ecba3)) -* **windows:** get npx working well on Windows again (#69) ([6cfb8de](https://github.com/zkat/npx/commit/6cfb8de)), closes [#60](https://github.com/zkat/npx/issues/60) [#58](https://github.com/zkat/npx/issues/58) [#62](https://github.com/zkat/npx/issues/62) +* **call:** only npm run env if package.json exists ([370f395](https://github.com/npm/npx/commit/370f395)) +* **i18n:** Fix grammar and spelling for de.json (#63) ([b14020f](https://github.com/npm/npx/commit/b14020f)), closes [#63](https://github.com/zkat/npx/issues/63) +* **i18n:** wording revisions for Brazilian Portuguese (#75) ([b5dc536](https://github.com/npm/npx/commit/b5dc536)) +* **npm:** path directly to the npm-cli.js script ([d531206](https://github.com/npm/npx/commit/d531206)) +* **rimraf:** fix rimraf.sync is not a function issue ([d2ecba3](https://github.com/npm/npx/commit/d2ecba3)) +* **windows:** get npx working well on Windows again (#69) ([6cfb8de](https://github.com/npm/npx/commit/6cfb8de)), closes [#60](https://github.com/zkat/npx/issues/60) [#58](https://github.com/zkat/npx/issues/58) 
[#62](https://github.com/zkat/npx/issues/62) ### Features -* **i18n:** add Czech translation (#76) ([8a0b3f6](https://github.com/zkat/npx/commit/8a0b3f6)) -* **i18n:** Add Turkish translation (#73) ([26e5edf](https://github.com/zkat/npx/commit/26e5edf)) -* **i18n:** Added support for Italian language (#71) ([6883e75](https://github.com/zkat/npx/commit/6883e75)) -* **i18n:** Fix Romanian translation (#70) ([fd6bbcf](https://github.com/zkat/npx/commit/fd6bbcf)), closes [#70](https://github.com/zkat/npx/issues/70) -* **node:** add --node-arg support to pass flags to node for script binaries (#77) ([65665bd](https://github.com/zkat/npx/commit/65665bd)) +* **i18n:** add Czech translation (#76) ([8a0b3f6](https://github.com/npm/npx/commit/8a0b3f6)) +* **i18n:** Add Turkish translation (#73) ([26e5edf](https://github.com/npm/npx/commit/26e5edf)) +* **i18n:** Added support for Italian language (#71) ([6883e75](https://github.com/npm/npx/commit/6883e75)) +* **i18n:** Fix Romanian translation (#70) ([fd6bbcf](https://github.com/npm/npx/commit/fd6bbcf)), closes [#70](https://github.com/zkat/npx/issues/70) +* **node:** add --node-arg support to pass flags to node for script binaries (#77) ([65665bd](https://github.com/npm/npx/commit/65665bd)) -## [9.0.7](https://github.com/zkat/npx/compare/v9.0.6...v9.0.7) (2017-07-11) +## [9.0.7](https://github.com/npm/npx/compare/v9.0.6...v9.0.7) (2017-07-11) ### Bug Fixes -* **i18n:** Fix some Catalan translations (#59) ([11c8a19](https://github.com/zkat/npx/commit/11c8a19)), closes [#59](https://github.com/zkat/npx/issues/59) +* **i18n:** Fix some Catalan translations (#59) ([11c8a19](https://github.com/npm/npx/commit/11c8a19)), closes [#59](https://github.com/zkat/npx/issues/59) -## [9.0.6](https://github.com/zkat/npx/compare/v9.0.5...v9.0.6) (2017-07-11) +## [9.0.6](https://github.com/npm/npx/compare/v9.0.5...v9.0.6) (2017-07-11) ### Bug Fixes -* **auto-fallback:** fix syntax error in bash/zsh auto-fallback ([d8b19db](https://github.com/zkat/npx/commit/d8b19db)) +* **auto-fallback:** fix syntax error in bash/zsh auto-fallback ([d8b19db](https://github.com/npm/npx/commit/d8b19db)) -## [9.0.5](https://github.com/zkat/npx/compare/v9.0.4...v9.0.5) (2017-07-11) +## [9.0.5](https://github.com/npm/npx/compare/v9.0.4...v9.0.5) (2017-07-11) ### Bug Fixes -* **npx:** something went wrong with the 9.0.4 build and bundledeps ([75fc436](https://github.com/zkat/npx/commit/75fc436)) +* **npx:** something went wrong with the 9.0.4 build and bundledeps ([75fc436](https://github.com/npm/npx/commit/75fc436)) -## [9.0.4](https://github.com/zkat/npx/compare/v9.0.3...v9.0.4) (2017-07-11) +## [9.0.4](https://github.com/npm/npx/compare/v9.0.3...v9.0.4) (2017-07-11) ### Bug Fixes -* **auto-fallback:** prevent infinite loop if npx disappears ([6c24e58](https://github.com/zkat/npx/commit/6c24e58)) -* **bin:** add repository and more detailed author info ([906574e](https://github.com/zkat/npx/commit/906574e)) -* **bin:** pin the npx bin's dependencies ([ae62f7a](https://github.com/zkat/npx/commit/ae62f7a)) -* **build:** make sure changelog and license are copied to bin ([4fbb599](https://github.com/zkat/npx/commit/4fbb599)) -* **deps:** stop bundling deps in libnpx itself ([c3e56e9](https://github.com/zkat/npx/commit/c3e56e9)) -* **errors:** print command not found for packages without valid binaries ([9b24359](https://github.com/zkat/npx/commit/9b24359)) -* **help:** --no-install help text was contradicting itself ([9d96f5e](https://github.com/zkat/npx/commit/9d96f5e)) -* **install:** 
prevent concurrent npx runs from clobbering each other ([6b35c91](https://github.com/zkat/npx/commit/6b35c91)) -* **npx:** npx npx npx npx npx npx npx npx npx works again ([875d4cd](https://github.com/zkat/npx/commit/875d4cd)) -* **updater:** dependency injection for update-notifier target ([c3027a9](https://github.com/zkat/npx/commit/c3027a9)) -* **updater:** ignore some kinds of update-notifier errors ([7631bbe](https://github.com/zkat/npx/commit/7631bbe)) +* **auto-fallback:** prevent infinite loop if npx disappears ([6c24e58](https://github.com/npm/npx/commit/6c24e58)) +* **bin:** add repository and more detailed author info ([906574e](https://github.com/npm/npx/commit/906574e)) +* **bin:** pin the npx bin's dependencies ([ae62f7a](https://github.com/npm/npx/commit/ae62f7a)) +* **build:** make sure changelog and license are copied to bin ([4fbb599](https://github.com/npm/npx/commit/4fbb599)) +* **deps:** stop bundling deps in libnpx itself ([c3e56e9](https://github.com/npm/npx/commit/c3e56e9)) +* **errors:** print command not found for packages without valid binaries ([9b24359](https://github.com/npm/npx/commit/9b24359)) +* **help:** --no-install help text was contradicting itself ([9d96f5e](https://github.com/npm/npx/commit/9d96f5e)) +* **install:** prevent concurrent npx runs from clobbering each other ([6b35c91](https://github.com/npm/npx/commit/6b35c91)) +* **npx:** npx npx npx npx npx npx npx npx npx works again ([875d4cd](https://github.com/npm/npx/commit/875d4cd)) +* **updater:** dependency injection for update-notifier target ([c3027a9](https://github.com/npm/npx/commit/c3027a9)) +* **updater:** ignore some kinds of update-notifier errors ([7631bbe](https://github.com/npm/npx/commit/7631bbe)) -## [9.0.3](https://github.com/zkat/npx/compare/v9.0.2...v9.0.3) (2017-07-08) +## [9.0.3](https://github.com/npm/npx/compare/v9.0.2...v9.0.3) (2017-07-08) ### Bug Fixes -* **version:** hand version to yargs directly ([e0b5eeb](https://github.com/zkat/npx/commit/e0b5eeb)) +* **version:** hand version to yargs directly ([e0b5eeb](https://github.com/npm/npx/commit/e0b5eeb)) -## [9.0.2](https://github.com/zkat/npx/compare/v9.0.1...v9.0.2) (2017-07-08) +## [9.0.2](https://github.com/npm/npx/compare/v9.0.1...v9.0.2) (2017-07-08) ### Bug Fixes -* **manpage:** fix manpage for real because files syntax is weird ([9145e2a](https://github.com/zkat/npx/commit/9145e2a)) +* **manpage:** fix manpage for real because files syntax is weird ([9145e2a](https://github.com/npm/npx/commit/9145e2a)) -## [9.0.1](https://github.com/zkat/npx/compare/v9.0.0...v9.0.1) (2017-07-08) +## [9.0.1](https://github.com/npm/npx/compare/v9.0.0...v9.0.1) (2017-07-08) ### Bug Fixes -* **man:** make sure manpage is used in npx bin ([704b94f](https://github.com/zkat/npx/commit/704b94f)) +* **man:** make sure manpage is used in npx bin ([704b94f](https://github.com/npm/npx/commit/704b94f)) -# [9.0.0](https://github.com/zkat/npx/compare/v8.1.1...v9.0.0) (2017-07-08) +# [9.0.0](https://github.com/npm/npx/compare/v8.1.1...v9.0.0) (2017-07-08) ### Features -* **libnpx:** libify main npx codebase ([643f58e](https://github.com/zkat/npx/commit/643f58e)) -* **npx:** create a new binary for standalone publishing ([da5a3b7](https://github.com/zkat/npx/commit/da5a3b7)) +* **libnpx:** libify main npx codebase ([643f58e](https://github.com/npm/npx/commit/643f58e)) +* **npx:** create a new binary for standalone publishing ([da5a3b7](https://github.com/npm/npx/commit/da5a3b7)) ### BREAKING CHANGES @@ -342,49 +354,49 @@ and a separate project will 
take over the role of the main `npx` binary. -## [8.1.1](https://github.com/zkat/npx/compare/v8.1.0...v8.1.1) (2017-07-06) +## [8.1.1](https://github.com/npm/npx/compare/v8.1.0...v8.1.1) (2017-07-06) ### Bug Fixes -* **deps:** bump all deps ([6ea24bf](https://github.com/zkat/npx/commit/6ea24bf)) -* **npm:** bump npm to 5.1.0 for a bunch of fixes ([18e4587](https://github.com/zkat/npx/commit/18e4587)) +* **deps:** bump all deps ([6ea24bf](https://github.com/npm/npx/commit/6ea24bf)) +* **npm:** bump npm to 5.1.0 for a bunch of fixes ([18e4587](https://github.com/npm/npx/commit/18e4587)) -# [8.1.0](https://github.com/zkat/npx/compare/v8.0.1...v8.1.0) (2017-06-27) +# [8.1.0](https://github.com/npm/npx/compare/v8.0.1...v8.1.0) (2017-06-27) ### Bug Fixes -* **i18n:** minor tweaks to ja.json (#46) ([1ed63c2](https://github.com/zkat/npx/commit/1ed63c2)) +* **i18n:** minor tweaks to ja.json (#46) ([1ed63c2](https://github.com/npm/npx/commit/1ed63c2)) ### Features -* **i18n:** Update pt_BR.json (#51) ([d292f22](https://github.com/zkat/npx/commit/d292f22)) +* **i18n:** Update pt_BR.json (#51) ([d292f22](https://github.com/npm/npx/commit/d292f22)) -## [8.0.1](https://github.com/zkat/npx/compare/v8.0.0...v8.0.1) (2017-06-27) +## [8.0.1](https://github.com/npm/npx/compare/v8.0.0...v8.0.1) (2017-06-27) ### Bug Fixes -* **npm:** bump npm version for more bugfixes ([30711a8](https://github.com/zkat/npx/commit/30711a8)) -* **npm:** Use --parseable option to work around output quirks ([8cb75a2](https://github.com/zkat/npx/commit/8cb75a2)) +* **npm:** bump npm version for more bugfixes ([30711a8](https://github.com/npm/npx/commit/30711a8)) +* **npm:** Use --parseable option to work around output quirks ([8cb75a2](https://github.com/npm/npx/commit/8cb75a2)) -# [8.0.0](https://github.com/zkat/npx/compare/v7.0.0...v8.0.0) (2017-06-24) +# [8.0.0](https://github.com/npm/npx/compare/v7.0.0...v8.0.0) (2017-06-24) ### Features -* **exec:** auto-guess binaries when different from pkg name ([139c434](https://github.com/zkat/npx/commit/139c434)) +* **exec:** auto-guess binaries when different from pkg name ([139c434](https://github.com/npm/npx/commit/139c434)) ### BREAKING CHANGES @@ -398,17 +410,17 @@ multiple non-matching binaries, but that should be rare. -# [7.0.0](https://github.com/zkat/npx/compare/v6.2.0...v7.0.0) (2017-06-24) +# [7.0.0](https://github.com/npm/npx/compare/v6.2.0...v7.0.0) (2017-06-24) ### Bug Fixes -* **win32:** improve win32 situation a bit (#50) ([b7ad934](https://github.com/zkat/npx/commit/b7ad934)) +* **win32:** improve win32 situation a bit (#50) ([b7ad934](https://github.com/npm/npx/commit/b7ad934)) ### Features -* **local:** improve the behavior when calling ./local paths (#48) ([2e418d1](https://github.com/zkat/npx/commit/2e418d1)) +* **local:** improve the behavior when calling ./local paths (#48) ([2e418d1](https://github.com/npm/npx/commit/2e418d1)) ### BREAKING CHANGES @@ -422,71 +434,71 @@ of all the various cases and how each of them is handled. -# [6.2.0](https://github.com/zkat/npx/compare/v6.1.0...v6.2.0) (2017-06-23) +# [6.2.0](https://github.com/npm/npx/compare/v6.1.0...v6.2.0) (2017-06-23) ### Bug Fixes -* **child:** iron out a few crinkles and add tests ([b3b5ef6](https://github.com/zkat/npx/commit/b3b5ef6)) -* **execCmd:** only reuse the current process if no shell passed in ([e413cff](https://github.com/zkat/npx/commit/e413cff)) -* **execCmd:** use the module built-in directly ([6f741c2](https://github.com/zkat/npx/commit/6f741c2)) -* **help:** fuck it. 
just hard-code it ([d5d5085](https://github.com/zkat/npx/commit/d5d5085)) -* **main:** only exec if this is the main module ([9631e2a](https://github.com/zkat/npx/commit/9631e2a)) +* **child:** iron out a few crinkles and add tests ([b3b5ef6](https://github.com/npm/npx/commit/b3b5ef6)) +* **execCmd:** only reuse the current process if no shell passed in ([e413cff](https://github.com/npm/npx/commit/e413cff)) +* **execCmd:** use the module built-in directly ([6f741c2](https://github.com/npm/npx/commit/6f741c2)) +* **help:** fuck it. just hard-code it ([d5d5085](https://github.com/npm/npx/commit/d5d5085)) +* **main:** only exec if this is the main module ([9631e2a](https://github.com/npm/npx/commit/9631e2a)) ### Features -* **i18n:** Update fr.json (#44) ([ea47c4f](https://github.com/zkat/npx/commit/ea47c4f)) -* **i18n:** update the Romanian translation. (#42) ([2ed36b6](https://github.com/zkat/npx/commit/2ed36b6)) +* **i18n:** Update fr.json (#44) ([ea47c4f](https://github.com/npm/npx/commit/ea47c4f)) +* **i18n:** update the Romanian translation. (#42) ([2ed36b6](https://github.com/npm/npx/commit/2ed36b6)) -# [6.1.0](https://github.com/zkat/npx/compare/v6.0.0...v6.1.0) (2017-06-21) +# [6.1.0](https://github.com/npm/npx/compare/v6.0.0...v6.1.0) (2017-06-21) ### Bug Fixes -* **deps:** remove unused gauge dep ([aa40a34](https://github.com/zkat/npx/commit/aa40a34)) +* **deps:** remove unused gauge dep ([aa40a34](https://github.com/npm/npx/commit/aa40a34)) ### Features -* **i18n:** update ru locale (#41) ([7c84dee](https://github.com/zkat/npx/commit/7c84dee)) -* **i18n:** update zh_CN (#40) ([da4ec67](https://github.com/zkat/npx/commit/da4ec67)) -* **perf:** run node-based commands in the current process ([6efcde4](https://github.com/zkat/npx/commit/6efcde4)) +* **i18n:** update ru locale (#41) ([7c84dee](https://github.com/npm/npx/commit/7c84dee)) +* **i18n:** update zh_CN (#40) ([da4ec67](https://github.com/npm/npx/commit/da4ec67)) +* **perf:** run node-based commands in the current process ([6efcde4](https://github.com/npm/npx/commit/6efcde4)) -# [6.0.0](https://github.com/zkat/npx/compare/v5.4.0...v6.0.0) (2017-06-20) +# [6.0.0](https://github.com/npm/npx/compare/v5.4.0...v6.0.0) (2017-06-20) ### Bug Fixes -* **call:** stop parsing -c for commands + fix corner cases ([bd4e538](https://github.com/zkat/npx/commit/bd4e538)) -* **child:** exec does not have the information needed to correctly escape its args ([6714992](https://github.com/zkat/npx/commit/6714992)) -* **guessCmdName:** tests failed because of lazy npa ([53a0119](https://github.com/zkat/npx/commit/53a0119)) -* **i18n:** gender inclusiveness fix for french version (#37) ([04920ae](https://github.com/zkat/npx/commit/04920ae)), closes [#37](https://github.com/zkat/npx/issues/37) -* **i18n:** typo 😇 (#38) ([ede4a53](https://github.com/zkat/npx/commit/ede4a53)) -* **install:** handle JSON parsing failures ([bec2887](https://github.com/zkat/npx/commit/bec2887)) -* **output:** stop printing out Command Failed messages ([873cffe](https://github.com/zkat/npx/commit/873cffe)) -* **parseArgs:** fix booboo in fast path ([d1e5487](https://github.com/zkat/npx/commit/d1e5487)) -* **perf:** fast-path `npx foo` arg parsing ([ba4fe71](https://github.com/zkat/npx/commit/ba4fe71)) -* **perf:** remove bluebird and defer some requires for SPEED ([00fc313](https://github.com/zkat/npx/commit/00fc313)) +* **call:** stop parsing -c for commands + fix corner cases ([bd4e538](https://github.com/npm/npx/commit/bd4e538)) +* **child:** exec does not have the 
information needed to correctly escape its args ([6714992](https://github.com/npm/npx/commit/6714992)) +* **guessCmdName:** tests failed because of lazy npa ([53a0119](https://github.com/npm/npx/commit/53a0119)) +* **i18n:** gender inclusiveness fix for french version (#37) ([04920ae](https://github.com/npm/npx/commit/04920ae)), closes [#37](https://github.com/zkat/npx/issues/37) +* **i18n:** typo 😇 (#38) ([ede4a53](https://github.com/npm/npx/commit/ede4a53)) +* **install:** handle JSON parsing failures ([bec2887](https://github.com/npm/npx/commit/bec2887)) +* **output:** stop printing out Command Failed messages ([873cffe](https://github.com/npm/npx/commit/873cffe)) +* **parseArgs:** fix booboo in fast path ([d1e5487](https://github.com/npm/npx/commit/d1e5487)) +* **perf:** fast-path `npx foo` arg parsing ([ba4fe71](https://github.com/npm/npx/commit/ba4fe71)) +* **perf:** remove bluebird and defer some requires for SPEED ([00fc313](https://github.com/npm/npx/commit/00fc313)) ### Features -* **i18n:** add Romanian translations. (#34) ([9e98bd0](https://github.com/zkat/npx/commit/9e98bd0)) -* **i18n:** added a few more localizable strings ([779d950](https://github.com/zkat/npx/commit/779d950)) -* **i18n:** updated ca.json ([af7a035](https://github.com/zkat/npx/commit/af7a035)) -* **i18n:** updated es.json ([414644f](https://github.com/zkat/npx/commit/414644f)) -* **i18n:** updated ja.json ([448b082](https://github.com/zkat/npx/commit/448b082)) -* **i18n:** Ze German Translation (#35) ([6f003f5](https://github.com/zkat/npx/commit/6f003f5)) -* **package:** report number of temp packages installed ([5b7fe8d](https://github.com/zkat/npx/commit/5b7fe8d)) -* **perf:** only launch update-notifier when npx installs stuff ([549d413](https://github.com/zkat/npx/commit/549d413)) -* **quiet:** added -q/--quiet to suppress output from npx itself ([16607d9](https://github.com/zkat/npx/commit/16607d9)) +* **i18n:** add Romanian translations. (#34) ([9e98bd0](https://github.com/npm/npx/commit/9e98bd0)) +* **i18n:** added a few more localizable strings ([779d950](https://github.com/npm/npx/commit/779d950)) +* **i18n:** updated ca.json ([af7a035](https://github.com/npm/npx/commit/af7a035)) +* **i18n:** updated es.json ([414644f](https://github.com/npm/npx/commit/414644f)) +* **i18n:** updated ja.json ([448b082](https://github.com/npm/npx/commit/448b082)) +* **i18n:** Ze German Translation (#35) ([6f003f5](https://github.com/npm/npx/commit/6f003f5)) +* **package:** report number of temp packages installed ([5b7fe8d](https://github.com/npm/npx/commit/5b7fe8d)) +* **perf:** only launch update-notifier when npx installs stuff ([549d413](https://github.com/npm/npx/commit/549d413)) +* **quiet:** added -q/--quiet to suppress output from npx itself ([16607d9](https://github.com/npm/npx/commit/16607d9)) ### BREAKING CHANGES @@ -496,124 +508,124 @@ of all the various cases and how each of them is handled. 
-# [5.4.0](https://github.com/zkat/npx/compare/v5.3.0...v5.4.0) (2017-06-17) +# [5.4.0](https://github.com/npm/npx/compare/v5.3.0...v5.4.0) (2017-06-17) ### Bug Fixes -* **i18n:** some corrections for es.json ([4d50b71](https://github.com/zkat/npx/commit/4d50b71)) -* **i18n:** update locale files with bugfixes ([77caf82](https://github.com/zkat/npx/commit/77caf82)) -* **i18n:** Y utility was ignoring falsy entries ([f22a4d0](https://github.com/zkat/npx/commit/f22a4d0)) -* **i18n:** してください -> します ([01671af](https://github.com/zkat/npx/commit/01671af)) +* **i18n:** some corrections for es.json ([4d50b71](https://github.com/npm/npx/commit/4d50b71)) +* **i18n:** update locale files with bugfixes ([77caf82](https://github.com/npm/npx/commit/77caf82)) +* **i18n:** Y utility was ignoring falsy entries ([f22a4d0](https://github.com/npm/npx/commit/f22a4d0)) +* **i18n:** してください -> します ([01671af](https://github.com/npm/npx/commit/01671af)) ### Features -* **i18n:** add catalan translation ([579efa1](https://github.com/zkat/npx/commit/579efa1)) -* **i18n:** add pt-br translation (#33) ([6142551](https://github.com/zkat/npx/commit/6142551)) -* **i18n:** added largely machine-translated ja.json ([827705f](https://github.com/zkat/npx/commit/827705f)) -* **i18n:** adds russian translation (#32) ([b2619c1](https://github.com/zkat/npx/commit/b2619c1)) +* **i18n:** add catalan translation ([579efa1](https://github.com/npm/npx/commit/579efa1)) +* **i18n:** add pt-br translation (#33) ([6142551](https://github.com/npm/npx/commit/6142551)) +* **i18n:** added largely machine-translated ja.json ([827705f](https://github.com/npm/npx/commit/827705f)) +* **i18n:** adds russian translation (#32) ([b2619c1](https://github.com/npm/npx/commit/b2619c1)) -# [5.3.0](https://github.com/zkat/npx/compare/v5.2.0...v5.3.0) (2017-06-13) +# [5.3.0](https://github.com/npm/npx/compare/v5.2.0...v5.3.0) (2017-06-13) ### Features -* **i18n:** add Chinese translation (#31) ([24e1b31](https://github.com/zkat/npx/commit/24e1b31)) +* **i18n:** add Chinese translation (#31) ([24e1b31](https://github.com/npm/npx/commit/24e1b31)) -# [5.2.0](https://github.com/zkat/npx/compare/v5.1.3...v5.2.0) (2017-06-12) +# [5.2.0](https://github.com/npm/npx/compare/v5.1.3...v5.2.0) (2017-06-12) ### Bug Fixes -* **i18n:** removing extra spacing in fr.json ([002e2b8](https://github.com/zkat/npx/commit/002e2b8)) +* **i18n:** removing extra spacing in fr.json ([002e2b8](https://github.com/npm/npx/commit/002e2b8)) ### Features -* **i18n:** add french locale (#29) ([662395b](https://github.com/zkat/npx/commit/662395b)) +* **i18n:** add french locale (#29) ([662395b](https://github.com/npm/npx/commit/662395b)) -## [5.1.3](https://github.com/zkat/npx/compare/v5.1.2...v5.1.3) (2017-06-12) +## [5.1.3](https://github.com/npm/npx/compare/v5.1.2...v5.1.3) (2017-06-12) ### Bug Fixes -* **fallback:** put the Y in the wrong place lol ([d6bf8aa](https://github.com/zkat/npx/commit/d6bf8aa)) +* **fallback:** put the Y in the wrong place lol ([d6bf8aa](https://github.com/npm/npx/commit/d6bf8aa)) -## [5.1.2](https://github.com/zkat/npx/compare/v5.1.1...v5.1.2) (2017-06-10) +## [5.1.2](https://github.com/npm/npx/compare/v5.1.1...v5.1.2) (2017-06-10) -## [5.1.1](https://github.com/zkat/npx/compare/v5.1.0...v5.1.1) (2017-06-10) +## [5.1.1](https://github.com/npm/npx/compare/v5.1.0...v5.1.1) (2017-06-10) ### Bug Fixes -* **i18n:** forgot to add locales to files ([4118d6a](https://github.com/zkat/npx/commit/4118d6a)) +* **i18n:** forgot to add locales to files 
([4118d6a](https://github.com/npm/npx/commit/4118d6a)) -# [5.1.0](https://github.com/zkat/npx/compare/v5.0.3...v5.1.0) (2017-06-10) +# [5.1.0](https://github.com/npm/npx/compare/v5.0.3...v5.1.0) (2017-06-10) ### Bug Fixes -* **exit:** let process exit normally to finish writes ([c50a398](https://github.com/zkat/npx/commit/c50a398)) +* **exit:** let process exit normally to finish writes ([c50a398](https://github.com/npm/npx/commit/c50a398)) ### Features -* **i18n:** added es.json ([6cf58b9](https://github.com/zkat/npx/commit/6cf58b9)) -* **i18n:** set up i18n plus baseline en.json locale ([b67bb3a](https://github.com/zkat/npx/commit/b67bb3a)) +* **i18n:** added es.json ([6cf58b9](https://github.com/npm/npx/commit/6cf58b9)) +* **i18n:** set up i18n plus baseline en.json locale ([b67bb3a](https://github.com/npm/npx/commit/b67bb3a)) -## [5.0.3](https://github.com/zkat/npx/compare/v5.0.2...v5.0.3) (2017-06-09) +## [5.0.3](https://github.com/npm/npx/compare/v5.0.2...v5.0.3) (2017-06-09) ### Bug Fixes -* **fallback:** exec is no ([42c1d30](https://github.com/zkat/npx/commit/42c1d30)) +* **fallback:** exec is no ([42c1d30](https://github.com/npm/npx/commit/42c1d30)) -## [5.0.2](https://github.com/zkat/npx/compare/v5.0.1...v5.0.2) (2017-06-09) +## [5.0.2](https://github.com/npm/npx/compare/v5.0.1...v5.0.2) (2017-06-09) ### Bug Fixes -* **fallback:** allow fallback to local anyway ([569cf2c](https://github.com/zkat/npx/commit/569cf2c)) +* **fallback:** allow fallback to local anyway ([569cf2c](https://github.com/npm/npx/commit/569cf2c)) -## [5.0.1](https://github.com/zkat/npx/compare/v5.0.0...v5.0.1) (2017-06-09) +## [5.0.1](https://github.com/npm/npx/compare/v5.0.0...v5.0.1) (2017-06-09) -# [5.0.0](https://github.com/zkat/npx/compare/v4.0.3...v5.0.0) (2017-06-09) +# [5.0.0](https://github.com/npm/npx/compare/v4.0.3...v5.0.0) (2017-06-09) ### Features -* **fallback:** by default, only fall back if you have an @ in the name ([bea08a0](https://github.com/zkat/npx/commit/bea08a0)) +* **fallback:** by default, only fall back if you have an @ in the name ([bea08a0](https://github.com/npm/npx/commit/bea08a0)) ### BREAKING CHANGES @@ -624,45 +636,45 @@ an @ sign in the command. 
-## [4.0.3](https://github.com/zkat/npx/compare/v4.0.2...v4.0.3) (2017-06-04) +## [4.0.3](https://github.com/npm/npx/compare/v4.0.2...v4.0.3) (2017-06-04) ### Bug Fixes -* **npm:** use --userconfig when querying for npm cache config (#28) ([21bc3bf](https://github.com/zkat/npx/commit/21bc3bf)) +* **npm:** use --userconfig when querying for npm cache config (#28) ([21bc3bf](https://github.com/npm/npx/commit/21bc3bf)) -## [4.0.2](https://github.com/zkat/npx/compare/v4.0.1...v4.0.2) (2017-06-04) +## [4.0.2](https://github.com/npm/npx/compare/v4.0.1...v4.0.2) (2017-06-04) ### Bug Fixes -* **install:** get windows workin (#27) ([9472175](https://github.com/zkat/npx/commit/9472175)) +* **install:** get windows workin (#27) ([9472175](https://github.com/npm/npx/commit/9472175)) -## [4.0.1](https://github.com/zkat/npx/compare/v4.0.0...v4.0.1) (2017-06-04) +## [4.0.1](https://github.com/npm/npx/compare/v4.0.0...v4.0.1) (2017-06-04) ### Bug Fixes -* **cmd:** make sure to use our own, enriched path ([9c89c2a](https://github.com/zkat/npx/commit/9c89c2a)) -* **error:** join args with a space on Command failed error ([c2f6f18](https://github.com/zkat/npx/commit/c2f6f18)) +* **cmd:** make sure to use our own, enriched path ([9c89c2a](https://github.com/npm/npx/commit/9c89c2a)) +* **error:** join args with a space on Command failed error ([c2f6f18](https://github.com/npm/npx/commit/c2f6f18)) -# [4.0.0](https://github.com/zkat/npx/compare/v3.0.0...v4.0.0) (2017-06-03) +# [4.0.0](https://github.com/npm/npx/compare/v3.0.0...v4.0.0) (2017-06-03) ### Features -* **call:** -c now loads same env as run-script ([76ae44c](https://github.com/zkat/npx/commit/76ae44c)) -* **npm:** allow configuration of npm binary ([e5d5634](https://github.com/zkat/npx/commit/e5d5634)) -* **npm:** embed npm binary ([a2cae9d](https://github.com/zkat/npx/commit/a2cae9d)) +* **call:** -c now loads same env as run-script ([76ae44c](https://github.com/npm/npx/commit/76ae44c)) +* **npm:** allow configuration of npm binary ([e5d5634](https://github.com/npm/npx/commit/e5d5634)) +* **npm:** embed npm binary ([a2cae9d](https://github.com/npm/npx/commit/a2cae9d)) ### BREAKING CHANGES @@ -674,29 +686,29 @@ variables added to them that were not there before. 
-# [3.0.0](https://github.com/zkat/npx/compare/v2.1.0...v3.0.0) (2017-06-03) +# [3.0.0](https://github.com/npm/npx/compare/v2.1.0...v3.0.0) (2017-06-03) ### Bug Fixes -* **args:** accept argv as arg and fix minor bugs ([46f10fe](https://github.com/zkat/npx/commit/46f10fe)) -* **deps:** explicitly add mkdirp and rimraf to devDeps ([832c75d](https://github.com/zkat/npx/commit/832c75d)) -* **docs:** misc tweaks to docs ([ed70a7b](https://github.com/zkat/npx/commit/ed70a7b)) -* **exec:** escape binaries and args to cp.exec (#18) ([55d6a11](https://github.com/zkat/npx/commit/55d6a11)) -* **fallback:** shells were sometimes ignored based on $SHELL ([07b7efc](https://github.com/zkat/npx/commit/07b7efc)) -* **get-prefix:** nudge isRootPath ([1ab31eb](https://github.com/zkat/npx/commit/1ab31eb)) -* **help:** correctly enable -h and --help ([adc2f45](https://github.com/zkat/npx/commit/adc2f45)) -* **startup:** delay loading some things to speed up startup ([6b32bf5](https://github.com/zkat/npx/commit/6b32bf5)) +* **args:** accept argv as arg and fix minor bugs ([46f10fe](https://github.com/npm/npx/commit/46f10fe)) +* **deps:** explicitly add mkdirp and rimraf to devDeps ([832c75d](https://github.com/npm/npx/commit/832c75d)) +* **docs:** misc tweaks to docs ([ed70a7b](https://github.com/npm/npx/commit/ed70a7b)) +* **exec:** escape binaries and args to cp.exec (#18) ([55d6a11](https://github.com/npm/npx/commit/55d6a11)) +* **fallback:** shells were sometimes ignored based on $SHELL ([07b7efc](https://github.com/npm/npx/commit/07b7efc)) +* **get-prefix:** nudge isRootPath ([1ab31eb](https://github.com/npm/npx/commit/1ab31eb)) +* **help:** correctly enable -h and --help ([adc2f45](https://github.com/npm/npx/commit/adc2f45)) +* **startup:** delay loading some things to speed up startup ([6b32bf5](https://github.com/npm/npx/commit/6b32bf5)) ### Features -* **cmd:** do some heuristic guesswork on default command names (#23) ([2404420](https://github.com/zkat/npx/commit/2404420)) -* **ignore:** add --ignore-existing option (#20) ([0866a83](https://github.com/zkat/npx/commit/0866a83)) -* **install:** added --no-install option to prevent install fallbacks ([a5fbdaf](https://github.com/zkat/npx/commit/a5fbdaf)) -* **package:** multiple --package options are now accepted ([f2fa6b3](https://github.com/zkat/npx/commit/f2fa6b3)) -* **save:** remove all save-related functionality (#19) ([ab77f6c](https://github.com/zkat/npx/commit/ab77f6c)) -* **shell:** run -c strings inside a system shell (#22) ([17db461](https://github.com/zkat/npx/commit/17db461)) +* **cmd:** do some heuristic guesswork on default command names (#23) ([2404420](https://github.com/npm/npx/commit/2404420)) +* **ignore:** add --ignore-existing option (#20) ([0866a83](https://github.com/npm/npx/commit/0866a83)) +* **install:** added --no-install option to prevent install fallbacks ([a5fbdaf](https://github.com/npm/npx/commit/a5fbdaf)) +* **package:** multiple --package options are now accepted ([f2fa6b3](https://github.com/npm/npx/commit/f2fa6b3)) +* **save:** remove all save-related functionality (#19) ([ab77f6c](https://github.com/npm/npx/commit/ab77f6c)) +* **shell:** run -c strings inside a system shell (#22) ([17db461](https://github.com/npm/npx/commit/17db461)) ### BREAKING CHANGES @@ -706,37 +718,37 @@ variables added to them that were not there before. 
-# [2.1.0](https://github.com/zkat/npx/compare/v2.0.1...v2.1.0) (2017-06-01) +# [2.1.0](https://github.com/npm/npx/compare/v2.0.1...v2.1.0) (2017-06-01) ### Features -* **opts:** add --shell-auto-fallback (#7) ([ac9cb40](https://github.com/zkat/npx/commit/ac9cb40)) +* **opts:** add --shell-auto-fallback (#7) ([ac9cb40](https://github.com/npm/npx/commit/ac9cb40)) -## [2.0.1](https://github.com/zkat/npx/compare/v2.0.0...v2.0.1) (2017-05-31) +## [2.0.1](https://github.com/npm/npx/compare/v2.0.0...v2.0.1) (2017-05-31) ### Bug Fixes -* **exec:** use command lookup joined with current PATH ([d9175e8](https://github.com/zkat/npx/commit/d9175e8)) +* **exec:** use command lookup joined with current PATH ([d9175e8](https://github.com/npm/npx/commit/d9175e8)) -# [2.0.0](https://github.com/zkat/npx/compare/v1.1.1...v2.0.0) (2017-05-31) +# [2.0.0](https://github.com/npm/npx/compare/v1.1.1...v2.0.0) (2017-05-31) ### Bug Fixes -* **npm:** manually look up npm path for Windows compat ([0fe8fbf](https://github.com/zkat/npx/commit/0fe8fbf)) +* **npm:** manually look up npm path for Windows compat ([0fe8fbf](https://github.com/npm/npx/commit/0fe8fbf)) ### Features -* **commands:** -p and [@version](https://github.com/version) now trigger installs ([9668c83](https://github.com/zkat/npx/commit/9668c83)) +* **commands:** -p and [@version](https://github.com/version) now trigger installs ([9668c83](https://github.com/npm/npx/commit/9668c83)) ### BREAKING CHANGES @@ -746,51 +758,51 @@ variables added to them that were not there before. -## [1.1.1](https://github.com/zkat/npx/compare/v1.1.0...v1.1.1) (2017-05-30) +## [1.1.1](https://github.com/npm/npx/compare/v1.1.0...v1.1.1) (2017-05-30) ### Bug Fixes -* **docs:** make sure man page gets installed ([2aadc16](https://github.com/zkat/npx/commit/2aadc16)) +* **docs:** make sure man page gets installed ([2aadc16](https://github.com/npm/npx/commit/2aadc16)) -# [1.1.0](https://github.com/zkat/npx/compare/v1.0.2...v1.1.0) (2017-05-30) +# [1.1.0](https://github.com/npm/npx/compare/v1.0.2...v1.1.0) (2017-05-30) ### Bug Fixes -* **help:** update usage string for help ([0747cff](https://github.com/zkat/npx/commit/0747cff)) -* **main:** exit if no package was parsed ([cdb579d](https://github.com/zkat/npx/commit/cdb579d)) -* **opts:** allow -- to prevent further parsing ([db7a0e4](https://github.com/zkat/npx/commit/db7a0e4)) +* **help:** update usage string for help ([0747cff](https://github.com/npm/npx/commit/0747cff)) +* **main:** exit if no package was parsed ([cdb579d](https://github.com/npm/npx/commit/cdb579d)) +* **opts:** allow -- to prevent further parsing ([db7a0e4](https://github.com/npm/npx/commit/db7a0e4)) ### Features -* **updates:** added update-notifier ([8dc91d4](https://github.com/zkat/npx/commit/8dc91d4)) +* **updates:** added update-notifier ([8dc91d4](https://github.com/npm/npx/commit/8dc91d4)) -## [1.0.2](https://github.com/zkat/npx/compare/v1.0.1...v1.0.2) (2017-05-30) +## [1.0.2](https://github.com/npm/npx/compare/v1.0.1...v1.0.2) (2017-05-30) ### Bug Fixes -* **pkg:** bundle deps to guarantee global install precision ([3e21217](https://github.com/zkat/npx/commit/3e21217)) +* **pkg:** bundle deps to guarantee global install precision ([3e21217](https://github.com/npm/npx/commit/3e21217)) -## [1.0.1](https://github.com/zkat/npx/compare/v1.0.0...v1.0.1) (2017-05-30) +## [1.0.1](https://github.com/npm/npx/compare/v1.0.0...v1.0.1) (2017-05-30) ### Bug Fixes -* **build:** add dummy test file to let things build 
([6199eb6](https://github.com/zkat/npx/commit/6199eb6)) -* **docs:** fix arg documentation in readme/manpage ([d1cf44c](https://github.com/zkat/npx/commit/d1cf44c)) -* **opts:** add --version/-v ([2633a0e](https://github.com/zkat/npx/commit/2633a0e)) +* **build:** add dummy test file to let things build ([6199eb6](https://github.com/npm/npx/commit/6199eb6)) +* **docs:** fix arg documentation in readme/manpage ([d1cf44c](https://github.com/npm/npx/commit/d1cf44c)) +* **opts:** add --version/-v ([2633a0e](https://github.com/npm/npx/commit/2633a0e)) @@ -800,4 +812,4 @@ variables added to them that were not there before. ### Features -* **npx:** initial working implementation ([a83a67d](https://github.com/zkat/npx/commit/a83a67d)) +* **npx:** initial working implementation ([a83a67d](https://github.com/npm/npx/commit/a83a67d)) diff --git a/node_modules/libnpx/README.md b/node_modules/libnpx/README.md index 130f1bc3fdd02..fa7f37d878a06 100644 --- a/node_modules/libnpx/README.md +++ b/node_modules/libnpx/README.md @@ -1,4 +1,4 @@ -[![npm](https://img.shields.io/npm/v/npx.svg)](https://npm.im/npx) [![license](https://img.shields.io/npm/l/npx.svg)](https://npm.im/npx) [![Travis](https://img.shields.io/travis/zkat/npx.svg)](https://travis-ci.org/zkat/npx) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/npx?svg=true)](https://ci.appveyor.com/project/zkat/npx) [![Coverage Status](https://coveralls.io/repos/github/zkat/npx/badge.svg?branch=latest)](https://coveralls.io/github/zkat/npx?branch=latest) +[![npm](https://img.shields.io/npm/v/npx.svg)](https://npm.im/npx) [![license](https://img.shields.io/npm/l/npx.svg)](https://npm.im/npx) [![Travis](https://img.shields.io/travis/npm/npx.svg)](https://travis-ci.org/npm/npx) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/npx?svg=true)](https://ci.appveyor.com/project/npm/npx) [![Coverage Status](https://coveralls.io/repos/github/npm/npx/badge.svg?branch=latest)](https://coveralls.io/github/npm/npx?branch=latest) # npx(1) -- execute npm package binaries @@ -152,7 +152,7 @@ Written by [Kat Marchan](https://github.com/zkat). ## REPORTING BUGS -Please file any relevant issues [on Github.](https://github.com/zkat/npx) +Please file any relevant issues [on Github.](https://github.com/npm/npx) ## LICENSE diff --git a/node_modules/libnpx/libnpx.1 b/node_modules/libnpx/libnpx.1 index 4215202da8e24..aea3ffe8f4192 100644 --- a/node_modules/libnpx/libnpx.1 +++ b/node_modules/libnpx/libnpx.1 @@ -1,4 +1,4 @@ -.TH "NPX" "1" "April 2018" "libnpx@10.1.1" "User Commands" +.TH "NPX" "1" "January 2020" "libnpx@10.2.1" "User Commands" .SH "NAME" \fBnpx\fR \- execute npm package binaries .SH SYNOPSIS @@ -158,7 +158,7 @@ Huge thanks to Kwyn Meagher \fIhttps://blog\.kwyn\.io\fR for generously donating Written by Kat Marchan \fIhttps://github\.com/zkat\fR\|\. .SH REPORTING BUGS .P -Please file any relevant issues on Github\. \fIhttps://github\.com/zkat/npx\fR +Please file any relevant issues on Github\. \fIhttps://github\.com/npm/npx\fR .SH LICENSE .P This work is released by its authors into the public domain under CC0\-1\.0\. See \fBLICENSE\.md\fP for details\. 
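The changelog, README, and man-page hunks above only repoint repository links from `zkat/npx` to `npm/npx`; the npx behavior they describe is unchanged. As a rough illustration of the flags those changelog entries mention (`-p`/`--package`, `-c`, `--no-install`, `-q`/`--quiet`), a minimal shell sketch follows; the package names (`cowsay`, `lolcatjs`) are illustrative placeholders borrowed from the npx docs, not part of this diff:

```sh
# Run a package binary, installing it into a temporary cache if it is not already on PATH.
npx cowsay "hello"

# -p/--package may be repeated; -c runs the string in a system shell with run-script-style env vars.
npx -p cowsay -p lolcatjs -c 'echo "$npm_package_name@$npm_package_version" | cowsay | lolcatjs'

# --no-install prevents the install fallback; -q/--quiet suppresses output from npx itself.
npx -q --no-install cowsay "hi again"
```

The `-c` form is the one touched by the 4.0.0 entry above ("-c now loads same env as run-script"), which is why variables such as `$npm_package_name` should be available inside it when invoked from a package directory.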
diff --git a/node_modules/libnpx/package.json b/node_modules/libnpx/package.json index b9d866b96a7d0..9b9949cd3ca4f 100644 --- a/node_modules/libnpx/package.json +++ b/node_modules/libnpx/package.json @@ -1,39 +1,36 @@ { - "_args": [ - [ - "libnpx@10.2.0", - "/Users/rebecca/code/npm" - ] - ], - "_from": "libnpx@10.2.0", - "_id": "libnpx@10.2.0", + "_from": "libnpx@10.2.2", + "_id": "libnpx@10.2.2", "_inBundle": false, - "_integrity": "sha512-X28coei8/XRCt15cYStbLBph+KGhFra4VQhRBPuH/HHMkC5dxM8v24RVgUsvODKCrUZ0eTgiTqJp6zbl0sskQQ==", + "_integrity": "sha512-ujaYToga1SAX5r7FU5ShMFi88CWpY75meNZtr6RtEyv4l2ZK3+Wgvxq2IqlwWBiDZOqhumdeiocPS1aKrCMe3A==", "_location": "/libnpx", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "libnpx@10.2.0", + "raw": "libnpx@10.2.2", "name": "libnpx", "escapedName": "libnpx", - "rawSpec": "10.2.0", + "rawSpec": "10.2.2", "saveSpec": null, - "fetchSpec": "10.2.0" + "fetchSpec": "10.2.2" }, "_requiredBy": [ + "#USER", "/" ], - "_resolved": "https://registry.npmjs.org/libnpx/-/libnpx-10.2.0.tgz", - "_spec": "10.2.0", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/libnpx/-/libnpx-10.2.2.tgz", + "_shasum": "5a4171b9b92dd031463ef66a4af9f5cbd6b09572", + "_spec": "libnpx@10.2.2", + "_where": "/Users/mperrotte/npminc/cli", "author": { "name": "Kat Marchán", "email": "kzm@sykosomatic.org" }, "bugs": { - "url": "https://github.com/zkat/npx/issues" + "url": "https://github.com/npm/npx/issues" }, + "bundleDependencies": false, "config": { "nyc": { "exclude": [ @@ -52,6 +49,7 @@ "y18n": "^4.0.0", "yargs": "^11.0.0" }, + "deprecated": false, "description": "support library for npx -- an tool for executing npm-based packages.", "devDependencies": { "cross-env": "^5.1.3", @@ -76,7 +74,7 @@ "libnpx.1", "locales" ], - "homepage": "https://github.com/zkat/npx#readme", + "homepage": "https://github.com/npm/npx#readme", "keywords": [ "npm", "npm exec", @@ -93,7 +91,7 @@ "name": "libnpx", "repository": { "type": "git", - "url": "git+https://github.com/zkat/npx.git" + "url": "git+https://github.com/npm/npx.git" }, "scripts": { "bin": "make bin", @@ -107,5 +105,5 @@ "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", "update-contrib": "weallcontribute -o . 
&& git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" }, - "version": "10.2.0" + "version": "10.2.2" } diff --git a/node_modules/lock-verify/index.js b/node_modules/lock-verify/index.js index 22721329134d7..cf673888faf01 100644 --- a/node_modules/lock-verify/index.js +++ b/node_modules/lock-verify/index.js @@ -36,9 +36,17 @@ function lockVerify(check) { if (spec.registry) { // Can't match tags to package-lock w/o network if (spec.type === 'tag') return - if (!semver.satisfies(lock.version, spec.fetchSpec)) { - errors.push("Invalid: lock file's " + name + '@' + lock.version + ' does not satisfy ' + name + '@' + spec.fetchSpec) - return + if (spec.type === 'alias') { + const lockSpec = npa.resolve(name, lock.version) + if (!semver.satisfies(lockSpec.subSpec.fetchSpec, spec.subSpec.fetchSpec)) { + errors.push("Invalid: lock file's " + name + '@' + lock.version + ' does not satisfy ' + name + '@' + spec.rawSpec) + return + } + } else { + if (!semver.satisfies(lock.version, spec.fetchSpec)) { + errors.push("Invalid: lock file's " + name + '@' + lock.version + ' does not satisfy ' + name + '@' + spec.fetchSpec) + return + } } } else if (spec.type === 'git') { // can't verify git w/o network diff --git a/node_modules/lock-verify/package.json b/node_modules/lock-verify/package.json index 0f2002f549e5a..621c12fb76e87 100644 --- a/node_modules/lock-verify/package.json +++ b/node_modules/lock-verify/package.json @@ -1,33 +1,30 @@ { - "_args": [ - [ - "lock-verify@2.0.2", - "/Users/rebecca/code/npm" - ] - ], - "_from": "lock-verify@2.0.2", - "_id": "lock-verify@2.0.2", + "_from": "lock-verify@2.1.0", + "_id": "lock-verify@2.1.0", "_inBundle": false, - "_integrity": "sha512-QNVwK0EGZBS4R3YQ7F1Ox8p41Po9VGl2QG/2GsuvTbkJZYSsPeWHKMbbH6iZMCHWSMww5nrJroZYnGzI4cePuw==", + "_integrity": "sha512-vcLpxnGvrqisKvLQ2C2v0/u7LVly17ak2YSgoK4PrdsYBXQIax19vhKiLfvKNFx7FRrpTnitrpzF/uuCMuorIg==", "_location": "/lock-verify", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "lock-verify@2.0.2", + "raw": "lock-verify@2.1.0", "name": "lock-verify", "escapedName": "lock-verify", - "rawSpec": "2.0.2", + "rawSpec": "2.1.0", "saveSpec": null, - "fetchSpec": "2.0.2" + "fetchSpec": "2.1.0" }, "_requiredBy": [ + "#USER", "/", - "/libcipm" + "/libcipm", + "/libnpm" ], - "_resolved": "https://registry.npmjs.org/lock-verify/-/lock-verify-2.0.2.tgz", - "_spec": "2.0.2", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/lock-verify/-/lock-verify-2.1.0.tgz", + "_shasum": "fff4c918b8db9497af0c5fa7f6d71555de3ceb47", + "_spec": "lock-verify@2.1.0", + "_where": "/Users/zkat/Documents/code/work/npm", "author": { "name": "Rebecca Turner", "email": "me@re-becca.org", @@ -36,10 +33,12 @@ "bugs": { "url": "https://github.com/iarna/lock-verify/issues" }, + "bundleDependencies": false, "dependencies": { - "npm-package-arg": "^5.1.2 || 6", + "npm-package-arg": "^6.1.0", "semver": "^5.4.1" }, + "deprecated": false, "description": "Report if your package.json is out of sync with your package-lock.json.", "devDependencies": { "@iarna/cli": "^1.2.0" @@ -59,5 +58,5 @@ "scripts": { "test": "echo \"Error: no test specified\" && exit 1" }, - "version": "2.0.2" + "version": "2.1.0" } diff --git a/node_modules/lru-cache/README.md b/node_modules/lru-cache/README.md index d660dd5747abe..435dfebb7e27d 100644 --- a/node_modules/lru-cache/README.md +++ b/node_modules/lru-cache/README.md @@ -18,8 +18,8 @@ var LRU = require("lru-cache") , length: function (n, 
key) { return n * 2 + key.length } , dispose: function (key, n) { n.close() } , maxAge: 1000 * 60 * 60 } - , cache = LRU(options) - , otherCache = LRU(50) // sets just the max size + , cache = new LRU(options) + , otherCache = new LRU(50) // sets just the max size cache.set("key", "value") cache.get("key") // "value" @@ -49,10 +49,13 @@ away. * `max` The maximum size of the cache, checked by applying the length function to all values in the cache. Not setting this is kind of silly, since that's the whole purpose of this lib, but it defaults - to `Infinity`. + to `Infinity`. Setting it to a non-number or negative number will + throw a `TypeError`. Setting it to 0 makes it be `Infinity`. * `maxAge` Maximum age in ms. Items are not pro-actively pruned out as they age, but if you try to get an item that is too old, it'll drop it and return undefined instead of giving it to you. + Setting this to a negative value will make everything seem old! + Setting it to a non-number will throw a `TypeError`. * `length` Function that is used to calculate the length of stored items. If you're storing strings or buffers, then you probably want to do something like `function(n, key){return n.length}`. The default is @@ -76,6 +79,11 @@ away. it'll be called whenever a `set()` operation overwrites an existing key. If you set this option, `dispose()` will only be called when a key falls out of the cache, not when it is overwritten. +* `updateAgeOnGet` When using time-expiring entries with `maxAge`, + setting this to `true` will make each item's effective time update + to the current time whenever it is retrieved from cache, causing it + to not expire. (It can still fall out of cache based on recency of + use, of course.) ## API diff --git a/node_modules/lru-cache/index.js b/node_modules/lru-cache/index.js index 3f047f8ca78ae..573b6b85b9779 100644 --- a/node_modules/lru-cache/index.js +++ b/node_modules/lru-cache/index.js @@ -1,39 +1,20 @@ 'use strict' -module.exports = LRUCache - -// This will be a proper iterable 'Map' in engines that support it, -// or a fakey-fake PseudoMap in older versions. -var Map = require('pseudomap') -var util = require('util') - // A linked list to keep track of recently-used-ness -var Yallist = require('yallist') - -// use symbols if possible, otherwise just _props -var hasSymbol = typeof Symbol === 'function' -var makeSymbol -if (hasSymbol) { - makeSymbol = function (key) { - return Symbol(key) - } -} else { - makeSymbol = function (key) { - return '_' + key - } -} +const Yallist = require('yallist') -var MAX = makeSymbol('max') -var LENGTH = makeSymbol('length') -var LENGTH_CALCULATOR = makeSymbol('lengthCalculator') -var ALLOW_STALE = makeSymbol('allowStale') -var MAX_AGE = makeSymbol('maxAge') -var DISPOSE = makeSymbol('dispose') -var NO_DISPOSE_ON_SET = makeSymbol('noDisposeOnSet') -var LRU_LIST = makeSymbol('lruList') -var CACHE = makeSymbol('cache') +const MAX = Symbol('max') +const LENGTH = Symbol('length') +const LENGTH_CALCULATOR = Symbol('lengthCalculator') +const ALLOW_STALE = Symbol('allowStale') +const MAX_AGE = Symbol('maxAge') +const DISPOSE = Symbol('dispose') +const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') +const LRU_LIST = Symbol('lruList') +const CACHE = Symbol('cache') +const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') -function naiveLength () { return 1 } +const naiveLength = () => 1 // lruList is a yallist where the head is the youngest // item, and the tail is the oldest. 
the list contains the Hit @@ -43,425 +24,311 @@ function naiveLength () { return 1 } // // cache is a Map (or PseudoMap) that matches the keys to // the Yallist.Node object. -function LRUCache (options) { - if (!(this instanceof LRUCache)) { - return new LRUCache(options) +class LRUCache { + constructor (options) { + if (typeof options === 'number') + options = { max: options } + + if (!options) + options = {} + + if (options.max && (typeof options.max !== 'number' || options.max < 0)) + throw new TypeError('max must be a non-negative number') + // Kind of weird to have a default max of Infinity, but oh well. + const max = this[MAX] = options.max || Infinity + + const lc = options.length || naiveLength + this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc + this[ALLOW_STALE] = options.stale || false + if (options.maxAge && typeof options.maxAge !== 'number') + throw new TypeError('maxAge must be a number') + this[MAX_AGE] = options.maxAge || 0 + this[DISPOSE] = options.dispose + this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false + this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false + this.reset() } - if (typeof options === 'number') { - options = { max: options } - } + // resize the cache when the max changes. + set max (mL) { + if (typeof mL !== 'number' || mL < 0) + throw new TypeError('max must be a non-negative number') - if (!options) { - options = {} + this[MAX] = mL || Infinity + trim(this) } - - var max = this[MAX] = options.max - // Kind of weird to have a default max of Infinity, but oh well. - if (!max || - !(typeof max === 'number') || - max <= 0) { - this[MAX] = Infinity + get max () { + return this[MAX] } - var lc = options.length || naiveLength - if (typeof lc !== 'function') { - lc = naiveLength + set allowStale (allowStale) { + this[ALLOW_STALE] = !!allowStale + } + get allowStale () { + return this[ALLOW_STALE] } - this[LENGTH_CALCULATOR] = lc - - this[ALLOW_STALE] = options.stale || false - this[MAX_AGE] = options.maxAge || 0 - this[DISPOSE] = options.dispose - this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false - this.reset() -} -// resize the cache when the max changes. -Object.defineProperty(LRUCache.prototype, 'max', { - set: function (mL) { - if (!mL || !(typeof mL === 'number') || mL <= 0) { - mL = Infinity - } - this[MAX] = mL - trim(this) - }, - get: function () { - return this[MAX] - }, - enumerable: true -}) + set maxAge (mA) { + if (typeof mA !== 'number') + throw new TypeError('maxAge must be a non-negative number') -Object.defineProperty(LRUCache.prototype, 'allowStale', { - set: function (allowStale) { - this[ALLOW_STALE] = !!allowStale - }, - get: function () { - return this[ALLOW_STALE] - }, - enumerable: true -}) - -Object.defineProperty(LRUCache.prototype, 'maxAge', { - set: function (mA) { - if (!mA || !(typeof mA === 'number') || mA < 0) { - mA = 0 - } this[MAX_AGE] = mA trim(this) - }, - get: function () { + } + get maxAge () { return this[MAX_AGE] - }, - enumerable: true -}) - -// resize the cache when the lengthCalculator changes. -Object.defineProperty(LRUCache.prototype, 'lengthCalculator', { - set: function (lC) { - if (typeof lC !== 'function') { + } + + // resize the cache when the lengthCalculator changes. 
+ set lengthCalculator (lC) { + if (typeof lC !== 'function') lC = naiveLength - } + if (lC !== this[LENGTH_CALCULATOR]) { this[LENGTH_CALCULATOR] = lC this[LENGTH] = 0 - this[LRU_LIST].forEach(function (hit) { + this[LRU_LIST].forEach(hit => { hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) this[LENGTH] += hit.length - }, this) + }) } trim(this) - }, - get: function () { return this[LENGTH_CALCULATOR] }, - enumerable: true -}) - -Object.defineProperty(LRUCache.prototype, 'length', { - get: function () { return this[LENGTH] }, - enumerable: true -}) - -Object.defineProperty(LRUCache.prototype, 'itemCount', { - get: function () { return this[LRU_LIST].length }, - enumerable: true -}) - -LRUCache.prototype.rforEach = function (fn, thisp) { - thisp = thisp || this - for (var walker = this[LRU_LIST].tail; walker !== null;) { - var prev = walker.prev - forEachStep(this, fn, walker, thisp) - walker = prev } -} + get lengthCalculator () { return this[LENGTH_CALCULATOR] } -function forEachStep (self, fn, node, thisp) { - var hit = node.value - if (isStale(self, hit)) { - del(self, node) - if (!self[ALLOW_STALE]) { - hit = undefined + get length () { return this[LENGTH] } + get itemCount () { return this[LRU_LIST].length } + + rforEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].tail; walker !== null;) { + const prev = walker.prev + forEachStep(this, fn, walker, thisp) + walker = prev } } - if (hit) { - fn.call(thisp, hit.value, hit.key, self) + + forEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].head; walker !== null;) { + const next = walker.next + forEachStep(this, fn, walker, thisp) + walker = next + } } -} -LRUCache.prototype.forEach = function (fn, thisp) { - thisp = thisp || this - for (var walker = this[LRU_LIST].head; walker !== null;) { - var next = walker.next - forEachStep(this, fn, walker, thisp) - walker = next + keys () { + return this[LRU_LIST].toArray().map(k => k.key) } -} -LRUCache.prototype.keys = function () { - return this[LRU_LIST].toArray().map(function (k) { - return k.key - }, this) -} + values () { + return this[LRU_LIST].toArray().map(k => k.value) + } -LRUCache.prototype.values = function () { - return this[LRU_LIST].toArray().map(function (k) { - return k.value - }, this) -} + reset () { + if (this[DISPOSE] && + this[LRU_LIST] && + this[LRU_LIST].length) { + this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) + } -LRUCache.prototype.reset = function () { - if (this[DISPOSE] && - this[LRU_LIST] && - this[LRU_LIST].length) { - this[LRU_LIST].forEach(function (hit) { - this[DISPOSE](hit.key, hit.value) - }, this) + this[CACHE] = new Map() // hash of items by key + this[LRU_LIST] = new Yallist() // list of items in order of use recency + this[LENGTH] = 0 // length of items in the list } - this[CACHE] = new Map() // hash of items by key - this[LRU_LIST] = new Yallist() // list of items in order of use recency - this[LENGTH] = 0 // length of items in the list -} - -LRUCache.prototype.dump = function () { - return this[LRU_LIST].map(function (hit) { - if (!isStale(this, hit)) { - return { + dump () { + return this[LRU_LIST].map(hit => + isStale(this, hit) ? 
false : { k: hit.key, v: hit.value, e: hit.now + (hit.maxAge || 0) - } - } - }, this).toArray().filter(function (h) { - return h - }) -} - -LRUCache.prototype.dumpLru = function () { - return this[LRU_LIST] -} - -LRUCache.prototype.inspect = function (n, opts) { - var str = 'LRUCache {' - var extras = false - - var as = this[ALLOW_STALE] - if (as) { - str += '\n allowStale: true' - extras = true + }).toArray().filter(h => h) } - var max = this[MAX] - if (max && max !== Infinity) { - if (extras) { - str += ',' - } - str += '\n max: ' + util.inspect(max, opts) - extras = true + dumpLru () { + return this[LRU_LIST] } - var maxAge = this[MAX_AGE] - if (maxAge) { - if (extras) { - str += ',' - } - str += '\n maxAge: ' + util.inspect(maxAge, opts) - extras = true - } + set (key, value, maxAge) { + maxAge = maxAge || this[MAX_AGE] - var lc = this[LENGTH_CALCULATOR] - if (lc && lc !== naiveLength) { - if (extras) { - str += ',' - } - str += '\n length: ' + util.inspect(this[LENGTH], opts) - extras = true - } + if (maxAge && typeof maxAge !== 'number') + throw new TypeError('maxAge must be a number') - var didFirst = false - this[LRU_LIST].forEach(function (item) { - if (didFirst) { - str += ',\n ' - } else { - if (extras) { - str += ',\n' + const now = maxAge ? Date.now() : 0 + const len = this[LENGTH_CALCULATOR](value, key) + + if (this[CACHE].has(key)) { + if (len > this[MAX]) { + del(this, this[CACHE].get(key)) + return false } - didFirst = true - str += '\n ' - } - var key = util.inspect(item.key).split('\n').join('\n ') - var val = { value: item.value } - if (item.maxAge !== maxAge) { - val.maxAge = item.maxAge - } - if (lc !== naiveLength) { - val.length = item.length - } - if (isStale(this, item)) { - val.stale = true - } - val = util.inspect(val, opts).split('\n').join('\n ') - str += key + ' => ' + val - }) + const node = this[CACHE].get(key) + const item = node.value - if (didFirst || extras) { - str += '\n' - } - str += '}' + // dispose of the old one before overwriting + // split out into 2 ifs for better coverage tracking + if (this[DISPOSE]) { + if (!this[NO_DISPOSE_ON_SET]) + this[DISPOSE](key, item.value) + } - return str -} + item.now = now + item.maxAge = maxAge + item.value = value + this[LENGTH] += len - item.length + item.length = len + this.get(key) + trim(this) + return true + } -LRUCache.prototype.set = function (key, value, maxAge) { - maxAge = maxAge || this[MAX_AGE] + const hit = new Entry(key, value, len, now, maxAge) - var now = maxAge ? Date.now() : 0 - var len = this[LENGTH_CALCULATOR](value, key) + // oversized objects fall out of cache automatically. + if (hit.length > this[MAX]) { + if (this[DISPOSE]) + this[DISPOSE](key, value) - if (this[CACHE].has(key)) { - if (len > this[MAX]) { - del(this, this[CACHE].get(key)) return false } - var node = this[CACHE].get(key) - var item = node.value - - // dispose of the old one before overwriting - // split out into 2 ifs for better coverage tracking - if (this[DISPOSE]) { - if (!this[NO_DISPOSE_ON_SET]) { - this[DISPOSE](key, item.value) - } - } - - item.now = now - item.maxAge = maxAge - item.value = value - this[LENGTH] += len - item.length - item.length = len - this.get(key) + this[LENGTH] += hit.length + this[LRU_LIST].unshift(hit) + this[CACHE].set(key, this[LRU_LIST].head) trim(this) return true } - var hit = new Entry(key, value, len, now, maxAge) - - // oversized objects fall out of cache automatically. 
- if (hit.length > this[MAX]) { - if (this[DISPOSE]) { - this[DISPOSE](key, value) - } - return false + has (key) { + if (!this[CACHE].has(key)) return false + const hit = this[CACHE].get(key).value + return !isStale(this, hit) } - this[LENGTH] += hit.length - this[LRU_LIST].unshift(hit) - this[CACHE].set(key, this[LRU_LIST].head) - trim(this) - return true -} - -LRUCache.prototype.has = function (key) { - if (!this[CACHE].has(key)) return false - var hit = this[CACHE].get(key).value - if (isStale(this, hit)) { - return false + get (key) { + return get(this, key, true) } - return true -} -LRUCache.prototype.get = function (key) { - return get(this, key, true) -} + peek (key) { + return get(this, key, false) + } -LRUCache.prototype.peek = function (key) { - return get(this, key, false) -} + pop () { + const node = this[LRU_LIST].tail + if (!node) + return null -LRUCache.prototype.pop = function () { - var node = this[LRU_LIST].tail - if (!node) return null - del(this, node) - return node.value -} + del(this, node) + return node.value + } -LRUCache.prototype.del = function (key) { - del(this, this[CACHE].get(key)) -} + del (key) { + del(this, this[CACHE].get(key)) + } -LRUCache.prototype.load = function (arr) { - // reset the cache - this.reset() - - var now = Date.now() - // A previous serialized cache has the most recent items first - for (var l = arr.length - 1; l >= 0; l--) { - var hit = arr[l] - var expiresAt = hit.e || 0 - if (expiresAt === 0) { - // the item was created without expiration in a non aged cache - this.set(hit.k, hit.v) - } else { - var maxAge = expiresAt - now - // dont add already expired items - if (maxAge > 0) { - this.set(hit.k, hit.v, maxAge) + load (arr) { + // reset the cache + this.reset() + + const now = Date.now() + // A previous serialized cache has the most recent items first + for (let l = arr.length - 1; l >= 0; l--) { + const hit = arr[l] + const expiresAt = hit.e || 0 + if (expiresAt === 0) + // the item was created without expiration in a non aged cache + this.set(hit.k, hit.v) + else { + const maxAge = expiresAt - now + // dont add already expired items + if (maxAge > 0) { + this.set(hit.k, hit.v, maxAge) + } } } } -} -LRUCache.prototype.prune = function () { - var self = this - this[CACHE].forEach(function (value, key) { - get(self, key, false) - }) + prune () { + this[CACHE].forEach((value, key) => get(this, key, false)) + } } -function get (self, key, doUse) { - var node = self[CACHE].get(key) +const get = (self, key, doUse) => { + const node = self[CACHE].get(key) if (node) { - var hit = node.value + const hit = node.value if (isStale(self, hit)) { del(self, node) - if (!self[ALLOW_STALE]) hit = undefined + if (!self[ALLOW_STALE]) + return undefined } else { if (doUse) { + if (self[UPDATE_AGE_ON_GET]) + node.value.now = Date.now() self[LRU_LIST].unshiftNode(node) } } - if (hit) hit = hit.value + return hit.value } - return hit } -function isStale (self, hit) { - if (!hit || (!hit.maxAge && !self[MAX_AGE])) { +const isStale = (self, hit) => { + if (!hit || (!hit.maxAge && !self[MAX_AGE])) return false - } - var stale = false - var diff = Date.now() - hit.now - if (hit.maxAge) { - stale = diff > hit.maxAge - } else { - stale = self[MAX_AGE] && (diff > self[MAX_AGE]) - } - return stale + + const diff = Date.now() - hit.now + return hit.maxAge ? 
diff > hit.maxAge + : self[MAX_AGE] && (diff > self[MAX_AGE]) } -function trim (self) { +const trim = self => { if (self[LENGTH] > self[MAX]) { - for (var walker = self[LRU_LIST].tail; - self[LENGTH] > self[MAX] && walker !== null;) { + for (let walker = self[LRU_LIST].tail; + self[LENGTH] > self[MAX] && walker !== null;) { // We know that we're about to delete this one, and also // what the next least recently used key will be, so just // go ahead and set it now. - var prev = walker.prev + const prev = walker.prev del(self, walker) walker = prev } } } -function del (self, node) { +const del = (self, node) => { if (node) { - var hit = node.value - if (self[DISPOSE]) { + const hit = node.value + if (self[DISPOSE]) self[DISPOSE](hit.key, hit.value) - } + self[LENGTH] -= hit.length self[CACHE].delete(hit.key) self[LRU_LIST].removeNode(node) } } -// classy, since V8 prefers predictable objects. -function Entry (key, value, length, now, maxAge) { - this.key = key - this.value = value - this.length = length - this.now = now - this.maxAge = maxAge || 0 +class Entry { + constructor (key, value, length, now, maxAge) { + this.key = key + this.value = value + this.length = length + this.now = now + this.maxAge = maxAge || 0 + } +} + +const forEachStep = (self, fn, node, thisp) => { + let hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + hit = undefined + } + if (hit) + fn.call(thisp, hit.value, hit.key, self) } + +module.exports = LRUCache diff --git a/node_modules/lru-cache/package.json b/node_modules/lru-cache/package.json index 238a6894e929f..3b5a1f8b3fa2d 100644 --- a/node_modules/lru-cache/package.json +++ b/node_modules/lru-cache/package.json @@ -1,43 +1,28 @@ { - "_args": [ - [ - "lru-cache@4.1.3", - "/Users/rebecca/code/npm" - ] - ], - "_from": "lru-cache@4.1.3", - "_id": "lru-cache@4.1.3", + "_from": "lru-cache@5.1.1", + "_id": "lru-cache@5.1.1", "_inBundle": false, - "_integrity": "sha512-fFEhvcgzuIoJVUF8fYr5KR0YqxD238zgObTps31YdADwPPAp82a4M8TrckkWyx7ekNlf9aBcVn81cFwwXngrJA==", + "_integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "_location": "/lru-cache", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "lru-cache@4.1.3", + "raw": "lru-cache@5.1.1", "name": "lru-cache", "escapedName": "lru-cache", - "rawSpec": "4.1.3", + "rawSpec": "5.1.1", "saveSpec": null, - "fetchSpec": "4.1.3" + "fetchSpec": "5.1.1" }, "_requiredBy": [ - "/", - "/cacache", - "/cross-spawn", - "/foreground-child/cross-spawn", - "/libnpmhook/npm-registry-fetch", - "/make-fetch-happen", - "/npm-profile/cacache", - "/npm-profile/make-fetch-happen", - "/npm-registry-fetch", - "/npm-registry-fetch/cacache", - "/npm-registry-fetch/make-fetch-happen", - "/pacote" + "#USER", + "/" ], - "_resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.3.tgz", - "_spec": "4.1.3", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "_shasum": "1da27e6710271947695daf6848e847f01d84b920", + "_spec": "lru-cache@5.1.1", + "_where": "/Users/isaacs/dev/npm/cli", "author": { "name": "Isaac Z. 
Schlueter", "email": "i@izs.me" @@ -45,15 +30,15 @@ "bugs": { "url": "https://github.com/isaacs/node-lru-cache/issues" }, + "bundleDependencies": false, "dependencies": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" + "yallist": "^3.0.2" }, + "deprecated": false, "description": "A cache object that deletes the least-recently-used items.", "devDependencies": { "benchmark": "^2.1.4", - "standard": "^5.4.1", - "tap": "^11.1.4" + "tap": "^12.1.0" }, "files": [ "index.js" @@ -72,11 +57,12 @@ "url": "git://github.com/isaacs/node-lru-cache.git" }, "scripts": { + "coveragerport": "tap --coverage-report=html", "postpublish": "git push origin --all; git push origin --tags", - "posttest": "standard test/*.js index.js", "postversion": "npm publish", "preversion": "npm test", + "snap": "TAP_SNAPSHOT=1 tap test/*.js -J", "test": "tap test/*.js --100 -J" }, - "version": "4.1.3" + "version": "5.1.1" } diff --git a/node_modules/make-fetch-happen/CHANGELOG.md b/node_modules/make-fetch-happen/CHANGELOG.md index b83beda41379d..c73bd4de4fafc 100644 --- a/node_modules/make-fetch-happen/CHANGELOG.md +++ b/node_modules/make-fetch-happen/CHANGELOG.md @@ -2,6 +2,43 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [5.0.2](https://github.com/zkat/make-fetch-happen/compare/v5.0.1...v5.0.2) (2019-11-14) + + +### Bug Fixes + +* **streams:** only provide a size and not a boolean to highWaterMark & update travis environments ([a367a14](https://github.com/zkat/make-fetch-happen/commit/a367a14)) +* `highWaterMark` bug @ v5 ([#10](https://github.com/zkat/make-fetch-happen/issues/10)) ([4e4f4e0](https://github.com/zkat/make-fetch-happen/commit/4e4f4e0)) + + + + +## [5.0.1](https://github.com/zkat/make-fetch-happen/compare/v5.0.0...v5.0.1) (2019-10-23) + + + + +# [5.0.0](https://github.com/zkat/make-fetch-happen/compare/v4.0.2...v5.0.0) (2019-07-15) + + +### Features + +* cacache@12, no need for uid/gid opts ([fdb956f](https://github.com/zkat/make-fetch-happen/commit/fdb956f)) + + +### BREAKING CHANGES + +* cache uid and gid are inferred from the cache folder itself, +not passed in as options. + + + + +## [4.0.2](https://github.com/zkat/make-fetch-happen/compare/v4.0.1...v4.0.2) (2019-07-02) + + + ## [4.0.1](https://github.com/zkat/make-fetch-happen/compare/v4.0.0...v4.0.1) (2018-04-12) diff --git a/node_modules/make-fetch-happen/cache.js b/node_modules/make-fetch-happen/cache.js index 0c519e69fbe81..f00de14a8844a 100644 --- a/node_modules/make-fetch-happen/cache.js +++ b/node_modules/make-fetch-happen/cache.js @@ -31,8 +31,6 @@ function cacheKey (req) { module.exports = class Cache { constructor (path, opts) { this._path = path - this._uid = opts && opts.uid - this._gid = opts && opts.gid this.Promise = (opts && opts.Promise) || Promise } @@ -118,8 +116,6 @@ module.exports = class Cache { reqHeaders: req.headers.raw(), resHeaders: response.headers.raw() }, - uid: this._uid, - gid: this._gid, size, memoize: fitInMemory && opts.memoize } @@ -173,7 +169,7 @@ module.exports = class Cache { cacheTargetStream ? 
cacheTargetStream.end(done) : done() }) const oldBody = response.body - const newBody = through({highWaterMark: fitInMemory && MAX_MEM_SIZE}) + const newBody = through({highWaterMark: MAX_MEM_SIZE}) response.body = newBody oldBody.once('error', err => newBody.emit('error', err)) newBody.once('error', err => oldBody.emit('error', err)) diff --git a/node_modules/make-fetch-happen/package.json b/node_modules/make-fetch-happen/package.json index 9aa7f113c5955..a2c7397bc8c4f 100644 --- a/node_modules/make-fetch-happen/package.json +++ b/node_modules/make-fetch-happen/package.json @@ -1,28 +1,30 @@ { - "_from": "make-fetch-happen@^4.0.1", - "_id": "make-fetch-happen@4.0.1", + "_from": "make-fetch-happen@5.0.2", + "_id": "make-fetch-happen@5.0.2", "_inBundle": false, - "_integrity": "sha512-7R5ivfy9ilRJ1EMKIOziwrns9fGeAD4bAha8EB7BIiBBLHm2KeTUGCrICFt2rbHfzheTLynv50GnNTK1zDTrcQ==", + "_integrity": "sha512-07JHC0r1ykIoruKO8ifMXu+xEU8qOXDFETylktdug6vJDACnP+HKevOu3PXyNPzFyTSlz8vrBYlBO1JZRe8Cag==", "_location": "/make-fetch-happen", "_phantomChildren": {}, "_requested": { - "type": "range", + "type": "version", "registry": true, - "raw": "make-fetch-happen@^4.0.1", + "raw": "make-fetch-happen@5.0.2", "name": "make-fetch-happen", "escapedName": "make-fetch-happen", - "rawSpec": "^4.0.1", + "rawSpec": "5.0.2", "saveSpec": null, - "fetchSpec": "^4.0.1" + "fetchSpec": "5.0.2" }, "_requiredBy": [ - "/libnpmhook/npm-registry-fetch", + "#USER", + "/", + "/npm-registry-fetch", "/pacote" ], - "_resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-4.0.1.tgz", - "_shasum": "141497cb878f243ba93136c83d8aba12c216c083", - "_spec": "make-fetch-happen@^4.0.1", - "_where": "/Users/rebecca/code/npm/node_modules/pacote", + "_resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-5.0.2.tgz", + "_shasum": "aa8387104f2687edca01c8687ee45013d02d19bd", + "_spec": "make-fetch-happen@5.0.2", + "_where": "/Users/claudiahdz/npm/cli", "author": { "name": "Kat Marchán", "email": "kzm@zkat.tech" @@ -33,11 +35,11 @@ "bundleDependencies": false, "dependencies": { "agentkeepalive": "^3.4.1", - "cacache": "^11.0.1", + "cacache": "^12.0.0", "http-cache-semantics": "^3.8.1", "http-proxy-agent": "^2.1.0", - "https-proxy-agent": "^2.2.1", - "lru-cache": "^4.1.2", + "https-proxy-agent": "^2.2.3", + "lru-cache": "^5.1.1", "mississippi": "^3.0.0", "node-fetch-npm": "^2.0.2", "promise-retry": "^1.1.1", @@ -57,7 +59,7 @@ "standard": "^11.0.1", "standard-version": "^4.3.0", "tacks": "^1.2.6", - "tap": "^11.1.3", + "tap": "^12.7.0", "weallbehave": "^1.0.0", "weallcontribute": "^1.0.7" }, @@ -83,7 +85,7 @@ "url": "git+https://github.com/zkat/make-fetch-happen.git" }, "scripts": { - "postrelease": "npm publish && git push --follow-tags", + "postrelease": "npm publish --tag=legacy && git push --follow-tags", "prerelease": "npm t", "pretest": "standard", "release": "standard-version -s", @@ -91,5 +93,5 @@ "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", "update-contrib": "weallcontribute -o . 
&& git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" }, - "version": "4.0.1" + "version": "5.0.2" } diff --git a/node_modules/map-age-cleaner/dist/index.d.ts b/node_modules/map-age-cleaner/dist/index.d.ts new file mode 100644 index 0000000000000..fbf5ce08f5fc1 --- /dev/null +++ b/node_modules/map-age-cleaner/dist/index.d.ts @@ -0,0 +1,20 @@ +interface Entry { + [key: string]: any; +} +interface MaxAgeEntry extends Entry { + maxAge: number; +} +/** + * Automatically cleanup the items in the provided `map`. The property of the expiration timestamp should be named `maxAge`. + * + * @param map - Map instance which should be cleaned up. + */ +export default function mapAgeCleaner(map: Map): any; +/** + * Automatically cleanup the items in the provided `map`. + * + * @param map - Map instance which should be cleaned up. + * @param property - Name of the property which olds the expiry timestamp. + */ +export default function mapAgeCleaner(map: Map, property: string): any; +export {}; diff --git a/node_modules/map-age-cleaner/dist/index.js b/node_modules/map-age-cleaner/dist/index.js new file mode 100644 index 0000000000000..ff137dec85f56 --- /dev/null +++ b/node_modules/map-age-cleaner/dist/index.js @@ -0,0 +1,92 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const p_defer_1 = __importDefault(require("p-defer")); +function mapAgeCleaner(map, property = 'maxAge') { + let processingKey; + let processingTimer; + let processingDeferred; + const cleanup = () => __awaiter(this, void 0, void 0, function* () { + if (processingKey !== undefined) { + // If we are already processing an item, we can safely exit + return; + } + const setupTimer = (item) => __awaiter(this, void 0, void 0, function* () { + processingDeferred = p_defer_1.default(); + const delay = item[1][property] - Date.now(); + if (delay <= 0) { + // Remove the item immediately if the delay is equal to or below 0 + map.delete(item[0]); + processingDeferred.resolve(); + return; + } + // Keep track of the current processed key + processingKey = item[0]; + processingTimer = setTimeout(() => { + // Remove the item when the timeout fires + map.delete(item[0]); + if (processingDeferred) { + processingDeferred.resolve(); + } + }, delay); + // tslint:disable-next-line:strict-type-predicates + if (typeof processingTimer.unref === 'function') { + // Don't hold up the process from exiting + processingTimer.unref(); + } + return processingDeferred.promise; + }); + try { + for (const entry of map) { + yield setupTimer(entry); + } + } + catch (_a) { + // Do nothing if an error occurs, this means the timer was cleaned up and we should stop processing + } + processingKey = undefined; + }); + const reset = () => { + processingKey = undefined; + if (processingTimer !== undefined) { + clearTimeout(processingTimer); + processingTimer = undefined; + } + if (processingDeferred !== undefined) { // tslint:disable-line:early-exit + processingDeferred.reject(undefined); + processingDeferred = undefined; + } + }; + const originalSet = map.set.bind(map); + map.set = (key, value) => { + if (map.has(key)) { + // If the key already exist, remove it so we can add it back at the end of the map. + map.delete(key); + } + // Call the original `map.set` + const result = originalSet(key, value); + // If we are already processing a key and the key added is the current processed key, stop processing it + if (processingKey && processingKey === key) { + reset(); + } + // Always run the cleanup method in case it wasn't started yet + cleanup(); // tslint:disable-line:no-floating-promises + return result; + }; + cleanup(); // tslint:disable-line:no-floating-promises + return map; +} +exports.default = mapAgeCleaner; +// Add support for CJS +module.exports = mapAgeCleaner; +module.exports.default = mapAgeCleaner; diff --git a/node_modules/map-age-cleaner/license b/node_modules/map-age-cleaner/license new file mode 100644 index 0000000000000..0711ab006f8a9 --- /dev/null +++ b/node_modules/map-age-cleaner/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sam Verschueren (github.com/SamVerschueren) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/map-age-cleaner/package.json b/node_modules/map-age-cleaner/package.json new file mode 100644 index 0000000000000..fc6be6aac5f14 --- /dev/null +++ b/node_modules/map-age-cleaner/package.json @@ -0,0 +1,95 @@ +{ + "_args": [ + [ + "map-age-cleaner@0.1.3", + "/Users/mperrotte/npminc/cli" + ] + ], + "_development": true, + "_from": "map-age-cleaner@0.1.3", + "_id": "map-age-cleaner@0.1.3", + "_inBundle": false, + "_integrity": "sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w==", + "_location": "/map-age-cleaner", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "map-age-cleaner@0.1.3", + "name": "map-age-cleaner", + "escapedName": "map-age-cleaner", + "rawSpec": "0.1.3", + "saveSpec": null, + "fetchSpec": "0.1.3" + }, + "_requiredBy": [ + "/nyc/mem" + ], + "_resolved": "https://registry.npmjs.org/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz", + "_spec": "0.1.3", + "_where": "/Users/mperrotte/npminc/cli", + "author": { + "name": "Sam Verschueren", + "email": "sam.verschueren@gmail.com", + "url": "github.com/SamVerschueren" + }, + "bugs": { + "url": "https://github.com/SamVerschueren/map-age-cleaner/issues" + }, + "dependencies": { + "p-defer": "^1.0.0" + }, + "description": "Automatically cleanup expired items in a Map", + "devDependencies": { + "@types/delay": "^2.0.1", + "@types/node": "^10.7.1", + "ava": "^0.25.0", + "codecov": "^3.0.0", + "del-cli": "^1.1.0", + "delay": "^3.0.0", + "nyc": "^12.0.0", + "tslint": "^5.11.0", + "tslint-xo": "^0.9.0", + "typescript": "^3.0.1" + }, + "engines": { + "node": ">=6" + }, + "files": [ + "dist/index.js", + "dist/index.d.ts" + ], + "homepage": "https://github.com/SamVerschueren/map-age-cleaner#readme", + "keywords": [ + "map", + "age", + "cleaner", + "maxage", + "expire", + "expiration", + "expiring" + ], + "license": "MIT", + "main": "dist/index.js", + "name": "map-age-cleaner", + "nyc": { + "exclude": [ + "dist/test.js" + ] + }, + "repository": { + "type": "git", + "url": "git+https://github.com/SamVerschueren/map-age-cleaner.git" + }, + "scripts": { + "build": "npm run clean && tsc", + "clean": "del-cli dist", + "lint": "tslint --format stylish --project .", + "prepublishOnly": "npm run build", + "pretest": "npm run build -- --sourceMap", + "test": "npm run lint && nyc ava dist/test.js" + }, + "sideEffects": false, + "typings": "dist/index.d.ts", + "version": "0.1.3" +} diff --git a/node_modules/map-age-cleaner/readme.md b/node_modules/map-age-cleaner/readme.md new file mode 100644 index 0000000000000..471d93353a07e --- /dev/null +++ b/node_modules/map-age-cleaner/readme.md @@ -0,0 +1,67 @@ +# map-age-cleaner + +[![Build Status](https://travis-ci.org/SamVerschueren/map-age-cleaner.svg?branch=master)](https://travis-ci.org/SamVerschueren/map-age-cleaner) [![codecov](https://codecov.io/gh/SamVerschueren/map-age-cleaner/badge.svg?branch=master)](https://codecov.io/gh/SamVerschueren/map-age-cleaner?branch=master) + +> Automatically cleanup expired items in a Map + + +## Install + +``` 
+$ npm install map-age-cleaner +``` + + +## Usage + +```js +import mapAgeCleaner from 'map-age-cleaner'; + +const map = new Map([ + ['unicorn', {data: '🦄', maxAge: Date.now() + 1000}] +]); + +mapAgeCleaner(map); + +map.has('unicorn'); +//=> true + +// Wait for 1 second... + +map.has('unicorn'); +//=> false +``` + +> **Note**: Items have to be ordered ascending based on the expiry property. This means that the item which will be expired first, should be in the first position of the `Map`. + + +## API + +### mapAgeCleaner(map, [property]) + +Returns the `Map` instance. + +#### map + +Type: `Map` + +Map instance which should be cleaned up. + +#### property + +Type: `string`
    +Default: `maxAge` + +Name of the property which olds the expiry timestamp. + + +## Related + +- [expiry-map](https://github.com/SamVerschueren/expiry-map) - A `Map` implementation with expirable items +- [expiry-set](https://github.com/SamVerschueren/expiry-set) - A `Set` implementation with expirable keys +- [mem](https://github.com/sindresorhus/mem) - Memoize functions + + +## License + +MIT © [Sam Verschueren](https://github.com/SamVerschueren) diff --git a/node_modules/mem/index.d.ts b/node_modules/mem/index.d.ts new file mode 100644 index 0000000000000..786625520b203 --- /dev/null +++ b/node_modules/mem/index.d.ts @@ -0,0 +1,96 @@ +declare namespace mem { + interface CacheStorage { + has(key: KeyType): boolean; + get(key: KeyType): ValueType | undefined; + set(key: KeyType, value: ValueType): void; + delete(key: KeyType): void; + clear?: () => void; + } + + interface Options< + ArgumentsType extends unknown[], + CacheKeyType extends unknown, + ReturnType extends unknown + > { + /** + Milliseconds until the cache expires. + + @default Infinity + */ + readonly maxAge?: number; + + /** + Determines the cache key for storing the result based on the function arguments. By default, if there's only one argument and it's a [primitive](https://developer.mozilla.org/en-US/docs/Glossary/Primitive), it's used directly as a key, otherwise it's all the function arguments JSON stringified as an array. + + You could for example change it to only cache on the first argument `x => JSON.stringify(x)`. + */ + readonly cacheKey?: (...arguments: ArgumentsType) => CacheKeyType; + + /** + Use a different cache storage. You could for example use a `WeakMap` instead or [`quick-lru`](https://github.com/sindresorhus/quick-lru) for a LRU cache. + + @default new Map() + */ + readonly cache?: CacheStorage; + + /** + Cache rejected promises. + + @default false + */ + readonly cachePromiseRejection?: boolean; + } +} + +declare const mem: { + /** + [Memoize](https://en.wikipedia.org/wiki/Memoization) functions - An optimization used to speed up consecutive function calls by caching the result of calls with identical input. + + @param fn - Function to be memoized. + + @example + ``` + import mem = require('mem'); + + let i = 0; + const counter = () => ++i; + const memoized = mem(counter); + + memoized('foo'); + //=> 1 + + // Cached as it's the same arguments + memoized('foo'); + //=> 1 + + // Not cached anymore as the arguments changed + memoized('bar'); + //=> 2 + + memoized('bar'); + //=> 2 + ``` + */ + < + ArgumentsType extends unknown[], + ReturnType extends unknown, + CacheKeyType extends unknown + >( + fn: (...arguments: ArgumentsType) => ReturnType, + options?: mem.Options + ): (...arguments: ArgumentsType) => ReturnType; + + /** + Clear all cached data of a memoized function. + + @param fn - Memoized function. 
+ */ + clear( + fn: (...arguments: ArgumentsType) => ReturnType + ): void; + + // TODO: Remove this for the next major release + default: typeof mem; +}; + +export = mem; diff --git a/node_modules/mem/index.js b/node_modules/mem/index.js index aa5a0739822c7..51faf012c2fc4 100644 --- a/node_modules/mem/index.js +++ b/node_modules/mem/index.js @@ -1,51 +1,84 @@ 'use strict'; const mimicFn = require('mimic-fn'); +const isPromise = require('p-is-promise'); +const mapAgeCleaner = require('map-age-cleaner'); const cacheStore = new WeakMap(); -const defaultCacheKey = function (x) { - if (arguments.length === 1 && (x === null || x === undefined || (typeof x !== 'function' && typeof x !== 'object'))) { - return x; +const defaultCacheKey = (...arguments_) => { + if (arguments_.length === 0) { + return '__defaultKey'; } - return JSON.stringify(arguments); + if (arguments_.length === 1) { + const [firstArgument] = arguments_; + if ( + firstArgument === null || + firstArgument === undefined || + (typeof firstArgument !== 'function' && typeof firstArgument !== 'object') + ) { + return firstArgument; + } + } + + return JSON.stringify(arguments_); }; -module.exports = (fn, opts) => { - opts = Object.assign({ +const mem = (fn, options) => { + options = Object.assign({ cacheKey: defaultCacheKey, - cache: new Map() - }, opts); + cache: new Map(), + cachePromiseRejection: false + }, options); + + if (typeof options.maxAge === 'number') { + mapAgeCleaner(options.cache); + } - const memoized = function () { - const cache = cacheStore.get(memoized); - const key = opts.cacheKey.apply(null, arguments); + const {cache} = options; + options.maxAge = options.maxAge || 0; - if (cache.has(key)) { - const c = cache.get(key); + const setData = (key, data) => { + cache.set(key, { + data, + maxAge: Date.now() + options.maxAge + }); + }; + + const memoized = function (...arguments_) { + const key = options.cacheKey(...arguments_); - if (typeof opts.maxAge !== 'number' || Date.now() < c.maxAge) { - return c.data; - } + if (cache.has(key)) { + return cache.get(key).data; } - const ret = fn.apply(null, arguments); + const cacheItem = fn.call(this, ...arguments_); - cache.set(key, { - data: ret, - maxAge: Date.now() + (opts.maxAge || 0) - }); + setData(key, cacheItem); + + if (isPromise(cacheItem) && options.cachePromiseRejection === false) { + // Remove rejected promises from cache unless `cachePromiseRejection` is set to `true` + cacheItem.catch(() => cache.delete(key)); + } - return ret; + return cacheItem; }; - mimicFn(memoized, fn); + try { + // The below call will throw in some host environments + // See https://github.com/sindresorhus/mimic-fn/issues/10 + mimicFn(memoized, fn); + } catch (_) {} - cacheStore.set(memoized, opts.cache); + cacheStore.set(memoized, options.cache); return memoized; }; +module.exports = mem; +// TODO: Remove this for the next major release +module.exports.default = mem; + module.exports.clear = fn => { const cache = cacheStore.get(fn); diff --git a/node_modules/mem/license b/node_modules/mem/license index 654d0bfe94343..e7af2f77107d7 100644 --- a/node_modules/mem/license +++ b/node_modules/mem/license @@ -1,21 +1,9 @@ -The MIT License (MIT) +MIT License Copyright (c) Sindre Sorhus (sindresorhus.com) -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, 
and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/mem/node_modules/mimic-fn/index.d.ts b/node_modules/mem/node_modules/mimic-fn/index.d.ts new file mode 100644 index 0000000000000..b4047d58902eb --- /dev/null +++ b/node_modules/mem/node_modules/mimic-fn/index.d.ts @@ -0,0 +1,54 @@ +declare const mimicFn: { + /** + Make a function mimic another one. It will copy over the properties `name`, `length`, `displayName`, and any custom properties you may have set. + + @param to - Mimicking function. + @param from - Function to mimic. + @returns The modified `to` function. 
+ + @example + ``` + import mimicFn = require('mimic-fn'); + + function foo() {} + foo.unicorn = '🦄'; + + function wrapper() { + return foo(); + } + + console.log(wrapper.name); + //=> 'wrapper' + + mimicFn(wrapper, foo); + + console.log(wrapper.name); + //=> 'foo' + + console.log(wrapper.unicorn); + //=> '🦄' + ``` + */ + < + ArgumentsType extends unknown[], + ReturnType, + FunctionType extends (...arguments: ArgumentsType) => ReturnType + >( + to: (...arguments: ArgumentsType) => ReturnType, + from: FunctionType + ): FunctionType; + + // TODO: Remove this for the next major release, refactor the whole definition to: + // declare function mimicFn< + // ArgumentsType extends unknown[], + // ReturnType, + // FunctionType extends (...arguments: ArgumentsType) => ReturnType + // >( + // to: (...arguments: ArgumentsType) => ReturnType, + // from: FunctionType + // ): FunctionType; + // export = mimicFn; + default: typeof mimicFn; +}; + +export = mimicFn; diff --git a/node_modules/mem/node_modules/mimic-fn/index.js b/node_modules/mem/node_modules/mimic-fn/index.js new file mode 100644 index 0000000000000..1a597051750da --- /dev/null +++ b/node_modules/mem/node_modules/mimic-fn/index.js @@ -0,0 +1,13 @@ +'use strict'; + +const mimicFn = (to, from) => { + for (const prop of Reflect.ownKeys(from)) { + Object.defineProperty(to, prop, Object.getOwnPropertyDescriptor(from, prop)); + } + + return to; +}; + +module.exports = mimicFn; +// TODO: Remove this for the next major release +module.exports.default = mimicFn; diff --git a/node_modules/mem/node_modules/mimic-fn/license b/node_modules/mem/node_modules/mimic-fn/license new file mode 100644 index 0000000000000..e7af2f77107d7 --- /dev/null +++ b/node_modules/mem/node_modules/mimic-fn/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
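As a rough illustration of the descriptor-copying approach in the `mimic-fn` implementation above (a sketch only; the `original` and `wrapper` names are placeholders, not part of the vendored package):

```js
const mimicFn = require('mimic-fn');

function original(a, b) { return a + b; }
original.meta = 'custom property';

// An arrow wrapper would normally report its own name and a length of 0.
const wrapper = (...args) => original(...args);

// Copying property descriptors brings over the non-enumerable built-ins
// (`name`, `length`) as well as any custom properties.
mimicFn(wrapper, original);

console.log(wrapper.name);   //=> 'original'
console.log(wrapper.length); //=> 2
console.log(wrapper.meta);   //=> 'custom property'
```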
diff --git a/node_modules/mem/node_modules/mimic-fn/package.json b/node_modules/mem/node_modules/mimic-fn/package.json new file mode 100644 index 0000000000000..b4970a28d40c8 --- /dev/null +++ b/node_modules/mem/node_modules/mimic-fn/package.json @@ -0,0 +1,74 @@ +{ + "_from": "mimic-fn@^2.0.0", + "_id": "mimic-fn@2.1.0", + "_inBundle": false, + "_integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "_location": "/mem/mimic-fn", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "mimic-fn@^2.0.0", + "name": "mimic-fn", + "escapedName": "mimic-fn", + "rawSpec": "^2.0.0", + "saveSpec": null, + "fetchSpec": "^2.0.0" + }, + "_requiredBy": [ + "/mem" + ], + "_resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "_shasum": "7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b", + "_spec": "mimic-fn@^2.0.0", + "_where": "/Users/mperrotte/npminc/cli/node_modules/mem", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/mimic-fn/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Make a function mimic another one", + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.1", + "xo": "^0.24.0" + }, + "engines": { + "node": ">=6" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "homepage": "https://github.com/sindresorhus/mimic-fn#readme", + "keywords": [ + "function", + "mimic", + "imitate", + "rename", + "copy", + "inherit", + "properties", + "name", + "func", + "fn", + "set", + "infer", + "change" + ], + "license": "MIT", + "name": "mimic-fn", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/mimic-fn.git" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "version": "2.1.0" +} diff --git a/node_modules/mem/node_modules/mimic-fn/readme.md b/node_modules/mem/node_modules/mimic-fn/readme.md new file mode 100644 index 0000000000000..0ef8a13d7e0d9 --- /dev/null +++ b/node_modules/mem/node_modules/mimic-fn/readme.md @@ -0,0 +1,69 @@ +# mimic-fn [![Build Status](https://travis-ci.org/sindresorhus/mimic-fn.svg?branch=master)](https://travis-ci.org/sindresorhus/mimic-fn) + +> Make a function mimic another one + +Useful when you wrap a function in another function and like to preserve the original name and other properties. + + +## Install + +``` +$ npm install mimic-fn +``` + + +## Usage + +```js +const mimicFn = require('mimic-fn'); + +function foo() {} +foo.unicorn = '🦄'; + +function wrapper() { + return foo(); +} + +console.log(wrapper.name); +//=> 'wrapper' + +mimicFn(wrapper, foo); + +console.log(wrapper.name); +//=> 'foo' + +console.log(wrapper.unicorn); +//=> '🦄' +``` + + +## API + +It will copy over the properties `name`, `length`, `displayName`, and any custom properties you may have set. + +### mimicFn(to, from) + +Modifies the `to` function and returns it. + +#### to + +Type: `Function` + +Mimicking function. + +#### from + +Type: `Function` + +Function to mimic. 
+ + +## Related + +- [rename-fn](https://github.com/sindresorhus/rename-fn) - Rename a function +- [keep-func-props](https://github.com/ehmicky/keep-func-props) - Wrap a function without changing its name, length and other properties + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/mem/package.json b/node_modules/mem/package.json index 27eefe8d720d4..eedf07469c4c3 100644 --- a/node_modules/mem/package.json +++ b/node_modules/mem/package.json @@ -1,27 +1,27 @@ { - "_from": "mem@^1.1.0", - "_id": "mem@1.1.0", + "_from": "mem@^4.0.0", + "_id": "mem@4.3.0", "_inBundle": false, - "_integrity": "sha1-Xt1StIXKHZAP5kiVUFOZoN+kX3Y=", + "_integrity": "sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w==", "_location": "/mem", "_phantomChildren": {}, "_requested": { "type": "range", "registry": true, - "raw": "mem@^1.1.0", + "raw": "mem@^4.0.0", "name": "mem", "escapedName": "mem", - "rawSpec": "^1.1.0", + "rawSpec": "^4.0.0", "saveSpec": null, - "fetchSpec": "^1.1.0" + "fetchSpec": "^4.0.0" }, "_requiredBy": [ "/os-locale" ], - "_resolved": "https://registry.npmjs.org/mem/-/mem-1.1.0.tgz", - "_shasum": "5edd52b485ca1d900fe64895505399a0dfa45f76", - "_spec": "mem@^1.1.0", - "_where": "/Users/rebecca/code/npm/node_modules/os-locale", + "_resolved": "https://registry.npmjs.org/mem/-/mem-4.3.0.tgz", + "_shasum": "461af497bc4ae09608cdb2e60eefb69bff744178", + "_spec": "mem@^4.0.0", + "_where": "/Users/mperrotte/npminc/cli/node_modules/os-locale", "author": { "name": "Sindre Sorhus", "email": "sindresorhus@gmail.com", @@ -32,20 +32,24 @@ }, "bundleDependencies": false, "dependencies": { - "mimic-fn": "^1.0.0" + "map-age-cleaner": "^0.1.1", + "mimic-fn": "^2.0.0", + "p-is-promise": "^2.0.0" }, "deprecated": false, "description": "Memoize functions - An optimization used to speed up consecutive function calls by caching the result of calls with identical input", "devDependencies": { - "ava": "*", - "delay": "^1.1.0", - "xo": "*" + "ava": "^1.4.1", + "delay": "^4.1.0", + "tsd": "^0.7.1", + "xo": "^0.24.0" }, "engines": { - "node": ">=4" + "node": ">=6" }, "files": [ - "index.js" + "index.js", + "index.d.ts" ], "homepage": "https://github.com/sindresorhus/mem#readme", "keywords": [ @@ -68,10 +72,7 @@ "url": "git+https://github.com/sindresorhus/mem.git" }, "scripts": { - "test": "xo && ava" + "test": "xo && ava && tsd" }, - "version": "1.1.0", - "xo": { - "esnext": true - } + "version": "4.3.0" } diff --git a/node_modules/mem/readme.md b/node_modules/mem/readme.md index 7ebab84f0e716..add4222b62d71 100644 --- a/node_modules/mem/readme.md +++ b/node_modules/mem/readme.md @@ -2,11 +2,13 @@ > [Memoize](https://en.wikipedia.org/wiki/Memoization) functions - An optimization used to speed up consecutive function calls by caching the result of calls with identical input +Memory is automatically released when an item expires. 
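To make that expiry behaviour concrete, here is a rough sketch against the vendored `mem@4` (the `delay` helper is an assumption, e.g. from the `delay` package, and is only used here to wait past `maxAge`):

```js
const mem = require('mem');
const delay = require('delay'); // assumed helper, only for the wait below

let calls = 0;
const counter = () => ++calls;
const memoized = mem(counter, {maxAge: 100});

(async () => {
  console.log(memoized('a')); //=> 1
  console.log(memoized('a')); //=> 1 (served from cache)

  await delay(200);

  // The entry has expired and been removed by map-age-cleaner,
  // so the underlying function runs again.
  console.log(memoized('a')); //=> 2
})();
```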
+ ## Install ``` -$ npm install --save mem +$ npm install mem ``` @@ -22,11 +24,11 @@ const memoized = mem(counter); memoized('foo'); //=> 1 -// cached as it's the same arguments +// Cached as it's the same arguments memoized('foo'); //=> 1 -// not cached anymore as the arguments changed +// Not cached anymore as the arguments changed memoized('bar'); //=> 2 @@ -40,35 +42,37 @@ memoized('bar'); const mem = require('mem'); let i = 0; -const counter = () => Promise.resolve(++i); +const counter = async () => ++i; const memoized = mem(counter); -memoized().then(a => { - console.log(a); +(async () => { + console.log(await memoized()); //=> 1 - memoized().then(b => { - // the return value didn't increase as it's cached - console.log(b); - //=> 1 - }); -}); + // The return value didn't increase as it's cached + console.log(await memoized()); + //=> 1 +})(); ``` ```js const mem = require('mem'); const got = require('got'); +const delay = require('delay'); + const memGot = mem(got, {maxAge: 1000}); -memGot('sindresorhus.com').then(() => { - // this call is cached - memGot('sindresorhus.com').then(() => { - setTimeout(() => { - // this call is not cached as the cache has expired - memGot('sindresorhus.com').then(() => {}); - }, 2000); - }); -}); +(async () => { + await memGot('sindresorhus.com'); + + // This call is cached + await memGot('sindresorhus.com'); + + await delay(2000); + + // This call is not cached as the cache has expired + await memGot('sindresorhus.com'); +})(); ``` @@ -84,6 +88,8 @@ Function to be memoized. #### options +Type: `Object` + ##### maxAge Type: `number`
    @@ -104,7 +110,14 @@ You could for example change it to only cache on the first argument `x => JSON.s Type: `Object`
    Default: `new Map()` -Use a different cache storage. Must implement the following methods: `.has(key)`, `.get(key)`, `.set(key, value)`, and optionally `.clear()`. You could for example use a `WeakMap` instead. +Use a different cache storage. Must implement the following methods: `.has(key)`, `.get(key)`, `.set(key, value)`, `.delete(key)`, and optionally `.clear()`. You could for example use a `WeakMap` instead or [`quick-lru`](https://github.com/sindresorhus/quick-lru) for a LRU cache. + +##### cachePromiseRejection + +Type: `boolean`
    +Default: `false` + +Cache rejected promises. ### mem.clear(fn) @@ -133,15 +146,22 @@ const got = require('got'); const cache = new StatsMap(); const memGot = mem(got, {cache}); -memGot('sindresorhus.com') - .then(() => memGot('sindresorhus.com')) - .then(() => memGot('sindresorhus.com')); +(async () => { + await memGot('sindresorhus.com'); + await memGot('sindresorhus.com'); + await memGot('sindresorhus.com'); -console.log(cache.stats); -//=> {hits: 2, misses: 1} + console.log(cache.stats); + //=> {hits: 2, misses: 1} +})(); ``` +## Related + +- [p-memoize](https://github.com/sindresorhus/p-memoize) - Memoize promise-returning & async functions + + ## License MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/minizlib/README.md b/node_modules/minizlib/README.md index 2b585545efe14..4097b85225542 100644 --- a/node_modules/minizlib/README.md +++ b/node_modules/minizlib/README.md @@ -1,44 +1,53 @@ # minizlib -A tiny fast zlib stream built on [minipass](http://npm.im/minipass) -and Node.js's zlib binding. +A fast zlib stream built on [minipass](http://npm.im/minipass) and +Node.js's zlib binding. This module was created to serve the needs of -[node-tar](http://npm.im/tar) v2. If your needs are different, then -it may not be for you. +[node-tar](http://npm.im/tar) and +[minipass-fetch](http://npm.im/minipass-fetch). + +Brotli is supported in versions of node with a Brotli binding. ## How does this differ from the streams in `require('zlib')`? First, there are no convenience methods to compress or decompress a buffer. If you want those, use the built-in `zlib` module. This is -only streams. +only streams. That being said, Minipass streams to make it fairly easy to +use as one-liners: `new zlib.Deflate().end(data).read()` will return the +deflate compressed result. This module compresses and decompresses the data as fast as you feed it in. It is synchronous, and runs on the main process thread. Zlib -operations can be high CPU, but they're very fast, and doing it this -way means much less bookkeeping and artificial deferral. +and Brotli operations can be high CPU, but they're very fast, and doing it +this way means much less bookkeeping and artificial deferral. Node's built in zlib streams are built on top of `stream.Transform`. They do the maximally safe thing with respect to consistent asynchrony, buffering, and backpressure. -This module _does_ support backpressure, and will buffer output chunks -that are not consumed, but is less of a mediator between the input and -output. There is no high or low watermarks, no state objects, and so -artificial async deferrals. It will not protect you from Zalgo. - -If you write, data will be emitted right away. If you write -everything synchronously in one tick, and you are listening to the -`data` event to consume it, then it'll all be emitted right away in -that same tick. If you want data to be emitted in the next tick, then -write it in the next tick. - -It is thus the responsibility of the reader and writer to manage their -own consumption and process execution flow. - -The goal is to compress and decompress as fast as possible, even for -files that are too large to store all in one buffer. - -The API is very similar to the built-in zlib module. There are -classes that you instantiate with `new` and they are streams that can -be piped together. +See [Minipass](http://npm.im/minipass) for more on the differences between +Node.js core streams and Minipass streams, and the convenience methods +provided by that class. 
+ +## Classes + +- Deflate +- Inflate +- Gzip +- Gunzip +- DeflateRaw +- InflateRaw +- Unzip +- BrotliCompress (Node v10 and higher) +- BrotliDecompress (Node v10 and higher) + +## USAGE + +```js +const zlib = require('minizlib') +const input = sourceOfCompressedData() +const decode = new zlib.BrotliDecompress() +const output = whereToWriteTheDecodedData() +input.pipe(decode).pipe(output) +``` diff --git a/node_modules/minizlib/constants.js b/node_modules/minizlib/constants.js index 4edffde86f852..641ebc73129bf 100644 --- a/node_modules/minizlib/constants.js +++ b/node_modules/minizlib/constants.js @@ -1,4 +1,11 @@ -module.exports = Object.freeze({ +// Update with any zlib constants that are added or changed in the future. +// Node v6 didn't export this, so we just hard code the version and rely +// on all the other hard-coded values from zlib v4736. When node v6 +// support drops, we can just export the realZlibConstants object. +const realZlibConstants = require('zlib').constants || + /* istanbul ignore next */ { ZLIB_VERNUM: 4736 } + +module.exports = Object.freeze(Object.assign(Object.create(null), { Z_NO_FLUSH: 0, Z_PARTIAL_FLUSH: 1, Z_SYNC_FLUSH: 2, @@ -23,7 +30,6 @@ module.exports = Object.freeze({ Z_RLE: 3, Z_FIXED: 4, Z_DEFAULT_STRATEGY: 0, - ZLIB_VERNUM: 4736, DEFLATE: 1, INFLATE: 2, GZIP: 3, @@ -31,6 +37,8 @@ module.exports = Object.freeze({ DEFLATERAW: 5, INFLATERAW: 6, UNZIP: 7, + BROTLI_DECODE: 8, + BROTLI_ENCODE: 9, Z_MIN_WINDOWBITS: 8, Z_MAX_WINDOWBITS: 15, Z_DEFAULT_WINDOWBITS: 15, @@ -42,5 +50,66 @@ module.exports = Object.freeze({ Z_DEFAULT_MEMLEVEL: 8, Z_MIN_LEVEL: -1, Z_MAX_LEVEL: 9, - Z_DEFAULT_LEVEL: -1 -}) + Z_DEFAULT_LEVEL: -1, + BROTLI_OPERATION_PROCESS: 0, + BROTLI_OPERATION_FLUSH: 1, + BROTLI_OPERATION_FINISH: 2, + BROTLI_OPERATION_EMIT_METADATA: 3, + BROTLI_MODE_GENERIC: 0, + BROTLI_MODE_TEXT: 1, + BROTLI_MODE_FONT: 2, + BROTLI_DEFAULT_MODE: 0, + BROTLI_MIN_QUALITY: 0, + BROTLI_MAX_QUALITY: 11, + BROTLI_DEFAULT_QUALITY: 11, + BROTLI_MIN_WINDOW_BITS: 10, + BROTLI_MAX_WINDOW_BITS: 24, + BROTLI_LARGE_MAX_WINDOW_BITS: 30, + BROTLI_DEFAULT_WINDOW: 22, + BROTLI_MIN_INPUT_BLOCK_BITS: 16, + BROTLI_MAX_INPUT_BLOCK_BITS: 24, + BROTLI_PARAM_MODE: 0, + BROTLI_PARAM_QUALITY: 1, + BROTLI_PARAM_LGWIN: 2, + BROTLI_PARAM_LGBLOCK: 3, + BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4, + BROTLI_PARAM_SIZE_HINT: 5, + BROTLI_PARAM_LARGE_WINDOW: 6, + BROTLI_PARAM_NPOSTFIX: 7, + BROTLI_PARAM_NDIRECT: 8, + BROTLI_DECODER_RESULT_ERROR: 0, + BROTLI_DECODER_RESULT_SUCCESS: 1, + BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0, + BROTLI_DECODER_PARAM_LARGE_WINDOW: 1, + BROTLI_DECODER_NO_ERROR: 0, + BROTLI_DECODER_SUCCESS: 1, + BROTLI_DECODER_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1, + BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5, + BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6, + BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7, + BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10, + BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11, + BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12, + BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13, + BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14, + 
BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15, + BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16, + BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19, + BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21, + BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27, + BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30, + BROTLI_DECODER_ERROR_UNREACHABLE: -31, +}, realZlibConstants)) diff --git a/node_modules/minizlib/index.js b/node_modules/minizlib/index.js index c91a59c92dbd3..295047b9c1447 100644 --- a/node_modules/minizlib/index.js +++ b/node_modules/minizlib/index.js @@ -2,16 +2,24 @@ const assert = require('assert') const Buffer = require('buffer').Buffer -const binding = process.binding('zlib') +const realZlib = require('zlib') const constants = exports.constants = require('./constants.js') -const MiniPass = require('minipass') +const Minipass = require('minipass') + +const OriginalBufferConcat = Buffer.concat class ZlibError extends Error { - constructor (msg, errno) { - super('zlib: ' + msg) - this.errno = errno - this.code = codes.get(errno) + constructor (err) { + super('zlib: ' + err.message) + this.code = err.code + this.errno = err.errno + /* istanbul ignore if */ + if (!this.code) + this.code = 'ZLIB_ERROR' + + this.message = 'zlib: ' + err.message + Error.captureStackTrace(this, this.constructor) } get name () { @@ -19,153 +27,51 @@ class ZlibError extends Error { } } -// translation table for return codes. -const codes = new Map([ - [constants.Z_OK, 'Z_OK'], - [constants.Z_STREAM_END, 'Z_STREAM_END'], - [constants.Z_NEED_DICT, 'Z_NEED_DICT'], - [constants.Z_ERRNO, 'Z_ERRNO'], - [constants.Z_STREAM_ERROR, 'Z_STREAM_ERROR'], - [constants.Z_DATA_ERROR, 'Z_DATA_ERROR'], - [constants.Z_MEM_ERROR, 'Z_MEM_ERROR'], - [constants.Z_BUF_ERROR, 'Z_BUF_ERROR'], - [constants.Z_VERSION_ERROR, 'Z_VERSION_ERROR'] -]) - -const validFlushFlags = new Set([ - constants.Z_NO_FLUSH, - constants.Z_PARTIAL_FLUSH, - constants.Z_SYNC_FLUSH, - constants.Z_FULL_FLUSH, - constants.Z_FINISH, - constants.Z_BLOCK -]) - -const strategies = new Set([ - constants.Z_FILTERED, - constants.Z_HUFFMAN_ONLY, - constants.Z_RLE, - constants.Z_FIXED, - constants.Z_DEFAULT_STRATEGY -]) - // the Zlib class they all inherit from // This thing manages the queue of requests, and returns // true or false if there is anything in the queue when // you call the .write() method. 
const _opts = Symbol('opts') -const _chunkSize = Symbol('chunkSize') const _flushFlag = Symbol('flushFlag') -const _finishFlush = Symbol('finishFlush') +const _finishFlushFlag = Symbol('finishFlushFlag') +const _fullFlushFlag = Symbol('fullFlushFlag') const _handle = Symbol('handle') -const _hadError = Symbol('hadError') -const _buffer = Symbol('buffer') -const _offset = Symbol('offset') +const _onError = Symbol('onError') +const _sawError = Symbol('sawError') const _level = Symbol('level') const _strategy = Symbol('strategy') const _ended = Symbol('ended') -const _writeState = Symbol('writeState') +const _defaultFullFlush = Symbol('_defaultFullFlush') -class Zlib extends MiniPass { +class ZlibBase extends Minipass { constructor (opts, mode) { + if (!opts || typeof opts !== 'object') + throw new TypeError('invalid options for ZlibBase constructor') + super(opts) this[_ended] = false - this[_opts] = opts = opts || {} - this[_chunkSize] = opts.chunkSize || constants.Z_DEFAULT_CHUNK - if (opts.flush && !validFlushFlags.has(opts.flush)) { - throw new TypeError('Invalid flush flag: ' + opts.flush) - } - if (opts.finishFlush && !validFlushFlags.has(opts.finishFlush)) { - throw new TypeError('Invalid flush flag: ' + opts.finishFlush) - } - - this[_flushFlag] = opts.flush || constants.Z_NO_FLUSH - this[_finishFlush] = typeof opts.finishFlush !== 'undefined' ? - opts.finishFlush : constants.Z_FINISH - - if (opts.chunkSize) { - if (opts.chunkSize < constants.Z_MIN_CHUNK) { - throw new RangeError('Invalid chunk size: ' + opts.chunkSize) - } - } - - if (opts.windowBits) { - if (opts.windowBits < constants.Z_MIN_WINDOWBITS || - opts.windowBits > constants.Z_MAX_WINDOWBITS) { - throw new RangeError('Invalid windowBits: ' + opts.windowBits) - } + this[_opts] = opts + + this[_flushFlag] = opts.flush + this[_finishFlushFlag] = opts.finishFlush + // this will throw if any options are invalid for the class selected + try { + this[_handle] = new realZlib[mode](opts) + } catch (er) { + // make sure that all errors get decorated properly + throw new ZlibError(er) } - if (opts.level) { - if (opts.level < constants.Z_MIN_LEVEL || - opts.level > constants.Z_MAX_LEVEL) { - throw new RangeError('Invalid compression level: ' + opts.level) - } - } - - if (opts.memLevel) { - if (opts.memLevel < constants.Z_MIN_MEMLEVEL || - opts.memLevel > constants.Z_MAX_MEMLEVEL) { - throw new RangeError('Invalid memLevel: ' + opts.memLevel) - } - } - - if (opts.strategy && !(strategies.has(opts.strategy))) - throw new TypeError('Invalid strategy: ' + opts.strategy) - - if (opts.dictionary) { - if (!(opts.dictionary instanceof Buffer)) { - throw new TypeError('Invalid dictionary: it should be a Buffer instance') - } - } - - this[_handle] = new binding.Zlib(mode) - - this[_hadError] = false - this[_handle].onerror = (message, errno) => { + this[_onError] = (err) => { + this[_sawError] = true // there is no way to cleanly recover. // continuing only obscures problems. this.close() - this[_hadError] = true - - const error = new ZlibError(message, errno) - this.emit('error', error) - } - - const level = typeof opts.level === 'number' ? opts.level - : constants.Z_DEFAULT_COMPRESSION - - var strategy = typeof opts.strategy === 'number' ? 
opts.strategy - : constants.Z_DEFAULT_STRATEGY - - this[_writeState] = new Uint32Array(2); - const window = opts.windowBits || constants.Z_DEFAULT_WINDOWBITS - const memLevel = opts.memLevel || constants.Z_DEFAULT_MEMLEVEL - - // API changed in node v9 - /* istanbul ignore next */ - if (/^v[0-8]\./.test(process.version)) { - this[_handle].init(window, - level, - memLevel, - strategy, - opts.dictionary) - } else { - this[_handle].init(window, - level, - memLevel, - strategy, - this[_writeState], - () => {}, - opts.dictionary) + this.emit('error', err) } - this[_buffer] = Buffer.allocUnsafe(this[_chunkSize]) - this[_offset] = 0 - this[_level] = level - this[_strategy] = strategy - - this.once('end', this.close) + this[_handle].on('error', er => this[_onError](new ZlibError(er))) + this.once('end', () => this.close) } close () { @@ -176,57 +82,26 @@ class Zlib extends MiniPass { } } - params (level, strategy) { - if (!this[_handle]) - throw new Error('cannot switch params when binding is closed') - - // no way to test this without also not supporting params at all - /* istanbul ignore if */ - if (!this[_handle].params) - throw new Error('not supported in this implementation') - - if (level < constants.Z_MIN_LEVEL || - level > constants.Z_MAX_LEVEL) { - throw new RangeError('Invalid compression level: ' + level) - } - - if (!(strategies.has(strategy))) - throw new TypeError('Invalid strategy: ' + strategy) - - if (this[_level] !== level || this[_strategy] !== strategy) { - this.flush(constants.Z_SYNC_FLUSH) + reset () { + if (!this[_sawError]) { assert(this[_handle], 'zlib binding closed') - this[_handle].params(level, strategy) - /* istanbul ignore else */ - if (!this[_hadError]) { - this[_level] = level - this[_strategy] = strategy - } + return this[_handle].reset() } } - reset () { - assert(this[_handle], 'zlib binding closed') - return this[_handle].reset() - } - - flush (kind) { - if (kind === undefined) - kind = constants.Z_FULL_FLUSH - + flush (flushFlag) { if (this.ended) return - const flushFlag = this[_flushFlag] - this[_flushFlag] = kind - this.write(Buffer.alloc(0)) - this[_flushFlag] = flushFlag + if (typeof flushFlag !== 'number') + flushFlag = this[_fullFlushFlag] + this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag })) } end (chunk, encoding, cb) { if (chunk) this.write(chunk, encoding) - this.flush(this[_finishFlush]) + this.flush(this[_finishFlushFlag]) this[_ended] = true return super.end(null, null, cb) } @@ -244,64 +119,59 @@ class Zlib extends MiniPass { if (typeof chunk === 'string') chunk = Buffer.from(chunk, encoding) - let availInBefore = chunk && chunk.length - let availOutBefore = this[_chunkSize] - this[_offset] - let inOff = 0 // the offset of the input buffer - const flushFlag = this[_flushFlag] - let writeReturn = true - + if (this[_sawError]) + return assert(this[_handle], 'zlib binding closed') - do { - let res = this[_handle].writeSync( - flushFlag, - chunk, // in - inOff, // in_off - availInBefore, // in_len - this[_buffer], // out - this[_offset], //out_off - availOutBefore // out_len - ) - - if (this[_hadError]) - break - - // API changed in v9 - /* istanbul ignore next */ - let availInAfter = res ? res[0] : this[_writeState][1] - /* istanbul ignore next */ - let availOutAfter = res ? 
res[1] : this[_writeState][0] - - const have = availOutBefore - availOutAfter - assert(have >= 0, 'have should not go down') - - if (have > 0) { - const out = this[_buffer].slice( - this[_offset], this[_offset] + have - ) - - this[_offset] += have - // serve some output to the consumer. - writeReturn = super.write(out) && writeReturn - } - // exhausted the output buffer, or used all the input create a new one. - if (availOutAfter === 0 || this[_offset] >= this[_chunkSize]) { - availOutBefore = this[_chunkSize] - this[_offset] = 0 - this[_buffer] = Buffer.allocUnsafe(this[_chunkSize]) + // _processChunk tries to .close() the native handle after it's done, so we + // intercept that by temporarily making it a no-op. + const nativeHandle = this[_handle]._handle + const originalNativeClose = nativeHandle.close + nativeHandle.close = () => {} + const originalClose = this[_handle].close + this[_handle].close = () => {} + // It also calls `Buffer.concat()` at the end, which may be convenient + // for some, but which we are not interested in as it slows us down. + Buffer.concat = (args) => args + let result + try { + const flushFlag = typeof chunk[_flushFlag] === 'number' + ? chunk[_flushFlag] : this[_flushFlag] + result = this[_handle]._processChunk(chunk, flushFlag) + // if we don't throw, reset it back how it was + Buffer.concat = OriginalBufferConcat + } catch (err) { + // or if we do, put Buffer.concat() back before we emit error + // Error events call into user code, which may call Buffer.concat() + Buffer.concat = OriginalBufferConcat + this[_onError](new ZlibError(err)) + } finally { + if (this[_handle]) { + // Core zlib resets `_handle` to null after attempting to close the + // native handle. Our no-op handler prevented actual closure, but we + // need to restore the `._handle` property. + this[_handle]._handle = nativeHandle + nativeHandle.close = originalNativeClose + this[_handle].close = originalClose + // `_processChunk()` adds an 'error' listener. If we don't remove it + // after each call, these handlers start piling up. + this[_handle].removeAllListeners('error') } + } - if (availOutAfter === 0) { - // Not actually done. Need to reprocess. - // Also, update the availInBefore to the availInAfter value, - // so that if we have to hit it a third (fourth, etc.) time, - // it'll have the correct byte counts. - inOff += (availInBefore - availInAfter) - availInBefore = availInAfter - continue + let writeReturn + if (result) { + if (Array.isArray(result) && result.length > 0) { + // The first buffer is always `handle._outBuffer`, which would be + // re-used for later invocations; so, we always have to copy that one. 
+ writeReturn = super.write(Buffer.from(result[0])) + for (let i = 1; i < result.length; i++) { + writeReturn = super.write(result[i]) + } + } else { + writeReturn = super.write(Buffer.from(result)) } - break - } while (!this[_hadError]) + } if (cb) cb() @@ -309,49 +179,124 @@ class Zlib extends MiniPass { } } +class Zlib extends ZlibBase { + constructor (opts, mode) { + opts = opts || {} + + opts.flush = opts.flush || constants.Z_NO_FLUSH + opts.finishFlush = opts.finishFlush || constants.Z_FINISH + super(opts, mode) + + this[_fullFlushFlag] = constants.Z_FULL_FLUSH + this[_level] = opts.level + this[_strategy] = opts.strategy + } + + params (level, strategy) { + if (this[_sawError]) + return + + if (!this[_handle]) + throw new Error('cannot switch params when binding is closed') + + // no way to test this without also not supporting params at all + /* istanbul ignore if */ + if (!this[_handle].params) + throw new Error('not supported in this implementation') + + if (this[_level] !== level || this[_strategy] !== strategy) { + this.flush(constants.Z_SYNC_FLUSH) + assert(this[_handle], 'zlib binding closed') + // .params() calls .flush(), but the latter is always async in the + // core zlib. We override .flush() temporarily to intercept that and + // flush synchronously. + const origFlush = this[_handle].flush + this[_handle].flush = (flushFlag, cb) => { + this.flush(flushFlag) + cb() + } + try { + this[_handle].params(level, strategy) + } finally { + this[_handle].flush = origFlush + } + /* istanbul ignore else */ + if (this[_handle]) { + this[_level] = level + this[_strategy] = strategy + } + } + } +} + // minimal 2-byte header class Deflate extends Zlib { constructor (opts) { - super(opts, constants.DEFLATE) + super(opts, 'Deflate') } } class Inflate extends Zlib { constructor (opts) { - super(opts, constants.INFLATE) + super(opts, 'Inflate') } } // gzip - bigger header, same deflate compression class Gzip extends Zlib { constructor (opts) { - super(opts, constants.GZIP) + super(opts, 'Gzip') } } class Gunzip extends Zlib { constructor (opts) { - super(opts, constants.GUNZIP) + super(opts, 'Gunzip') } } // raw - no header class DeflateRaw extends Zlib { constructor (opts) { - super(opts, constants.DEFLATERAW) + super(opts, 'DeflateRaw') } } class InflateRaw extends Zlib { constructor (opts) { - super(opts, constants.INFLATERAW) + super(opts, 'InflateRaw') } } // auto-detect header. 
class Unzip extends Zlib { constructor (opts) { - super(opts, constants.UNZIP) + super(opts, 'Unzip') + } +} + +class Brotli extends ZlibBase { + constructor (opts, mode) { + opts = opts || {} + + opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS + opts.finishFlush = opts.finishFlush || constants.BROTLI_OPERATION_FINISH + + super(opts, mode) + + this[_fullFlushFlag] = constants.BROTLI_OPERATION_FLUSH + } +} + +class BrotliCompress extends Brotli { + constructor (opts) { + super(opts, 'BrotliCompress') + } +} + +class BrotliDecompress extends Brotli { + constructor (opts) { + super(opts, 'BrotliDecompress') } } @@ -362,3 +307,14 @@ exports.Gunzip = Gunzip exports.DeflateRaw = DeflateRaw exports.InflateRaw = InflateRaw exports.Unzip = Unzip +/* istanbul ignore else */ +if (typeof realZlib.BrotliCompress === 'function') { + exports.BrotliCompress = BrotliCompress + exports.BrotliDecompress = BrotliDecompress +} else { + exports.BrotliCompress = exports.BrotliDecompress = class { + constructor () { + throw new Error('Brotli is not supported in this version of Node.js') + } + } +} diff --git a/node_modules/minizlib/node_modules/minipass/LICENSE b/node_modules/minizlib/node_modules/minipass/LICENSE new file mode 100644 index 0000000000000..20a4762540923 --- /dev/null +++ b/node_modules/minizlib/node_modules/minipass/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/minizlib/node_modules/minipass/README.md b/node_modules/minizlib/node_modules/minipass/README.md new file mode 100644 index 0000000000000..c989beea0e6d9 --- /dev/null +++ b/node_modules/minizlib/node_modules/minipass/README.md @@ -0,0 +1,606 @@ +# minipass + +A _very_ minimal implementation of a [PassThrough +stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough) + +[It's very +fast](https://docs.google.com/spreadsheets/d/1oObKSrVwLX_7Ut4Z6g3fZW-AX1j1-k6w-cDsrkaSbHM/edit#gid=0) +for objects, strings, and buffers. + +Supports pipe()ing (including multi-pipe() and backpressure +transmission), buffering data until either a `data` event handler or +`pipe()` is added (so you don't lose the first chunk), and most other +cases where PassThrough is a good idea. + +There is a `read()` method, but it's much more efficient to consume +data from this stream via `'data'` events or by calling `pipe()` into +some other stream. Calling `read()` requires the buffer to be +flattened in some cases, which requires copying memory. + +There is also no `unpipe()` method. Once you start piping, there is +no stopping it! + +If you set `objectMode: true` in the options, then whatever is written +will be emitted. Otherwise, it'll do a minimal amount of Buffer +copying to ensure proper Streams semantics when `read(n)` is called. 
+ +`objectMode` can also be set by doing `stream.objectMode = true`, or by +writing any non-string/non-buffer data. `objectMode` cannot be set to +false once it is set. + +This is not a `through` or `through2` stream. It doesn't transform +the data, it just passes it right through. If you want to transform +the data, extend the class, and override the `write()` method. Once +you're done transforming the data however you want, call +`super.write()` with the transform output. + +For some examples of streams that extend Minipass in various ways, check +out: + +- [minizlib](http://npm.im/minizlib) +- [fs-minipass](http://npm.im/fs-minipass) +- [tar](http://npm.im/tar) +- [minipass-collect](http://npm.im/minipass-collect) +- [minipass-flush](http://npm.im/minipass-flush) +- [minipass-pipeline](http://npm.im/minipass-pipeline) +- [tap](http://npm.im/tap) +- [tap-parser](http://npm.im/tap) +- [treport](http://npm.im/tap) + +## Differences from Node.js Streams + +There are several things that make Minipass streams different from (and in +some ways superior to) Node.js core streams. + +Please read these caveats if you are familiar with node-core streams and +intend to use Minipass streams in your programs. + +### Timing + +Minipass streams are designed to support synchronous use-cases. Thus, data +is emitted as soon as it is available, always. It is buffered until read, +but no longer. Another way to look at it is that Minipass streams are +exactly as synchronous as the logic that writes into them. + +This can be surprising if your code relies on `PassThrough.write()` always +providing data on the next tick rather than the current one, or being able +to call `resume()` and not have the entire buffer disappear immediately. + +However, without this synchronicity guarantee, there would be no way for +Minipass to achieve the speeds it does, or support the synchronous use +cases that it does. Simply put, waiting takes time. + +This non-deferring approach makes Minipass streams much easier to reason +about, especially in the context of Promises and other flow-control +mechanisms. + +### No High/Low Water Marks + +Node.js core streams will optimistically fill up a buffer, returning `true` +on all writes until the limit is hit, even if the data has nowhere to go. +Then, they will not attempt to draw more data in until the buffer size dips +below a minimum value. + +Minipass streams are much simpler. The `write()` method will return `true` +if the data has somewhere to go (which is to say, given the timing +guarantees, that the data is already there by the time `write()` returns). + +If the data has nowhere to go, then `write()` returns false, and the data +sits in a buffer, to be drained out immediately as soon as anyone consumes +it. + +### Hazards of Buffering (or: Why Minipass Is So Fast) + +Since data written to a Minipass stream is immediately written all the way +through the pipeline, and `write()` always returns true/false based on +whether the data was fully flushed, backpressure is communicated +immediately to the upstream caller. This minimizes buffering.
+ +Consider this case: + +```js +const {PassThrough} = require('stream') +const p1 = new PassThrough({ highWaterMark: 1024 }) +const p2 = new PassThrough({ highWaterMark: 1024 }) +const p3 = new PassThrough({ highWaterMark: 1024 }) +const p4 = new PassThrough({ highWaterMark: 1024 }) + +p1.pipe(p2).pipe(p3).pipe(p4) +p4.on('data', () => console.log('made it through')) + +// this returns false and buffers, then writes to p2 on next tick (1) +// p2 returns false and buffers, pausing p1, then writes to p3 on next tick (2) +// p3 returns false and buffers, pausing p2, then writes to p4 on next tick (3) +// p4 returns false and buffers, pausing p3, then emits 'data' and 'drain' +// on next tick (4) +// p3 sees p4's 'drain' event, and calls resume(), emitting 'resume' and +// 'drain' on next tick (5) +// p2 sees p3's 'drain', calls resume(), emits 'resume' and 'drain' on next tick (6) +// p1 sees p2's 'drain', calls resume(), emits 'resume' and 'drain' on next +// tick (7) + +p1.write(Buffer.alloc(2048)) // returns false +``` + +Along the way, the data was buffered and deferred at each stage, and +multiple event deferrals happened, for an unblocked pipeline where it was +perfectly safe to write all the way through! + +Furthermore, setting a `highWaterMark` of `1024` might lead someone reading +the code to think an advisory maximum of 1KiB is being set for the +pipeline. However, the actual advisory buffering level is the _sum_ of +`highWaterMark` values, since each one has its own bucket. + +Consider the Minipass case: + +```js +const m1 = new Minipass() +const m2 = new Minipass() +const m3 = new Minipass() +const m4 = new Minipass() + +m1.pipe(m2).pipe(m3).pipe(m4) +m4.on('data', () => console.log('made it through')) + +// m1 is flowing, so it writes the data to m2 immediately +// m2 is flowing, so it writes the data to m3 immediately +// m3 is flowing, so it writes the data to m4 immediately +// m4 is flowing, so it fires the 'data' event immediately, returns true +// m4's write returned true, so m3 is still flowing, returns true +// m3's write returned true, so m2 is still flowing, returns true +// m2's write returned true, so m1 is still flowing, returns true +// No event deferrals or buffering along the way! + +m1.write(Buffer.alloc(2048)) // returns true +``` + +It is extremely unlikely that you _don't_ want to buffer any data written, +or _ever_ buffer data that can be flushed all the way through. Neither +node-core streams nor Minipass ever fail to buffer written data, but +node-core streams do a lot of unnecessary buffering and pausing. + +As always, the faster implementation is the one that does less stuff and +waits less time to do it. + +### Immediately emit `end` for empty streams (when not paused) + +If a stream is not paused, and `end()` is called before writing any data +into it, then it will emit `end` immediately. + +If you have logic that occurs on the `end` event which you don't want to +potentially happen immediately (for example, closing file descriptors, +moving on to the next entry in an archive parse stream, etc.) then be sure +to call `stream.pause()` on creation, and then `stream.resume()` once you +are ready to respond to the `end` event. + +### Emit `end` When Asked + +One hazard of immediately emitting `'end'` is that you may not yet have had +a chance to add a listener. In order to avoid this hazard, Minipass +streams safely re-emit the `'end'` event if a new listener is added after +`'end'` has been emitted. 
+ +Ie, if you do `stream.on('end', someFunction)`, and the stream has already +emitted `end`, then it will call the handler right away. (You can think of +this somewhat like attaching a new `.then(fn)` to a previously-resolved +Promise.) + +To prevent calling handlers multiple times who would not expect multiple +ends to occur, all listeners are removed from the `'end'` event whenever it +is emitted. + +### Impact of "immediate flow" on Tee-streams + +A "tee stream" is a stream piping to multiple destinations: + +```js +const tee = new Minipass() +t.pipe(dest1) +t.pipe(dest2) +t.write('foo') // goes to both destinations +``` + +Since Minipass streams _immediately_ process any pending data through the +pipeline when a new pipe destination is added, this can have surprising +effects, especially when a stream comes in from some other function and may +or may not have data in its buffer. + +```js +// WARNING! WILL LOSE DATA! +const src = new Minipass() +src.write('foo') +src.pipe(dest1) // 'foo' chunk flows to dest1 immediately, and is gone +src.pipe(dest2) // gets nothing! +``` + +The solution is to create a dedicated tee-stream junction that pipes to +both locations, and then pipe to _that_ instead. + +```js +// Safe example: tee to both places +const src = new Minipass() +src.write('foo') +const tee = new Minipass() +tee.pipe(dest1) +tee.pipe(dest2) +stream.pipe(tee) // tee gets 'foo', pipes to both locations +``` + +The same caveat applies to `on('data')` event listeners. The first one +added will _immediately_ receive all of the data, leaving nothing for the +second: + +```js +// WARNING! WILL LOSE DATA! +const src = new Minipass() +src.write('foo') +src.on('data', handler1) // receives 'foo' right away +src.on('data', handler2) // nothing to see here! +``` + +Using a dedicated tee-stream can be used in this case as well: + +```js +// Safe example: tee to both data handlers +const src = new Minipass() +src.write('foo') +const tee = new Minipass() +tee.on('data', handler1) +tee.on('data', handler2) +src.pipe(tee) +``` + +## USAGE + +It's a stream! Use it like a stream and it'll most likely do what you want. + +```js +const Minipass = require('minipass') +const mp = new Minipass(options) // optional: { encoding, objectMode } +mp.write('foo') +mp.pipe(someOtherStream) +mp.end('bar') +``` + +### OPTIONS + +* `encoding` How would you like the data coming _out_ of the stream to be + encoded? Accepts any values that can be passed to `Buffer.toString()`. +* `objectMode` Emit data exactly as it comes in. This will be flipped on + by default if you write() something other than a string or Buffer at any + point. Setting `objectMode: true` will prevent setting any encoding + value. + +### API + +Implements the user-facing portions of Node.js's `Readable` and `Writable` +streams. + +### Methods + +* `write(chunk, [encoding], [callback])` - Put data in. (Note that, in the + base Minipass class, the same data will come out.) Returns `false` if + the stream will buffer the next write, or true if it's still in + "flowing" mode. +* `end([chunk, [encoding]], [callback])` - Signal that you have no more + data to write. This will queue an `end` event to be fired when all the + data has been consumed. +* `setEncoding(encoding)` - Set the encoding for data coming of the + stream. This can only be done once. +* `pause()` - No more data for a while, please. This also prevents `end` + from being emitted for empty streams until the stream is resumed. +* `resume()` - Resume the stream. 
If there's data in the buffer, it is + all discarded. Any buffered events are immediately emitted. +* `pipe(dest)` - Send all output to the stream provided. There is no way + to unpipe. When data is emitted, it is immediately written to any and + all pipe destinations. +* `on(ev, fn)`, `emit(ev, fn)` - Minipass streams are EventEmitters. + Some events are given special treatment, however. (See below under + "events".) +* `promise()` - Returns a Promise that resolves when the stream emits + `end`, or rejects if the stream emits `error`. +* `collect()` - Return a Promise that resolves on `end` with an array + containing each chunk of data that was emitted, or rejects if the + stream emits `error`. Note that this consumes the stream data. +* `concat()` - Same as `collect()`, but concatenates the data into a + single Buffer object. Will reject the returned promise if the stream is + in objectMode, or if it goes into objectMode by the end of the data. +* `read(n)` - Consume `n` bytes of data out of the buffer. If `n` is not + provided, then consume all of it. If `n` bytes are not available, then + it returns null. **Note** consuming streams in this way is less + efficient, and can lead to unnecessary Buffer copying. +* `destroy([er])` - Destroy the stream. If an error is provided, then an + `'error'` event is emitted. If the stream has a `close()` method, and + has not emitted a `'close'` event yet, then `stream.close()` will be + called. Any Promises returned by `.promise()`, `.collect()` or + `.concat()` will be rejected. After being destroyed, writing to the + stream will emit an error. No more data will be emitted if the stream is + destroyed, even if it was previously buffered. + +### Properties + +* `bufferLength` Read-only. Total number of bytes buffered, or in the case + of objectMode, the total number of objects. +* `encoding` The encoding that has been set. (Setting this is equivalent + to calling `setEncoding(enc)` and has the same prohibition against + setting multiple times.) +* `flowing` Read-only. Boolean indicating whether a chunk written to the + stream will be immediately emitted. +* `emittedEnd` Read-only. Boolean indicating whether the end-ish events + (ie, `end`, `prefinish`, `finish`) have been emitted. Note that + listening on any end-ish event will immediately re-emit it if it has + already been emitted. +* `writable` Whether the stream is writable. Default `true`. Set to + `false` when `end()` is called. +* `readable` Whether the stream is readable. Default `true`. +* `buffer` A [yallist](http://npm.im/yallist) linked list of chunks written + to the stream that have not yet been emitted. (It's probably a bad idea + to mess with this.) +* `pipes` A [yallist](http://npm.im/yallist) linked list of streams that + this stream is piping into. (It's probably a bad idea to mess with + this.) +* `destroyed` A getter that indicates whether the stream was destroyed. +* `paused` True if the stream has been explicitly paused, otherwise false. +* `objectMode` Indicates whether the stream is in `objectMode`. Once set + to `true`, it cannot be set to `false`. + +### Events + +* `data` Emitted when there's data to read. Argument is the data to read. + This is never emitted while not flowing. If a listener is attached, that + will resume the stream. +* `end` Emitted when there's no more data to read. This will be emitted + immediately for empty streams when `end()` is called. If a listener is + attached, and `end` was already emitted, then it will be emitted again.
+ All listeners are removed when `end` is emitted. +* `prefinish` An end-ish event that follows the same logic as `end` and is + emitted in the same conditions where `end` is emitted. Emitted after + `'end'`. +* `finish` An end-ish event that follows the same logic as `end` and is + emitted in the same conditions where `end` is emitted. Emitted after + `'prefinish'`. +* `close` An indication that an underlying resource has been released. + Minipass does not emit this event, but will defer it until after `end` + has been emitted, since it throws off some stream libraries otherwise. +* `drain` Emitted when the internal buffer empties, and it is again + suitable to `write()` into the stream. +* `readable` Emitted when data is buffered and ready to be read by a + consumer. +* `resume` Emitted when stream changes state from buffering to flowing + mode. (Ie, when `resume` is called, `pipe` is called, or a `data` event + listener is added.) + +### Static Methods + +* `Minipass.isStream(stream)` Returns `true` if the argument is a stream, + and false otherwise. To be considered a stream, the object must be + either an instance of Minipass, or an EventEmitter that has either a + `pipe()` method, or both `write()` and `end()` methods. (Pretty much any + stream in node-land will return `true` for this.) + +## EXAMPLES + +Here are some examples of things you can do with Minipass streams. + +### simple "are you done yet" promise + +```js +mp.promise().then(() => { + // stream is finished +}, er => { + // stream emitted an error +}) +``` + +### collecting + +```js +mp.collect().then(all => { + // all is an array of all the data emitted + // encoding is supported in this case, so + // so the result will be a collection of strings if + // an encoding is specified, or buffers/objects if not. + // + // In an async function, you may do + // const data = await stream.collect() +}) +``` + +### collecting into a single blob + +This is a bit slower because it concatenates the data into one chunk for +you, but if you're going to do it yourself anyway, it's convenient this +way: + +```js +mp.concat().then(onebigchunk => { + // onebigchunk is a string if the stream + // had an encoding set, or a buffer otherwise. +}) +``` + +### iteration + +You can iterate over streams synchronously or asynchronously in +platforms that support it. + +Synchronous iteration will end when the currently available data is +consumed, even if the `end` event has not been reached. In string and +buffer mode, the data is concatenated, so unless multiple writes are +occurring in the same tick as the `read()`, sync iteration loops will +generally only have a single iteration. + +To consume chunks in this way exactly as they have been written, with +no flattening, create the stream with the `{ objectMode: true }` +option. + +```js +const mp = new Minipass({ objectMode: true }) +mp.write('a') +mp.write('b') +for (let letter of mp) { + console.log(letter) // a, b +} +mp.write('c') +mp.write('d') +for (let letter of mp) { + console.log(letter) // c, d +} +mp.write('e') +mp.end() +for (let letter of mp) { + console.log(letter) // e +} +for (let letter of mp) { + console.log(letter) // nothing +} +``` + +Asynchronous iteration will continue until the end event is reached, +consuming all of the data. 
+
+```js
+const mp = new Minipass({ encoding: 'utf8' })
+
+// some source of some data
+let i = 5
+const inter = setInterval(() => {
+  if (i --> 0)
+    mp.write(Buffer.from('foo\n', 'utf8'))
+  else {
+    mp.end()
+    clearInterval(inter)
+  }
+}, 100)
+
+// consume the data with asynchronous iteration
+async function consume () {
+  for await (let chunk of mp) {
+    console.log(chunk)
+  }
+  return 'ok'
+}
+
+consume().then(res => console.log(res))
+// logs `foo\n` 5 times, and then `ok`
+```
+
+### subclass that `console.log()`s everything written into it
+
+```js
+class Logger extends Minipass {
+  write (chunk, encoding, callback) {
+    console.log('WRITE', chunk, encoding)
+    return super.write(chunk, encoding, callback)
+  }
+  end (chunk, encoding, callback) {
+    console.log('END', chunk, encoding)
+    return super.end(chunk, encoding, callback)
+  }
+}
+
+someSource.pipe(new Logger()).pipe(someDest)
+```
+
+### same thing, but using an inline anonymous class
+
+```js
+// js classes are fun
+someSource
+  .pipe(new (class extends Minipass {
+    emit (ev, ...data) {
+      // let's also log events, because debugging some weird thing
+      console.log('EMIT', ev)
+      return super.emit(ev, ...data)
+    }
+    write (chunk, encoding, callback) {
+      console.log('WRITE', chunk, encoding)
+      return super.write(chunk, encoding, callback)
+    }
+    end (chunk, encoding, callback) {
+      console.log('END', chunk, encoding)
+      return super.end(chunk, encoding, callback)
+    }
+  }))
+  .pipe(someDest)
+```
+
+### subclass that defers 'end' for some reason
+
+```js
+class SlowEnd extends Minipass {
+  emit (ev, ...args) {
+    if (ev === 'end') {
+      console.log('going to end, hold on a sec')
+      setTimeout(() => {
+        console.log('ok, ready to end now')
+        super.emit('end', ...args)
+      }, 100)
+    } else {
+      return super.emit(ev, ...args)
+    }
+  }
+}
+```
+
+### transform that creates newline-delimited JSON
+
+```js
+class NDJSONEncode extends Minipass {
+  write (obj, cb) {
+    try {
+      // JSON.stringify can throw, emit an error on that
+      return super.write(JSON.stringify(obj) + '\n', 'utf8', cb)
+    } catch (er) {
+      this.emit('error', er)
+    }
+  }
+  end (obj, cb) {
+    if (typeof obj === 'function') {
+      cb = obj
+      obj = undefined
+    }
+    if (obj !== undefined) {
+      this.write(obj)
+    }
+    return super.end(cb)
+  }
+}
+```
+
+### transform that parses newline-delimited JSON
+
+```js
+class NDJSONDecode extends Minipass {
+  constructor (options) {
+    // always be in object mode, as far as Minipass is concerned
+    super({ objectMode: true })
+    this._jsonBuffer = ''
+  }
+  write (chunk, encoding, cb) {
+    if (typeof chunk === 'string' &&
+        typeof encoding === 'string' &&
+        encoding !== 'utf8') {
+      chunk = Buffer.from(chunk, encoding).toString()
+    } else if (Buffer.isBuffer(chunk)) {
+      chunk = chunk.toString()
+    }
+    if (typeof encoding === 'function') {
+      cb = encoding
+    }
+    const jsonData = (this._jsonBuffer + chunk).split('\n')
+    this._jsonBuffer = jsonData.pop()
+    for (let i = 0; i < jsonData.length; i++) {
+      let parsed
+      try {
+        // JSON.parse throws on malformed lines; skip them and emit an error
+        parsed = JSON.parse(jsonData[i])
+      } catch (er) {
+        this.emit('error', er)
+        continue
+      }
+      super.write(parsed)
+    }
+    if (cb)
+      cb()
+  }
+}
+```
diff --git a/node_modules/minizlib/node_modules/minipass/index.js b/node_modules/minizlib/node_modules/minipass/index.js
new file mode 100644
index 0000000000000..c072352d448a9
--- /dev/null
+++ b/node_modules/minizlib/node_modules/minipass/index.js
@@ -0,0 +1,537 @@
+'use strict'
+const EE = require('events')
+const Yallist = require('yallist')
+const SD = require('string_decoder').StringDecoder
+
+const EOF = Symbol('EOF')
+const MAYBE_EMIT_END =
Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const EMITTING_END = Symbol('emittingEnd') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const FLUSHCHUNK = Symbol('flushChunk') +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const PAUSED = Symbol('paused') +const RESUME = Symbol('resume') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') +const DESTROYED = Symbol('destroyed') + +// TODO remove when Node v8 support drops +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = doIter && Symbol.asyncIterator + || Symbol('asyncIterator not implemented') +const ITERATOR = doIter && Symbol.iterator + || Symbol('iterator not implemented') + +// Buffer in node 4.x < 4.5.0 doesn't have working Buffer.from +// or Buffer.alloc, and Buffer in node 10 deprecated the ctor. +// .M, this is fine .\^/M.. +const B = Buffer.alloc ? Buffer + : /* istanbul ignore next */ require('safe-buffer').Buffer + +// events that mean 'the stream is over' +// these are treated specially, and re-emitted +// if they are listened for after emitting. +const isEndish = ev => + ev === 'end' || + ev === 'finish' || + ev === 'prefinish' + +const isArrayBuffer = b => b instanceof ArrayBuffer || + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0 + +const isArrayBufferView = b => !B.isBuffer(b) && ArrayBuffer.isView(b) + +module.exports = class Minipass extends EE { + constructor (options) { + super() + this[FLOWING] = false + // whether we're explicitly paused + this[PAUSED] = false + this.pipes = new Yallist() + this.buffer = new Yallist() + this[OBJECTMODE] = options && options.objectMode || false + if (this[OBJECTMODE]) + this[ENCODING] = null + else + this[ENCODING] = options && options.encoding || null + if (this[ENCODING] === 'buffer') + this[ENCODING] = null + this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[EMITTING_END] = false + this[CLOSED] = false + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + this[DESTROYED] = false + } + + get bufferLength () { return this[BUFFERLENGTH] } + + get encoding () { return this[ENCODING] } + set encoding (enc) { + if (this[OBJECTMODE]) + throw new Error('cannot set encoding in objectMode') + + if (this[ENCODING] && enc !== this[ENCODING] && + (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH])) + throw new Error('cannot change encoding') + + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? 
new SD(enc) : null + if (this.buffer.length) + this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk)) + } + + this[ENCODING] = enc + } + + setEncoding (enc) { + this.encoding = enc + } + + get objectMode () { return this[OBJECTMODE] } + set objectMode (ॐ ) { this[OBJECTMODE] = this[OBJECTMODE] || !!ॐ } + + write (chunk, encoding, cb) { + if (this[EOF]) + throw new Error('write after end') + + if (this[DESTROYED]) { + this.emit('error', Object.assign( + new Error('Cannot call write after a stream was destroyed'), + { code: 'ERR_STREAM_DESTROYED' } + )) + return true + } + + if (typeof encoding === 'function') + cb = encoding, encoding = 'utf8' + + if (!encoding) + encoding = 'utf8' + + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything else switches us into object mode + if (!this[OBJECTMODE] && !B.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) + chunk = B.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + else if (isArrayBuffer(chunk)) + chunk = B.from(chunk) + else if (typeof chunk !== 'string') + // use the setter so we throw if we have encoding set + this.objectMode = true + } + + // this ensures at this point that the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!this.objectMode && !chunk.length) { + const ret = this.flowing + if (this[BUFFERLENGTH] !== 0) + this.emit('readable') + if (cb) + cb() + return ret + } + + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if (typeof chunk === 'string' && !this[OBJECTMODE] && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) { + chunk = B.from(chunk, encoding) + } + + if (B.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) + + try { + return this.flowing + ? (this.emit('data', chunk), this.flowing) + : (this[BUFFERPUSH](chunk), false) + } finally { + if (this[BUFFERLENGTH] !== 0) + this.emit('readable') + if (cb) + cb() + } + } + + read (n) { + if (this[DESTROYED]) + return null + + try { + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) + return null + + if (this[OBJECTMODE]) + n = null + + if (this.buffer.length > 1 && !this[OBJECTMODE]) { + if (this.encoding) + this.buffer = new Yallist([ + Array.from(this.buffer).join('') + ]) + else + this.buffer = new Yallist([ + B.concat(Array.from(this.buffer), this[BUFFERLENGTH]) + ]) + } + + return this[READ](n || null, this.buffer.head.value) + } finally { + this[MAYBE_EMIT_END]() + } + } + + [READ] (n, chunk) { + if (n === chunk.length || n === null) + this[BUFFERSHIFT]() + else { + this.buffer.head.value = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } + + this.emit('data', chunk) + + if (!this.buffer.length && !this[EOF]) + this.emit('drain') + + return chunk + } + + end (chunk, encoding, cb) { + if (typeof chunk === 'function') + cb = chunk, chunk = null + if (typeof encoding === 'function') + cb = encoding, encoding = 'utf8' + if (chunk) + this.write(chunk, encoding) + if (cb) + this.once('end', cb) + this[EOF] = true + this.writable = false + + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
+ if (this.flowing || !this[PAUSED]) + this[MAYBE_EMIT_END]() + return this + } + + // don't let the internal resume be overwritten + [RESUME] () { + if (this[DESTROYED]) + return + + this[PAUSED] = false + this[FLOWING] = true + this.emit('resume') + if (this.buffer.length) + this[FLUSH]() + else if (this[EOF]) + this[MAYBE_EMIT_END]() + else + this.emit('drain') + } + + resume () { + return this[RESUME]() + } + + pause () { + this[FLOWING] = false + this[PAUSED] = true + } + + get destroyed () { + return this[DESTROYED] + } + + get flowing () { + return this[FLOWING] + } + + get paused () { + return this[PAUSED] + } + + [BUFFERPUSH] (chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1 + else + this[BUFFERLENGTH] += chunk.length + return this.buffer.push(chunk) + } + + [BUFFERSHIFT] () { + if (this.buffer.length) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1 + else + this[BUFFERLENGTH] -= this.buffer.head.value.length + } + return this.buffer.shift() + } + + [FLUSH] () { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]())) + + if (!this.buffer.length && !this[EOF]) + this.emit('drain') + } + + [FLUSHCHUNK] (chunk) { + return chunk ? (this.emit('data', chunk), this.flowing) : false + } + + pipe (dest, opts) { + if (this[DESTROYED]) + return + + const ended = this[EMITTED_END] + opts = opts || {} + if (dest === process.stdout || dest === process.stderr) + opts.end = false + else + opts.end = opts.end !== false + + const p = { dest: dest, opts: opts, ondrain: _ => this[RESUME]() } + this.pipes.push(p) + + dest.on('drain', p.ondrain) + this[RESUME]() + // piping an ended stream ends immediately + if (ended && p.opts.end) + p.dest.end() + return dest + } + + addListener (ev, fn) { + return this.on(ev, fn) + } + + on (ev, fn) { + try { + return super.on(ev, fn) + } finally { + if (ev === 'data' && !this.pipes.length && !this.flowing) + this[RESUME]() + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev) + this.removeAllListeners(ev) + } + } + } + + get emittedEnd () { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END] () { + if (!this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this.buffer.length === 0 && + this[EOF]) { + this[EMITTING_END] = true + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) + this.emit('close') + this[EMITTING_END] = false + } + } + + emit (ev, data) { + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) + return + else if (ev === 'data') { + if (!data) + return + + if (this.pipes.length) + this.pipes.forEach(p => + p.dest.write(data) === false && this.pause()) + } else if (ev === 'end') { + // only actual end gets this treatment + if (this[EMITTED_END] === true) + return + + this[EMITTED_END] = true + this.readable = false + + if (this[DECODER]) { + data = this[DECODER].end() + if (data) { + this.pipes.forEach(p => p.dest.write(data)) + super.emit('data', data) + } + } + + this.pipes.forEach(p => { + p.dest.removeListener('drain', p.ondrain) + if (p.opts.end) + p.dest.end() + }) + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) + return + } + + // TODO: replace with a spread operator when Node v4 support drops + const args = new Array(arguments.length) + args[0] = ev + args[1] = data + if (arguments.length > 2) { + for (let i = 2; i < arguments.length; i++) { + args[i] = arguments[i] + } + } + + try { 
+ return super.emit.apply(this, args) + } finally { + if (!isEndish(ev)) + this[MAYBE_EMIT_END]() + else + this.removeAllListeners(ev) + } + } + + // const all = await stream.collect() + collect () { + const buf = [] + buf.dataLength = 0 + this.on('data', c => { + buf.push(c) + buf.dataLength += c.length + }) + return this.promise().then(() => buf) + } + + // const data = await stream.concat() + concat () { + return this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this.collect().then(buf => + this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this[ENCODING] ? buf.join('') : B.concat(buf, buf.dataLength)) + } + + // stream.promise().then(() => done, er => emitted error) + promise () { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))) + this.on('end', () => resolve()) + this.on('error', er => reject(er)) + }) + } + + // for await (let chunk of stream) + [ASYNCITERATOR] () { + const next = () => { + const res = this.read() + if (res !== null) + return Promise.resolve({ done: false, value: res }) + + if (this[EOF]) + return Promise.resolve({ done: true }) + + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + resolve({ done: true }) + } + const ondestroy = () => onerr(new Error('stream destroyed')) + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once(DESTROYED, ondestroy) + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } + + return { next } + } + + // for (let chunk of stream) + [ITERATOR] () { + const next = () => { + const value = this.read() + const done = value === null + return { value, done } + } + return { next } + } + + destroy (er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er) + else + this.emit(DESTROYED) + return this + } + + this[DESTROYED] = true + + // throw away all buffered data, it's never coming out + this.buffer = new Yallist() + this[BUFFERLENGTH] = 0 + + if (typeof this.close === 'function' && !this[CLOSED]) + this.close() + + if (er) + this.emit('error', er) + else // if no error to emit, still reject pending promises + this.emit(DESTROYED) + + return this + } + + static isStream (s) { + return !!s && (s instanceof Minipass || s instanceof EE && ( + typeof s.pipe === 'function' || // readable + (typeof s.write === 'function' && typeof s.end === 'function') // writable + )) + } +} diff --git a/node_modules/minizlib/node_modules/minipass/package.json b/node_modules/minizlib/node_modules/minipass/package.json new file mode 100644 index 0000000000000..57284172b071f --- /dev/null +++ b/node_modules/minizlib/node_modules/minipass/package.json @@ -0,0 +1,70 @@ +{ + "_from": "minipass@^2.9.0", + "_id": "minipass@2.9.0", + "_inBundle": false, + "_integrity": "sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==", + "_location": "/minizlib/minipass", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "minipass@^2.9.0", + "name": "minipass", + "escapedName": "minipass", + "rawSpec": "^2.9.0", + "saveSpec": null, + "fetchSpec": 
"^2.9.0" + }, + "_requiredBy": [ + "/minizlib" + ], + "_resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz", + "_shasum": "e713762e7d3e32fed803115cf93e04bca9fcc9a6", + "_spec": "minipass@^2.9.0", + "_where": "/Users/mperrotte/npminc/cli/node_modules/minizlib", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/isaacs/minipass/issues" + }, + "bundleDependencies": false, + "dependencies": { + "safe-buffer": "^5.1.2", + "yallist": "^3.0.0" + }, + "deprecated": false, + "description": "minimal implementation of a PassThrough stream", + "devDependencies": { + "end-of-stream": "^1.4.0", + "tap": "^14.6.5", + "through2": "^2.0.3" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/isaacs/minipass#readme", + "keywords": [ + "passthrough", + "stream" + ], + "license": "ISC", + "main": "index.js", + "name": "minipass", + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minipass.git" + }, + "scripts": { + "postpublish": "git push origin --follow-tags", + "postversion": "npm publish", + "preversion": "npm test", + "test": "tap" + }, + "tap": { + "check-coverage": true + }, + "version": "2.9.0" +} diff --git a/node_modules/minizlib/package.json b/node_modules/minizlib/package.json index f3a57e9f1d821..1284b8c6c4831 100644 --- a/node_modules/minizlib/package.json +++ b/node_modules/minizlib/package.json @@ -1,27 +1,30 @@ { - "_from": "minizlib@^1.1.1", - "_id": "minizlib@1.1.1", + "_from": "minizlib@^1.2.1", + "_id": "minizlib@1.3.3", "_inBundle": false, - "_integrity": "sha512-TrfjCjk4jLhcJyGMYymBH6oTXcWjYbUAXTHDbtnWHjZC25h0cdajHuPE1zxb4DVmu8crfh+HwH/WMuyLG0nHBg==", + "_integrity": "sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==", "_location": "/minizlib", - "_phantomChildren": {}, + "_phantomChildren": { + "safe-buffer": "5.1.2", + "yallist": "3.0.3" + }, "_requested": { "type": "range", "registry": true, - "raw": "minizlib@^1.1.1", + "raw": "minizlib@^1.2.1", "name": "minizlib", "escapedName": "minizlib", - "rawSpec": "^1.1.1", + "rawSpec": "^1.2.1", "saveSpec": null, - "fetchSpec": "^1.1.1" + "fetchSpec": "^1.2.1" }, "_requiredBy": [ "/tar" ], - "_resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.1.1.tgz", - "_shasum": "6734acc045a46e61d596a43bb9d9cd326e19cc42", - "_spec": "minizlib@^1.1.1", - "_where": "/Users/zkat/Documents/code/work/npm/node_modules/tar", + "_resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.3.3.tgz", + "_shasum": "2290de96818a34c29551c8a8d301216bd65a861d", + "_spec": "minizlib@^1.2.1", + "_where": "/Users/mperrotte/npminc/cli/node_modules/tar", "author": { "name": "Isaac Z. 
Schlueter", "email": "i@izs.me", @@ -32,12 +35,12 @@ }, "bundleDependencies": false, "dependencies": { - "minipass": "^2.2.1" + "minipass": "^2.9.0" }, "deprecated": false, "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.", "devDependencies": { - "tap": "^10.7.2" + "tap": "^12.0.1" }, "files": [ "index.js", @@ -67,5 +70,5 @@ "preversion": "npm test", "test": "tap test/*.js --100 -J" }, - "version": "1.1.1" + "version": "1.3.3" } diff --git a/node_modules/move-concurrently/node_modules/aproba/LICENSE b/node_modules/move-concurrently/node_modules/aproba/LICENSE new file mode 100644 index 0000000000000..f4be44d881b2d --- /dev/null +++ b/node_modules/move-concurrently/node_modules/aproba/LICENSE @@ -0,0 +1,14 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/move-concurrently/node_modules/aproba/README.md b/node_modules/move-concurrently/node_modules/aproba/README.md new file mode 100644 index 0000000000000..0bfc594c56a37 --- /dev/null +++ b/node_modules/move-concurrently/node_modules/aproba/README.md @@ -0,0 +1,94 @@ +aproba +====== + +A ridiculously light-weight function argument validator + +``` +var validate = require("aproba") + +function myfunc(a, b, c) { + // `a` must be a string, `b` a number, `c` a function + validate('SNF', arguments) // [a,b,c] is also valid +} + +myfunc('test', 23, function () {}) // ok +myfunc(123, 23, function () {}) // type error +myfunc('test', 23) // missing arg error +myfunc('test', 23, function () {}, true) // too many args error + +``` + +Valid types are: + +| type | description +| :--: | :---------- +| * | matches any type +| A | `Array.isArray` OR an `arguments` object +| S | typeof == string +| N | typeof == number +| F | typeof == function +| O | typeof == object and not type A and not type E +| B | typeof == boolean +| E | `instanceof Error` OR `null` **(special: see below)** +| Z | == `null` + +Validation failures throw one of three exception types, distinguished by a +`code` property of `EMISSINGARG`, `EINVALIDTYPE` or `ETOOMANYARGS`. + +If you pass in an invalid type then it will throw with a code of +`EUNKNOWNTYPE`. + +If an **error** argument is found and is not null then the remaining +arguments are optional. That is, if you say `ESO` then that's like using a +non-magical `E` in: `E|ESO|ZSO`. + +### But I have optional arguments?! + +You can provide more than one signature by separating them with pipes `|`. +If any signature matches the arguments then they'll be considered valid. + +So for example, say you wanted to write a signature for +`fs.createWriteStream`. The docs for it describe it thusly: + +``` +fs.createWriteStream(path[, options]) +``` + +This would be a signature of `SO|S`. That is, a string and and object, or +just a string. 
+
+Now, if you read the full `fs` docs, you'll see that actually path can ALSO
+be a buffer. And options can be a string, that is:
+```
+path: <String> | <Buffer>
+options: <String> | <Object>
+```
+
+To reproduce this you have to fully enumerate all of the possible
+combinations and that implies a signature of `SO|SS|OO|OS|S|O`. The
+awkwardness is a feature: It reminds you of the complexity you're adding to
+your API when you do this sort of thing.
+
+
+### Browser support
+
+This has no dependencies and should work in browsers, though you'll have
+noisier stack traces.
+
+### Why this exists
+
+I wanted a very simple argument validator. It needed to do two things:
+
+1. Be more concise and easier to use than assertions
+
+2. Not encourage an infinite bikeshed of DSLs
+
+This is why types are specified by a single character and there's no such
+thing as an optional argument.
+
+This is not intended to validate user data. This is specifically about
+asserting the interface of your functions.
+
+If you need greater validation, I encourage you to write them by hand or
+look elsewhere.
+
diff --git a/node_modules/move-concurrently/node_modules/aproba/index.js b/node_modules/move-concurrently/node_modules/aproba/index.js
new file mode 100644
index 0000000000000..6f3f797c09a75
--- /dev/null
+++ b/node_modules/move-concurrently/node_modules/aproba/index.js
@@ -0,0 +1,105 @@
+'use strict'
+
+function isArguments (thingy) {
+  return thingy != null && typeof thingy === 'object' && thingy.hasOwnProperty('callee')
+}
+
+var types = {
+  '*': {label: 'any', check: function () { return true }},
+  A: {label: 'array', check: function (thingy) { return Array.isArray(thingy) || isArguments(thingy) }},
+  S: {label: 'string', check: function (thingy) { return typeof thingy === 'string' }},
+  N: {label: 'number', check: function (thingy) { return typeof thingy === 'number' }},
+  F: {label: 'function', check: function (thingy) { return typeof thingy === 'function' }},
+  O: {label: 'object', check: function (thingy) { return typeof thingy === 'object' && thingy != null && !types.A.check(thingy) && !types.E.check(thingy) }},
+  B: {label: 'boolean', check: function (thingy) { return typeof thingy === 'boolean' }},
+  E: {label: 'error', check: function (thingy) { return thingy instanceof Error }},
+  Z: {label: 'null', check: function (thingy) { return thingy == null }}
+}
+
+function addSchema (schema, arity) {
+  var group = arity[schema.length] = arity[schema.length] || []
+  if (group.indexOf(schema) === -1) group.push(schema)
+}
+
+var validate = module.exports = function (rawSchemas, args) {
+  if (arguments.length !== 2) throw wrongNumberOfArgs(['SA'], arguments.length)
+  if (!rawSchemas) throw missingRequiredArg(0, 'rawSchemas')
+  if (!args) throw missingRequiredArg(1, 'args')
+  if (!types.S.check(rawSchemas)) throw invalidType(0, ['string'], rawSchemas)
+  if (!types.A.check(args)) throw invalidType(1, ['array'], args)
+  var schemas = rawSchemas.split('|')
+  var arity = {}
+
+  schemas.forEach(function (schema) {
+    for (var ii = 0; ii < schema.length; ++ii) {
+      var type = schema[ii]
+      if (!types[type]) throw unknownType(ii, type)
+    }
+    if (/E.*E/.test(schema)) throw moreThanOneError(schema)
+    addSchema(schema, arity)
+    if (/E/.test(schema)) {
+      addSchema(schema.replace(/E.*$/, 'E'), arity)
+      addSchema(schema.replace(/E/, 'Z'), arity)
+      if (schema.length === 1) addSchema('', arity)
+    }
+  })
+  var matching = arity[args.length]
+  if (!matching) {
+    throw wrongNumberOfArgs(Object.keys(arity), args.length)
+  }
+  for (var ii = 0; ii < args.length; ++ii)
{ + var newMatching = matching.filter(function (schema) { + var type = schema[ii] + var typeCheck = types[type].check + return typeCheck(args[ii]) + }) + if (!newMatching.length) { + var labels = matching.map(function (schema) { + return types[schema[ii]].label + }).filter(function (schema) { return schema != null }) + throw invalidType(ii, labels, args[ii]) + } + matching = newMatching + } +} + +function missingRequiredArg (num) { + return newException('EMISSINGARG', 'Missing required argument #' + (num + 1)) +} + +function unknownType (num, type) { + return newException('EUNKNOWNTYPE', 'Unknown type ' + type + ' in argument #' + (num + 1)) +} + +function invalidType (num, expectedTypes, value) { + var valueType + Object.keys(types).forEach(function (typeCode) { + if (types[typeCode].check(value)) valueType = types[typeCode].label + }) + return newException('EINVALIDTYPE', 'Argument #' + (num + 1) + ': Expected ' + + englishList(expectedTypes) + ' but got ' + valueType) +} + +function englishList (list) { + return list.join(', ').replace(/, ([^,]+)$/, ' or $1') +} + +function wrongNumberOfArgs (expected, got) { + var english = englishList(expected) + var args = expected.every(function (ex) { return ex.length === 1 }) + ? 'argument' + : 'arguments' + return newException('EWRONGARGCOUNT', 'Expected ' + english + ' ' + args + ' but got ' + got) +} + +function moreThanOneError (schema) { + return newException('ETOOMANYERRORTYPES', + 'Only one error type per argument signature is allowed, more than one found in "' + schema + '"') +} + +function newException (code, msg) { + var e = new Error(msg) + e.code = code + if (Error.captureStackTrace) Error.captureStackTrace(e, validate) + return e +} diff --git a/node_modules/move-concurrently/node_modules/aproba/package.json b/node_modules/move-concurrently/node_modules/aproba/package.json new file mode 100644 index 0000000000000..eba0d3e6ea8f5 --- /dev/null +++ b/node_modules/move-concurrently/node_modules/aproba/package.json @@ -0,0 +1,62 @@ +{ + "_from": "aproba@^1.1.1", + "_id": "aproba@1.2.0", + "_inBundle": false, + "_integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==", + "_location": "/move-concurrently/aproba", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "aproba@^1.1.1", + "name": "aproba", + "escapedName": "aproba", + "rawSpec": "^1.1.1", + "saveSpec": null, + "fetchSpec": "^1.1.1" + }, + "_requiredBy": [ + "/move-concurrently" + ], + "_resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "_shasum": "6802e6264efd18c790a1b0d517f0f2627bf2c94a", + "_spec": "aproba@^1.1.1", + "_where": "/Users/aeschright/code/cli/node_modules/move-concurrently", + "author": { + "name": "Rebecca Turner", + "email": "me@re-becca.org" + }, + "bugs": { + "url": "https://github.com/iarna/aproba/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "A ridiculously light-weight argument validator (now browser friendly)", + "devDependencies": { + "standard": "^10.0.3", + "tap": "^10.0.2" + }, + "directories": { + "test": "test" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/iarna/aproba", + "keywords": [ + "argument", + "validate" + ], + "license": "ISC", + "main": "index.js", + "name": "aproba", + "repository": { + "type": "git", + "url": "git+https://github.com/iarna/aproba.git" + }, + "scripts": { + "test": "standard && tap -j3 test/*.js" + }, + "version": "1.2.0" +} diff 
--git a/node_modules/nice-try/CHANGELOG.md b/node_modules/nice-try/CHANGELOG.md new file mode 100644 index 0000000000000..9e6baf2fb59be --- /dev/null +++ b/node_modules/nice-try/CHANGELOG.md @@ -0,0 +1,21 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +## [1.0.5] - 2018-08-25 + +### Changed + +- Removed `prepublish` script from `package.json` + +## [1.0.4] - 2017-08-08 + +### New + +- Added a changelog + +### Changed + +- Ignore `yarn.lock` and `package-lock.json` files \ No newline at end of file diff --git a/node_modules/nice-try/LICENSE b/node_modules/nice-try/LICENSE new file mode 100644 index 0000000000000..681c8f507b00e --- /dev/null +++ b/node_modules/nice-try/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2018 Tobias Reich + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/nice-try/README.md b/node_modules/nice-try/README.md new file mode 100644 index 0000000000000..5b83b78824a0e --- /dev/null +++ b/node_modules/nice-try/README.md @@ -0,0 +1,32 @@ +# nice-try + +[![Travis Build Status](https://travis-ci.org/electerious/nice-try.svg?branch=master)](https://travis-ci.org/electerious/nice-try) [![AppVeyor Status](https://ci.appveyor.com/api/projects/status/8tqb09wrwci3xf8l?svg=true)](https://ci.appveyor.com/project/electerious/nice-try) [![Coverage Status](https://coveralls.io/repos/github/electerious/nice-try/badge.svg?branch=master)](https://coveralls.io/github/electerious/nice-try?branch=master) [![Dependencies](https://david-dm.org/electerious/nice-try.svg)](https://david-dm.org/electerious/nice-try#info=dependencies) [![Greenkeeper badge](https://badges.greenkeeper.io/electerious/nice-try.svg)](https://greenkeeper.io/) + +A function that tries to execute a function and discards any error that occurs. + +## Install + +``` +npm install nice-try +``` + +## Usage + +```js +const niceTry = require('nice-try') + +niceTry(() => JSON.parse('true')) // true +niceTry(() => JSON.parse('truee')) // undefined +niceTry() // undefined +niceTry(true) // undefined +``` + +## API + +### Parameters + +- `fn` `{Function}` Function that might or might not throw an error. + +### Returns + +- `{?*}` Return-value of the function when no error occurred. 
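Because a failed call simply yields `undefined`, a common pattern is to pair `niceTry` with a fallback value. A minimal sketch (the malformed `raw` string is just example input):

```js
const niceTry = require('nice-try')

const raw = '{"port": 8080'                          // malformed JSON
const config = niceTry(() => JSON.parse(raw)) || {}  // falls back to {}
```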
\ No newline at end of file diff --git a/node_modules/nice-try/package.json b/node_modules/nice-try/package.json new file mode 100644 index 0000000000000..787ce7a7a846e --- /dev/null +++ b/node_modules/nice-try/package.json @@ -0,0 +1,65 @@ +{ + "_args": [ + [ + "nice-try@1.0.5", + "/Users/mperrotte/npminc/cli" + ] + ], + "_development": true, + "_from": "nice-try@1.0.5", + "_id": "nice-try@1.0.5", + "_inBundle": false, + "_integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", + "_location": "/nice-try", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "nice-try@1.0.5", + "name": "nice-try", + "escapedName": "nice-try", + "rawSpec": "1.0.5", + "saveSpec": null, + "fetchSpec": "1.0.5" + }, + "_requiredBy": [ + "/nyc/cross-spawn" + ], + "_resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", + "_spec": "1.0.5", + "_where": "/Users/mperrotte/npminc/cli", + "authors": [ + "Tobias Reich " + ], + "bugs": { + "url": "https://github.com/electerious/nice-try/issues" + }, + "description": "Tries to execute a function and discards any error that occurs", + "devDependencies": { + "chai": "^4.1.2", + "coveralls": "^3.0.0", + "mocha": "^5.1.1", + "nyc": "^12.0.1" + }, + "files": [ + "src" + ], + "homepage": "https://github.com/electerious/nice-try", + "keywords": [ + "try", + "catch", + "error" + ], + "license": "MIT", + "main": "src/index.js", + "name": "nice-try", + "repository": { + "type": "git", + "url": "git+https://github.com/electerious/nice-try.git" + }, + "scripts": { + "coveralls": "nyc report --reporter=text-lcov | coveralls", + "test": "nyc node_modules/mocha/bin/_mocha" + }, + "version": "1.0.5" +} diff --git a/node_modules/nice-try/src/index.js b/node_modules/nice-try/src/index.js new file mode 100644 index 0000000000000..837506f2cc2db --- /dev/null +++ b/node_modules/nice-try/src/index.js @@ -0,0 +1,12 @@ +'use strict' + +/** + * Tries to execute a function and discards any error that occurs. + * @param {Function} fn - Function that might or might not throw an error. + * @returns {?*} Return-value of the function when no error occurred. + */ +module.exports = function(fn) { + + try { return fn() } catch (e) {} + +} \ No newline at end of file diff --git a/node_modules/node-gyp/.github/ISSUE_TEMPLATE.md b/node_modules/node-gyp/.github/ISSUE_TEMPLATE.md index dbd053a90b38f..b5bed7fdd1ea6 100644 --- a/node_modules/node-gyp/.github/ISSUE_TEMPLATE.md +++ b/node_modules/node-gyp/.github/ISSUE_TEMPLATE.md @@ -14,10 +14,11 @@ that module's issue tracker (`npm issues modulename`).
    Verbose output (from npm or node-gyp): - - ``` - +Paste your log here, between the backticks. It can be: + - npm --verbose output, + - or contents of npm-debug.log, + - or output of node-gyp rebuild --verbose. ```
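For instance, one way a reporter might capture such a log (a sketch; the log file name is arbitrary):

```sh
# from the directory of the module that fails to build
npm install --verbose > build.log 2>&1

# or run node-gyp directly
node-gyp rebuild --verbose > build.log 2>&1
```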
    diff --git a/node_modules/node-gyp/.travis.yml b/node_modules/node-gyp/.travis.yml new file mode 100644 index 0000000000000..e43ba548fb784 --- /dev/null +++ b/node_modules/node-gyp/.travis.yml @@ -0,0 +1,101 @@ +dist: xenial +language: python +cache: pip +matrix: + include: + - name: "Python 2.7 on Linux" + env: NODE_GYP_FORCE_PYTHON=python2 + python: 2.7 + - name: "Python 2.7 on macOS" + os: osx + osx_image: xcode11.2 + language: shell # 'language: python' is not yet supported on macOS + env: NODE_GYP_FORCE_PYTHON=python2 + before_install: HOMEBREW_NO_AUTO_UPDATE=1 brew install npm + - name: "Node.js 6 & Python 2.7 on Windows" + os: windows + language: node_js + node_js: 6 # node + env: >- + PATH=/c/Python27:/c/Python27/Scripts:$PATH + NODE_GYP_FORCE_PYTHON=/c/Python27/python.exe + before_install: choco install python2 + - name: "Node.js 12 & Python 2.7 on Windows" + os: windows + language: node_js + node_js: 12 # node + env: >- + PATH=/c/Python27:/c/Python27/Scripts:$PATH + NODE_GYP_FORCE_PYTHON=/c/Python27/python.exe + before_install: choco install python2 + + - name: "Node.js 6 & Python 3.8 on Linux" + python: 3.8 + env: NODE_GYP_FORCE_PYTHON=python3 + before_install: nvm install 6 + - name: "Node.js 8 & Python 3.8 on Linux" + python: 3.8 + env: NODE_GYP_FORCE_PYTHON=python3 + before_install: nvm install 8 + - name: "Node.js 10 & Python 3.8 on Linux" + python: 3.8 + env: NODE_GYP_FORCE_PYTHON=python3 + before_install: nvm install 10 + - name: "Node.js 12 & Python 3.5 on Linux" + python: 3.5 + env: NODE_GYP_FORCE_PYTHON=python3 + before_install: nvm install 12 + - name: "Node.js 12 & Python 3.6 on Linux" + python: 3.6 + env: NODE_GYP_FORCE_PYTHON=python3 + before_install: nvm install 12 + - name: "Node.js 12 & Python 3.7 on Linux" + python: 3.7 + env: NODE_GYP_FORCE_PYTHON=python3 + before_install: nvm install 12 + - name: "Node.js 12 & Python 3.8 on Linux" + python: 3.8 + env: NODE_GYP_FORCE_PYTHON=python3 + before_install: nvm install 12 + + - name: "Python 3.7 on macOS" + os: osx + osx_image: xcode11.2 + language: shell # 'language: python' is not yet supported on macOS + env: NODE_GYP_FORCE_PYTHON=python3 + before_install: HOMEBREW_NO_AUTO_UPDATE=1 brew upgrade npm || true + - name: "Node.js 12 & Python 3.7 on Windows" + os: windows + language: node_js + node_js: 12 # node + env: >- + PATH=/c/Python37:/c/Python37/Scripts:$PATH + NODE_GYP_FORCE_PYTHON=/c/Python37/python.exe + before_install: choco install python --version=3.7.4 + - name: "Node.js 12 & Python 3.8 on Windows" + os: windows + language: node_js + node_js: 12 # node + env: >- + PATH=/c/Python38:/c/Python38/Scripts:$PATH + NODE_GYP_FORCE_PYTHON=/c/Python38/python.exe + before_install: choco install python + +install: + #- pip install -r requirements.txt + - pip install --upgrade flake8 pytest==4.6.6 # pytest 5 no longer supports legacy Python +before_script: + - flake8 --version + # stop the build if there are Python syntax errors or undefined names + - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics + # exit-zero treats all errors as warnings. Two space indentation is OK. The GitHub editor is 127 chars wide + - flake8 . 
--count --exit-zero --ignore=E111,E114,W503 --max-complexity=10 --max-line-length=127 --statistics + - npm install + - npm list +script: + - node -e 'require("npmlog").level="verbose"; require("./lib/find-python")(null,()=>{})' + - npm test + - GYP_MSVS_VERSION=2015 GYP_MSVS_OVERRIDE_PATH="C:\\Dummy" pytest +notifications: + on_success: change + on_failure: change # `always` will be the setting once code changes slow down diff --git a/node_modules/node-gyp/CHANGELOG.md b/node_modules/node-gyp/CHANGELOG.md index 33bbfad5dec9a..28f523779841d 100644 --- a/node_modules/node-gyp/CHANGELOG.md +++ b/node_modules/node-gyp/CHANGELOG.md @@ -1,3 +1,152 @@ +v5.0.7 2019-12-16 +================= + +Republish of v5.0.6 with unnecessary tarball removed from pack file. + +v5.0.6 2019-12-16 +================= + +* [[`cdec00286f`](https://github.com/nodejs/node-gyp/commit/cdec00286f)] - **doc**: adjustments to the README.md for new users (Dan Pike) [#1919](https://github.com/nodejs/node-gyp/pull/1919) +* [[`b7c8233ef2`](https://github.com/nodejs/node-gyp/commit/b7c8233ef2)] - **test**: fix Python unittests (cclauss) [#1961](https://github.com/nodejs/node-gyp/pull/1961) +* [[`e12b00ab0a`](https://github.com/nodejs/node-gyp/commit/e12b00ab0a)] - **doc**: macOS Catalina add two commands (Christian Clauss) [#1962](https://github.com/nodejs/node-gyp/pull/1962) +* [[`70b9890c0d`](https://github.com/nodejs/node-gyp/commit/70b9890c0d)] - **test**: add header download test (Rod Vagg) [#1796](https://github.com/nodejs/node-gyp/pull/1796) +* [[`4029fa8629`](https://github.com/nodejs/node-gyp/commit/4029fa8629)] - **test**: configure proper devDir for invoking configure() (Rod Vagg) [#1796](https://github.com/nodejs/node-gyp/pull/1796) +* [[`fe8b02cc8b`](https://github.com/nodejs/node-gyp/commit/fe8b02cc8b)] - **doc**: add macOS\_Catalina.md document (cclauss) [#1940](https://github.com/nodejs/node-gyp/pull/1940) +* [[`8ea47ce365`](https://github.com/nodejs/node-gyp/commit/8ea47ce365)] - **gyp**: python3 fixes: utf8 decode, use of 'None' in eval (Wilfried Goesgens) [#1925](https://github.com/nodejs/node-gyp/pull/1925) +* [[`c7229716ba`](https://github.com/nodejs/node-gyp/commit/c7229716ba)] - **gyp**: iteritems() -\> items() in compile\_commands\_json.py (cclauss) [#1947](https://github.com/nodejs/node-gyp/pull/1947) +* [[`2a18b2a0f8`](https://github.com/nodejs/node-gyp/commit/2a18b2a0f8)] - **gyp**: make cmake python3 compatible (gengjiawen) [#1944](https://github.com/nodejs/node-gyp/pull/1944) +* [[`70f391e844`](https://github.com/nodejs/node-gyp/commit/70f391e844)] - **gyp**: fix TypeError in XcodeVersion() (Christian Clauss) [#1939](https://github.com/nodejs/node-gyp/pull/1939) +* [[`9f4f0fa34e`](https://github.com/nodejs/node-gyp/commit/9f4f0fa34e)] - **gyp**: finish decode stdout on Python 3 (Christian Clauss) [#1937](https://github.com/nodejs/node-gyp/pull/1937) +* [[`7cf507906d`](https://github.com/nodejs/node-gyp/commit/7cf507906d)] - **src,win**: allow 403 errors for arm64 node.lib (Richard Lau) [#1934](https://github.com/nodejs/node-gyp/pull/1934) +* [[`ad0d182c01`](https://github.com/nodejs/node-gyp/commit/ad0d182c01)] - **deps**: update deps to roughly match current npm@6 (Rod Vagg) [#1920](https://github.com/nodejs/node-gyp/pull/1920) +* [[`1553081ed6`](https://github.com/nodejs/node-gyp/commit/1553081ed6)] - **test**: upgrade Linux Travis CI to Python 3.8 (Christian Clauss) [#1923](https://github.com/nodejs/node-gyp/pull/1923) +* [[`0705cae9aa`](https://github.com/nodejs/node-gyp/commit/0705cae9aa)] - 
**travis**: ignore failed `brew upgrade npm`, update xcode (Christian Clauss) [#1932](https://github.com/nodejs/node-gyp/pull/1932) +* [[`7bfdb6f5bf`](https://github.com/nodejs/node-gyp/commit/7bfdb6f5bf)] - **build**: avoid bare exceptions in xcode\_emulation.py (Christian Clauss) [#1932](https://github.com/nodejs/node-gyp/pull/1932) +* [[`7edf7658fa`](https://github.com/nodejs/node-gyp/commit/7edf7658fa)] - **lib,install**: always download SHA sums on Windows (Sam Hughes) [#1926](https://github.com/nodejs/node-gyp/pull/1926) +* [[`69056d04fe`](https://github.com/nodejs/node-gyp/commit/69056d04fe)] - **travis**: add Windows + Python 3.8 to the mix (Rod Vagg) [#1921](https://github.com/nodejs/node-gyp/pull/1921) + +v5.0.5 2019-10-04 +================= + +* [[`3891391746`](https://github.com/nodejs/node-gyp/commit/3891391746)] - **doc**: reconcile README with Python 3 compat changes (Rod Vagg) [#1911](https://github.com/nodejs/node-gyp/pull/1911) +* [[`07f81f1920`](https://github.com/nodejs/node-gyp/commit/07f81f1920)] - **lib**: accept Python 3 after Python 2 (Sam Roberts) [#1910](https://github.com/nodejs/node-gyp/pull/1910) +* [[`04ce59f4a2`](https://github.com/nodejs/node-gyp/commit/04ce59f4a2)] - **doc**: clarify Python configuration, etc (Sam Roberts) [#1908](https://github.com/nodejs/node-gyp/pull/1908) +* [[`01c46ee3df`](https://github.com/nodejs/node-gyp/commit/01c46ee3df)] - **gyp**: add \_\_lt\_\_ to MSVSSolutionEntry (João Reis) [#1904](https://github.com/nodejs/node-gyp/pull/1904) +* [[`735d961b99`](https://github.com/nodejs/node-gyp/commit/735d961b99)] - **win**: support VS 2017 Desktop Express (João Reis) [#1902](https://github.com/nodejs/node-gyp/pull/1902) +* [[`3834156a92`](https://github.com/nodejs/node-gyp/commit/3834156a92)] - **test**: add Python 3.5 and 3.6 tests on Linux (cclauss) [#1909](https://github.com/nodejs/node-gyp/pull/1909) +* [[`1196e990d8`](https://github.com/nodejs/node-gyp/commit/1196e990d8)] - **src**: update to standard@14 (Rod Vagg) [#1899](https://github.com/nodejs/node-gyp/pull/1899) +* [[`53ee7dfe89`](https://github.com/nodejs/node-gyp/commit/53ee7dfe89)] - **gyp**: fix undefined name: cflags --\> ldflags (Christian Clauss) [#1901](https://github.com/nodejs/node-gyp/pull/1901) +* [[`5871dcf6c9`](https://github.com/nodejs/node-gyp/commit/5871dcf6c9)] - **src,win**: add support for fetching arm64 node.lib (Richard Townsend) [#1875](https://github.com/nodejs/node-gyp/pull/1875) + +v5.0.4 2019-09-27 +================= + +* [[`1236869ffc`](https://github.com/nodejs/node-gyp/commit/1236869ffc)] - **gyp**: modify XcodeVersion() to convert "4.2" to "0420" and "10.0" to "1000" (Christian Clauss) [#1895](https://github.com/nodejs/node-gyp/pull/1895) +* [[`36638afe48`](https://github.com/nodejs/node-gyp/commit/36638afe48)] - **gyp**: more decode stdout on Python 3 (cclauss) [#1894](https://github.com/nodejs/node-gyp/pull/1894) +* [[`f753c167c5`](https://github.com/nodejs/node-gyp/commit/f753c167c5)] - **gyp**: decode stdout on Python 3 (cclauss) [#1890](https://github.com/nodejs/node-gyp/pull/1890) +* [[`60a4083523`](https://github.com/nodejs/node-gyp/commit/60a4083523)] - **doc**: update xcode install instructions to match Node's BUILDING (Nhan Khong) [#1884](https://github.com/nodejs/node-gyp/pull/1884) +* [[`19dbc9ac32`](https://github.com/nodejs/node-gyp/commit/19dbc9ac32)] - **deps**: update tar to 4.4.12 (Matheus Marchini) [#1889](https://github.com/nodejs/node-gyp/pull/1889) +* [[`5f3ed92181`](https://github.com/nodejs/node-gyp/commit/5f3ed92181)] - 
**bin**: fix the usage instructions (Halit Ogunc) [#1888](https://github.com/nodejs/node-gyp/pull/1888) +* [[`aab118edf1`](https://github.com/nodejs/node-gyp/commit/aab118edf1)] - **lib**: adding keep-alive header to download requests (Milad Farazmand) [#1863](https://github.com/nodejs/node-gyp/pull/1863) +* [[`1186e89326`](https://github.com/nodejs/node-gyp/commit/1186e89326)] - **lib**: ignore non-critical os.userInfo() failures (Rod Vagg) [#1835](https://github.com/nodejs/node-gyp/pull/1835) +* [[`785e527c3d`](https://github.com/nodejs/node-gyp/commit/785e527c3d)] - **doc**: fix missing argument for setting python path (lagorsse) [#1802](https://github.com/nodejs/node-gyp/pull/1802) +* [[`a97615196c`](https://github.com/nodejs/node-gyp/commit/a97615196c)] - **gyp**: rm semicolons (Python != JavaScript) (MattIPv4) [#1858](https://github.com/nodejs/node-gyp/pull/1858) +* [[`06019bac24`](https://github.com/nodejs/node-gyp/commit/06019bac24)] - **gyp**: assorted typo fixes (XhmikosR) [#1853](https://github.com/nodejs/node-gyp/pull/1853) +* [[`3f4972c1ca`](https://github.com/nodejs/node-gyp/commit/3f4972c1ca)] - **gyp**: use "is" when comparing to None (Vladyslav Burzakovskyy) [#1860](https://github.com/nodejs/node-gyp/pull/1860) +* [[`1cb4708073`](https://github.com/nodejs/node-gyp/commit/1cb4708073)] - **src,win**: improve unmanaged handling (Peter Sabath) [#1852](https://github.com/nodejs/node-gyp/pull/1852) +* [[`5553cd910e`](https://github.com/nodejs/node-gyp/commit/5553cd910e)] - **gyp**: improve Windows+Cygwin compatibility (Jose Quijada) [#1817](https://github.com/nodejs/node-gyp/pull/1817) +* [[`8bcb1fbb43`](https://github.com/nodejs/node-gyp/commit/8bcb1fbb43)] - **gyp**: Python 3 Windows fixes (João Reis) [#1843](https://github.com/nodejs/node-gyp/pull/1843) +* [[`2e24d0a326`](https://github.com/nodejs/node-gyp/commit/2e24d0a326)] - **test**: accept Python 3 in test-find-python.js (João Reis) [#1843](https://github.com/nodejs/node-gyp/pull/1843) +* [[`1267b4dc1c`](https://github.com/nodejs/node-gyp/commit/1267b4dc1c)] - **build**: add test run Python 3.7 on macOS (Christian Clauss) [#1843](https://github.com/nodejs/node-gyp/pull/1843) +* [[`da1b031aa3`](https://github.com/nodejs/node-gyp/commit/da1b031aa3)] - **build**: import StringIO on Python 2 and Python 3 (Christian Clauss) [#1836](https://github.com/nodejs/node-gyp/pull/1836) +* [[`fa0ed4aa42`](https://github.com/nodejs/node-gyp/commit/fa0ed4aa42)] - **build**: more Python 3 compat, replace compile with ast (cclauss) [#1820](https://github.com/nodejs/node-gyp/pull/1820) +* [[`18d5c7c9d0`](https://github.com/nodejs/node-gyp/commit/18d5c7c9d0)] - **win,src**: update win\_delay\_load\_hook.cc to work with /clr (Ivan Petrovic) [#1819](https://github.com/nodejs/node-gyp/pull/1819) + +v5.0.3 2019-07-17 +================= + +* [[`66ad305775`](https://github.com/nodejs/node-gyp/commit/66ad305775)] - **python**: accept Python 3 conditionally (João Reis) [#1815](https://github.com/nodejs/node-gyp/pull/1815) +* [[`7e7fce3fed`](https://github.com/nodejs/node-gyp/commit/7e7fce3fed)] - **python**: move Python detection to its own file (João Reis) [#1815](https://github.com/nodejs/node-gyp/pull/1815) +* [[`e40c99e283`](https://github.com/nodejs/node-gyp/commit/e40c99e283)] - **src**: implement standard.js linting (Rod Vagg) [#1794](https://github.com/nodejs/node-gyp/pull/1794) +* [[`bb92c761a9`](https://github.com/nodejs/node-gyp/commit/bb92c761a9)] - **test**: add Node.js 6 on Windows to Travis CI (João Reis) 
[#1812](https://github.com/nodejs/node-gyp/pull/1812) +* [[`7fd924079f`](https://github.com/nodejs/node-gyp/commit/7fd924079f)] - **test**: increase tap timeout (João Reis) [#1812](https://github.com/nodejs/node-gyp/pull/1812) +* [[`7e8127068f`](https://github.com/nodejs/node-gyp/commit/7e8127068f)] - **test**: cover supported node versions with travis (Rod Vagg) [#1809](https://github.com/nodejs/node-gyp/pull/1809) +* [[`24109148df`](https://github.com/nodejs/node-gyp/commit/24109148df)] - **test**: downgrade to tap@^12 for continued Node 6 support (Rod Vagg) [#1808](https://github.com/nodejs/node-gyp/pull/1808) +* [[`656117cc4a`](https://github.com/nodejs/node-gyp/commit/656117cc4a)] - **win**: make VS path match case-insensitive (João Reis) [#1806](https://github.com/nodejs/node-gyp/pull/1806) + +v5.0.2 2019-06-27 +================= + +* [[`2761afbf73`](https://github.com/nodejs/node-gyp/commit/2761afbf73)] - **build,test**: add duplicate symbol test (Gabriel Schulhof) [#1689](https://github.com/nodejs/node-gyp/pull/1689) +* [[`82f129d6de`](https://github.com/nodejs/node-gyp/commit/82f129d6de)] - **gyp**: replace optparse to argparse (KiYugadgeter) [#1591](https://github.com/nodejs/node-gyp/pull/1591) +* [[`afaaa29c61`](https://github.com/nodejs/node-gyp/commit/afaaa29c61)] - **gyp**: remove from \_\_future\_\_ import with\_statement (cclauss) [#1799](https://github.com/nodejs/node-gyp/pull/1799) +* [[`a991f633d6`](https://github.com/nodejs/node-gyp/commit/a991f633d6)] - **gyp**: fix the remaining Python 3 issues (cclauss) [#1793](https://github.com/nodejs/node-gyp/pull/1793) +* [[`f952b08f84`](https://github.com/nodejs/node-gyp/commit/f952b08f84)] - **gyp**: move from \_\_future\_\_ import to the top of the file (cclauss) [#1789](https://github.com/nodejs/node-gyp/pull/1789) +* [[`4f4a677dfa`](https://github.com/nodejs/node-gyp/commit/4f4a677dfa)] - **gyp**: use different default compiler for z/OS (Shuowang (Wayne) Zhang) [#1768](https://github.com/nodejs/node-gyp/pull/1768) +* [[`03683f09d6`](https://github.com/nodejs/node-gyp/commit/03683f09d6)] - **lib**: code de-duplication (Pavel Medvedev) [#965](https://github.com/nodejs/node-gyp/pull/965) +* [[`611bc3c89f`](https://github.com/nodejs/node-gyp/commit/611bc3c89f)] - **lib**: add .json suffix for explicit require (Rod Vagg) [#1787](https://github.com/nodejs/node-gyp/pull/1787) +* [[`d3478d7b0b`](https://github.com/nodejs/node-gyp/commit/d3478d7b0b)] - **meta**: add to .gitignore (Refael Ackermann) [#1573](https://github.com/nodejs/node-gyp/pull/1573) +* [[`7a9a038e9e`](https://github.com/nodejs/node-gyp/commit/7a9a038e9e)] - **test**: add parallel test runs on macOS and Windows (cclauss) [#1800](https://github.com/nodejs/node-gyp/pull/1800) +* [[`7dd7f2b2a2`](https://github.com/nodejs/node-gyp/commit/7dd7f2b2a2)] - **test**: fix Python syntax error in test-adding.js (cclauss) [#1793](https://github.com/nodejs/node-gyp/pull/1793) +* [[`395f843de0`](https://github.com/nodejs/node-gyp/commit/395f843de0)] - **test**: replace self-signed cert with 'localhost' (Rod Vagg) [#1795](https://github.com/nodejs/node-gyp/pull/1795) +* [[`a52c6eb9e8`](https://github.com/nodejs/node-gyp/commit/a52c6eb9e8)] - **test**: migrate from tape to tap (Rod Vagg) [#1795](https://github.com/nodejs/node-gyp/pull/1795) +* [[`ec2eb44a30`](https://github.com/nodejs/node-gyp/commit/ec2eb44a30)] - **test**: use Nan in duplicate\_symbols (Gabriel Schulhof) [#1689](https://github.com/nodejs/node-gyp/pull/1689) +* 
[[`1597c84aad`](https://github.com/nodejs/node-gyp/commit/1597c84aad)] - **test**: use Travis CI to run tests on every pull request (cclauss) [#1752](https://github.com/nodejs/node-gyp/pull/1752) +* [[`dd9bf929ac`](https://github.com/nodejs/node-gyp/commit/dd9bf929ac)] - **zos**: update compiler options (Shuowang (Wayne) Zhang) [#1768](https://github.com/nodejs/node-gyp/pull/1768) + +v5.0.1 2019-06-20 +================= + +* [[`e3861722ed`](https://github.com/nodejs/node-gyp/commit/e3861722ed)] - **doc**: document --jobs max (David Sanders) [#1770](https://github.com/nodejs/node-gyp/pull/1770) +* [[`1cfdb28886`](https://github.com/nodejs/node-gyp/commit/1cfdb28886)] - **lib**: reintroduce support for iojs file naming for releases \>= 1 && \< 4 (Samuel Attard) [#1777](https://github.com/nodejs/node-gyp/pull/1777) + +v5.0.0 2019-06-13 +================= + +* [[`8a83972743`](https://github.com/nodejs/node-gyp/commit/8a83972743)] - **(SEMVER-MAJOR)** **bin**: follow XDG OS conventions for storing data (Selwyn) [#1570](https://github.com/nodejs/node-gyp/pull/1570) +* [[`9e46872ea3`](https://github.com/nodejs/node-gyp/commit/9e46872ea3)] - **bin,lib**: remove extra comments/lines/spaces (Jon Moss) [#1508](https://github.com/nodejs/node-gyp/pull/1508) +* [[`8098ebdeb4`](https://github.com/nodejs/node-gyp/commit/8098ebdeb4)] - **deps**: replace `osenv` dependency with native `os` (Selwyn) +* [[`f83b457e03`](https://github.com/nodejs/node-gyp/commit/f83b457e03)] - **deps**: bump request to 2.8.7, fixes heok/hawk issues (Rohit Hazra) [#1492](https://github.com/nodejs/node-gyp/pull/1492) +* [[`323cee7323`](https://github.com/nodejs/node-gyp/commit/323cee7323)] - **deps**: pin `request` version range (Refael Ackermann) [#1300](https://github.com/nodejs/node-gyp/pull/1300) +* [[`c515912d08`](https://github.com/nodejs/node-gyp/commit/c515912d08)] - **doc**: improve issue template (Bartosz Sosnowski) [#1618](https://github.com/nodejs/node-gyp/pull/1618) +* [[`cca2d66727`](https://github.com/nodejs/node-gyp/commit/cca2d66727)] - **doc**: python info needs own header (Taylor D. Lee) [#1245](https://github.com/nodejs/node-gyp/pull/1245) +* [[`3e64c780f5`](https://github.com/nodejs/node-gyp/commit/3e64c780f5)] - **doc**: lint README.md (Jon Moss) [#1498](https://github.com/nodejs/node-gyp/pull/1498) +* [[`a20faedc91`](https://github.com/nodejs/node-gyp/commit/a20faedc91)] - **(SEMVER-MAJOR)** **gyp**: enable MARMASM items only on new VS versions (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762) +* [[`721eb691cf`](https://github.com/nodejs/node-gyp/commit/721eb691cf)] - **gyp**: teach MSVS generator about MARMASM Items (Jon Kunkee) [#1679](https://github.com/nodejs/node-gyp/pull/1679) +* [[`91744bfecc`](https://github.com/nodejs/node-gyp/commit/91744bfecc)] - **gyp**: add support for Windows on Arm (Richard Townsend) [#1739](https://github.com/nodejs/node-gyp/pull/1739) +* [[`a6e0a6c7ed`](https://github.com/nodejs/node-gyp/commit/a6e0a6c7ed)] - **gyp**: move compile\_commands\_json (Paul Maréchal) [#1661](https://github.com/nodejs/node-gyp/pull/1661) +* [[`92e8b52cee`](https://github.com/nodejs/node-gyp/commit/92e8b52cee)] - **gyp**: fix target --\> self.target (cclauss) +* [[`febdfa2137`](https://github.com/nodejs/node-gyp/commit/febdfa2137)] - **gyp**: fix sntex error (cclauss) [#1333](https://github.com/nodejs/node-gyp/pull/1333) +* [[`588d333c14`](https://github.com/nodejs/node-gyp/commit/588d333c14)] - **gyp**: \_winreg module was renamed to winreg in Python 3. 
(Craig Rodrigues) +* [[`98226d198c`](https://github.com/nodejs/node-gyp/commit/98226d198c)] - **gyp**: replace basestring with str, but only on Python 3. (Craig Rodrigues) +* [[`7535e4478e`](https://github.com/nodejs/node-gyp/commit/7535e4478e)] - **gyp**: replace deprecated functions (Craig Rodrigues) +* [[`2040cd21cc`](https://github.com/nodejs/node-gyp/commit/2040cd21cc)] - **gyp**: use print as a function, as specified in PEP 3105. (Craig Rodrigues) +* [[`abef93ded5`](https://github.com/nodejs/node-gyp/commit/abef93ded5)] - **gyp**: get ready for python 3 (cclauss) +* [[`43031fadcb`](https://github.com/nodejs/node-gyp/commit/43031fadcb)] - **python**: clean-up detection (João Reis) [#1582](https://github.com/nodejs/node-gyp/pull/1582) +* [[`49ab79d221`](https://github.com/nodejs/node-gyp/commit/49ab79d221)] - **python**: more informative error (Refael Ackermann) [#1269](https://github.com/nodejs/node-gyp/pull/1269) +* [[`997bc3c748`](https://github.com/nodejs/node-gyp/commit/997bc3c748)] - **readme**: add ARM64 info to MSVC setup instructions (Jon Kunkee) [#1655](https://github.com/nodejs/node-gyp/pull/1655) +* [[`788e767179`](https://github.com/nodejs/node-gyp/commit/788e767179)] - **test**: remove unused variable (João Reis) +* [[`6f5a408934`](https://github.com/nodejs/node-gyp/commit/6f5a408934)] - **tools**: fix usage of inherited -fPIC and -fPIE (Jens) [#1340](https://github.com/nodejs/node-gyp/pull/1340) +* [[`0efb8fb34b`](https://github.com/nodejs/node-gyp/commit/0efb8fb34b)] - **(SEMVER-MAJOR)** **win**: support running in VS Command Prompt (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762) +* [[`360ddbdf3a`](https://github.com/nodejs/node-gyp/commit/360ddbdf3a)] - **(SEMVER-MAJOR)** **win**: add support for Visual Studio 2019 (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762) +* [[`8f43f68275`](https://github.com/nodejs/node-gyp/commit/8f43f68275)] - **(SEMVER-MAJOR)** **win**: detect all VS versions in node-gyp (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762) +* [[`7fe4095974`](https://github.com/nodejs/node-gyp/commit/7fe4095974)] - **(SEMVER-MAJOR)** **win**: generic Visual Studio 2017 detection (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762) +* [[`7a71d68bce`](https://github.com/nodejs/node-gyp/commit/7a71d68bce)] - **win**: use msbuild from the configure stage (Bartosz Sosnowski) [#1654](https://github.com/nodejs/node-gyp/pull/1654) +* [[`d3b21220a0`](https://github.com/nodejs/node-gyp/commit/d3b21220a0)] - **win**: fix delay-load hook for electron 4 (Andy Dill) +* [[`81f3a92338`](https://github.com/nodejs/node-gyp/commit/81f3a92338)] - Update list of Node.js versions to test against. (Ben Noordhuis) [#1670](https://github.com/nodejs/node-gyp/pull/1670) +* [[`4748f6ab75`](https://github.com/nodejs/node-gyp/commit/4748f6ab75)] - Remove deprecated compatibility code. 
(Ben Noordhuis) [#1670](https://github.com/nodejs/node-gyp/pull/1670) +* [[`45e3221fd4`](https://github.com/nodejs/node-gyp/commit/45e3221fd4)] - Remove an outdated workaround for Python 2.4 (cclauss) [#1650](https://github.com/nodejs/node-gyp/pull/1650) +* [[`721dc7d314`](https://github.com/nodejs/node-gyp/commit/721dc7d314)] - Add ARM64 to MSBuild /Platform logic (Jon Kunkee) [#1655](https://github.com/nodejs/node-gyp/pull/1655) +* [[`a5b7410497`](https://github.com/nodejs/node-gyp/commit/a5b7410497)] - Add ESLint no-unused-vars rule (Jon Moss) [#1497](https://github.com/nodejs/node-gyp/pull/1497) + +v4.0.0 2019-04-24 +================= + +* [[`ceed5cbe10`](https://github.com/nodejs/node-gyp/commit/ceed5cbe10)] - **deps**: updated tar package version to 4.4.8 (Pobegaylo Maksim) [#1713](https://github.com/nodejs/node-gyp/pull/1713) +* [[`374519e066`](https://github.com/nodejs/node-gyp/commit/374519e066)] - **(SEMVER-MAJOR)** Upgrade to tar v3 (isaacs) [#1212](https://github.com/nodejs/node-gyp/pull/1212) +* [[`e6699d13cd`](https://github.com/nodejs/node-gyp/commit/e6699d13cd)] - **test**: fix addon test for Node.js 12 and V8 7.4 (Richard Lau) [#1705](https://github.com/nodejs/node-gyp/pull/1705) +* [[`0c6bf530a0`](https://github.com/nodejs/node-gyp/commit/0c6bf530a0)] - **lib**: use print() for python version detection (GreenAddress) [#1534](https://github.com/nodejs/node-gyp/pull/1534) + v3.8.0 2018-08-09 ================= diff --git a/node_modules/node-gyp/README.md b/node_modules/node-gyp/README.md index 0fed03c54322a..e39c5de8ea045 100644 --- a/node_modules/node-gyp/README.md +++ b/node_modules/node-gyp/README.md @@ -1,52 +1,48 @@ -node-gyp -========= -## Node.js native addon build tool - -`node-gyp` is a cross-platform command-line tool written in Node.js for compiling -native addon modules for Node.js. It bundles the [gyp](https://gyp.gsrc.io) -project used by the Chromium team and takes away the pain of dealing with the -various differences in build platforms. It is the replacement to the `node-waf` -program which is removed for node `v0.8`. If you have a native addon for node that -still has a `wscript` file, then you should definitely add a `binding.gyp` file -to support the latest versions of node. - -Multiple target versions of node are supported (i.e. `0.8`, ..., `4`, `5`, `6`, -etc.), regardless of what version of node is actually installed on your system +# `node-gyp` - Node.js native addon build tool + +`node-gyp` is a cross-platform command-line tool written in Node.js for +compiling native addon modules for Node.js. It contains a fork of the +[gyp](https://gyp.gsrc.io) project that was previously used by the Chromium +team, extended to support the development of Node.js native addons. + +Note that `node-gyp` is _not_ used to build Node.js itself. + +Multiple target versions of Node.js are supported (i.e. `0.8`, ..., `4`, `5`, `6`, +etc.), regardless of what version of Node.js is actually installed on your system (`node-gyp` downloads the necessary development files or headers for the target version). 
## Features - * Easy to use, consistent interface - * Same commands to build your module on every platform - * Supports multiple target versions of Node - + * The same build commands work on any of the supported platforms + * Supports the targetting of different versions of Node.js -Installation ------------- +## Installation -You can install with `npm`: +You can install `node-gyp` using `npm`: ``` bash $ npm install -g node-gyp ``` -You will also need to install: +Depending on your operating system, you will need to install: ### On Unix - * `python` (`v2.7` recommended, `v3.x.x` is __*not*__ supported) + * Python v2.7, v3.5, v3.6, or v3.7 * `make` * A proper C/C++ compiler toolchain, like [GCC](https://gcc.gnu.org) ### On macOS - * `python` (`v2.7` recommended, `v3.x.x` is __*not*__ supported) (already installed on macOS) + * Python v2.7, v3.5, v3.6, or v3.7 * [Xcode](https://developer.apple.com/xcode/download/) - * You also need to install the `Command Line Tools` via Xcode. You can find this under the menu `Xcode -> Preferences -> Locations` (or by running `xcode-select --install` in your Terminal) - * This step will install `gcc` and the related toolchain containing `make` + * You also need to install the `XCode Command Line Tools` by running `xcode-select --install`. Alternatively, if you already have the full Xcode installed, you can find them under the menu `Xcode -> Open Developer Tool -> More Developer Tools...`. This step will install `clang`, `clang++`, and `make`. + * If your Mac has been _upgraded_ to macOS Catalina (10.15), please read [macOS_Catalina.md](macOS_Catalina.md). ### On Windows +Install the current version of Python from the [Microsoft Store package](https://docs.python.org/3/using/windows.html#the-microsoft-store-package). + #### Option 1 Install all the required tools and configurations using Microsoft's [windows-build-tools](https://github.com/felixrieseberg/windows-build-tools) using `npm install --global --production windows-build-tools` from an elevated PowerShell or CMD.exe (run as Administrator). @@ -57,28 +53,41 @@ Install tools and configuration manually: * Install Visual C++ Build Environment: [Visual Studio Build Tools](https://visualstudio.microsoft.com/thank-you-downloading-visual-studio/?sku=BuildTools) (using "Visual C++ build tools" workload) or [Visual Studio 2017 Community](https://visualstudio.microsoft.com/pl/thank-you-downloading-visual-studio/?sku=Community) (using the "Desktop development with C++" workload) - * Install [Python 2.7](https://www.python.org/downloads/) (`v3.x.x` is not supported), and run `npm config set python python2.7` (or see below for further instructions on specifying the proper Python version and path.) * Launch cmd, `npm config set msvs_version 2017` If the above steps didn't work for you, please visit [Microsoft's Node.js Guidelines for Windows](https://github.com/Microsoft/nodejs-guidelines/blob/master/windows-environment.md#compiling-native-addon-modules) for additional tips. -If you have multiple Python versions installed, you can identify which Python -version `node-gyp` uses by setting the '--python' variable: + To target native ARM64 Node.js on Windows 10 on ARM, add the components "Visual C++ compilers and libraries for ARM64" and "Visual C++ ATL for ARM64". + +### Configuring Python Dependency + +`node-gyp` requires that you have installed a compatible version of Python, one of: v2.7, v3.5, v3.6, +or v3.7. 
If you have multiple Python versions installed, you can identify which Python +version `node-gyp` should use in one of the following ways: + +1. by setting the `--python` command-line option, e.g.: ``` bash -$ node-gyp --python /path/to/python2.7 +$ node-gyp --python /path/to/executable/python ``` -If `node-gyp` is called by way of `npm` *and* you have multiple versions of +2. If `node-gyp` is called by way of `npm`, *and* you have multiple versions of Python installed, then you can set `npm`'s 'python' config key to the appropriate value: ``` bash -$ npm config set python /path/to/executable/python2.7 +$ npm config set python /path/to/executable/python ``` -How to Use ----------- +3. If the `PYTHON` environment variable is set to the path of a Python executable, +then that version will be used, if it is a compatible version. + +4. If the `NODE_GYP_FORCE_PYTHON` environment variable is set to the path of a +Python executable, it will be used instead of any of the other configured or +builtin Python search paths. If it's not a compatible version, no further +searching will be done. + +## How to Use To compile your native addon, first go to its root directory: @@ -99,35 +108,32 @@ needs to be added (not needed when run by npm as configured above): $ node-gyp configure --msvs_version=2015 ``` -__Note__: The `configure` step looks for the `binding.gyp` file in the current -directory to process. See below for instructions on creating the `binding.gyp` file. +__Note__: The `configure` step looks for a `binding.gyp` file in the current +directory to process. See below for instructions on creating a `binding.gyp` file. Now you will have either a `Makefile` (on Unix platforms) or a `vcxproj` file -(on Windows) in the `build/` directory. Next invoke the `build` command: +(on Windows) in the `build/` directory. Next, invoke the `build` command: ``` bash $ node-gyp build ``` Now you have your compiled `.node` bindings file! The compiled bindings end up -in `build/Debug/` or `build/Release/`, depending on the build mode. At this point -you can require the `.node` file with Node and run your tests! +in `build/Debug/` or `build/Release/`, depending on the build mode. At this point, +you can require the `.node` file with Node.js and run your tests! __Note:__ To create a _Debug_ build of the bindings file, pass the `--debug` (or -`-d`) switch when running either the `configure`, `build` or `rebuild` command. +`-d`) switch when running either the `configure`, `build` or `rebuild` commands. +## The `binding.gyp` file -The "binding.gyp" file ----------------------- +A `binding.gyp` file describes the configuration to build your module, in a +JSON-like format. This file gets placed in the root of your package, alongside +`package.json`. -Previously when node had `node-waf` you had to write a `wscript` file. The -replacement for that is the `binding.gyp` file, which describes the configuration -to build your module in a JSON-like format. This file gets placed in the root of -your package, alongside the `package.json` file. 
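Aside: the Python-selection list earlier in this README hunk (items 1–4) can be read as a precedence order. Below is a minimal sketch of one plausible reading; `resolve_python` and its arguments are hypothetical names for illustration only and are not part of node-gyp's actual find-python logic.

```python
import os

def resolve_python(cli_python=None, npm_python=None, env=os.environ):
    """Hypothetical helper: pick a Python path following the README's list."""
    # Item 4: NODE_GYP_FORCE_PYTHON overrides every other source and stops
    # any further searching.
    if env.get('NODE_GYP_FORCE_PYTHON'):
        return env['NODE_GYP_FORCE_PYTHON']
    # Item 1: the --python command-line option.
    if cli_python:
        return cli_python
    # Item 2: npm's "python" config key (when node-gyp is run via npm).
    if npm_python:
        return npm_python
    # Item 3: the PYTHON environment variable, if it points at a compatible version.
    if env.get('PYTHON'):
        return env['PYTHON']
    # Otherwise fall back to whatever "python" resolves to on PATH.
    return 'python'

print(resolve_python(cli_python='/path/to/executable/python'))
```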
+A barebones `gyp` file appropriate for building a Node.js addon could look like: -A barebones `gyp` file appropriate for building a node addon looks like: - -``` python +```python { "targets": [ { @@ -138,7 +144,9 @@ A barebones `gyp` file appropriate for building a node addon looks like: } ``` -Some additional resources for addons and writing `gyp` files: +## Further reading + +Some additional resources for Node.js native addons and writing `gyp` configuration files: * ["Going Native" a nodeschool.io tutorial](http://nodeschool.io/#goingnative) * ["Hello World" node addon example](https://github.com/nodejs/node/tree/master/test/addons/hello-world) @@ -146,9 +154,7 @@ Some additional resources for addons and writing `gyp` files: * [gyp input format reference](https://gyp.gsrc.io/docs/InputFormatReference.md) * [*"binding.gyp" files out in the wild* wiki page](https://github.com/nodejs/node-gyp/wiki/%22binding.gyp%22-files-out-in-the-wild) - -Commands --------- +## Commands `node-gyp` responds to the following commands: @@ -159,86 +165,74 @@ Commands | `clean` | Removes the `build` directory if it exists | `configure` | Generates project build files for the current platform | `rebuild` | Runs `clean`, `configure` and `build` all in a row -| `install` | Installs node header files for the given version -| `list` | Lists the currently installed node header versions -| `remove` | Removes the node header files for the given version +| `install` | Installs Node.js header files for the given version +| `list` | Lists the currently installed Node.js header versions +| `remove` | Removes the Node.js header files for the given version -Command Options --------- +## Command Options `node-gyp` accepts the following command options: | **Command** | **Description** |:----------------------------------|:------------------------------------------ -| `-j n`, `--jobs n` | Run make in parallel -| `--target=v6.2.1` | Node version to build for (default=process.version) +| `-j n`, `--jobs n` | Run `make` in parallel. The value `max` will use all available CPU cores +| `--target=v6.2.1` | Node.js version to build for (default is `process.version`) | `--silly`, `--loglevel=silly` | Log all progress to console | `--verbose`, `--loglevel=verbose` | Log most progress to console | `--silent`, `--loglevel=silent` | Don't log anything to console -| `debug`, `--debug` | Make Debug build (default=Release) +| `debug`, `--debug` | Make Debug build (default is `Release`) | `--release`, `--no-debug` | Make Release build | `-C $dir`, `--directory=$dir` | Run command in different directory -| `--make=$make` | Override make command (e.g. gmake) +| `--make=$make` | Override `make` command (e.g. `gmake`) | `--thin=yes` | Enable thin static libraries | `--arch=$arch` | Set target architecture (e.g. 
ia32) | `--tarball=$path` | Get headers from a local tarball -| `--devdir=$path` | SDK download directory (default=~/.node-gyp) +| `--devdir=$path` | SDK download directory (default is OS cache directory) | `--ensure` | Don't reinstall headers if already present | `--dist-url=$url` | Download header tarball from custom URL | `--proxy=$url` | Set HTTP proxy for downloading header tarball | `--cafile=$cafile` | Override default CA chain (to download tarball) | `--nodedir=$path` | Set the path to the node source code -| `--python=$path` | Set path to the python (2) binary -| `--msvs_version=$version` | Set Visual Studio version (win) -| `--solution=$solution` | Set Visual Studio Solution version (win) - - -Configuration --------- - -__`node-gyp` responds to environment variables or `npm` configuration__ -1. Environment variables take the form `npm_config_OPTION_NAME` for any of the - options listed above (dashes in option names should be replaced by underscores). - These work also when `node-gyp` is invoked directly: - `$ export npm_config_devdir=/tmp/.gyp` - or on Windows - `> set npm_config_devdir=c:\temp\.gyp` -2. As `npm` configuration, variables take the form `OPTION_NAME`. - This way only works when `node-gyp` is executed by `npm`: - `$ npm config set [--global] devdir /tmp/.gyp` - `$ npm i buffertools` - - - -License -------- - -(The MIT License) - -Copyright (c) 2012 Nathan Rajlich <nathan@tootallnate.net> - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -[python-v2.7.10]: https://www.python.org/downloads/release/python-2710/ -[msvc2013]: https://www.microsoft.com/en-gb/download/details.aspx?id=44914 -[win7sdk]: https://www.microsoft.com/en-us/download/details.aspx?id=8279 -[compiler update for the Windows SDK 7.1]: https://www.microsoft.com/en-us/download/details.aspx?id=4422 +| `--python=$path` | Set path to the Python binary +| `--msvs_version=$version` | Set Visual Studio version (Windows only) +| `--solution=$solution` | Set Visual Studio Solution version (Windows only) + +## Configuration + +### Environment variables + +Use the form `npm_config_OPTION_NAME` for any of the command options listed +above (dashes in option names should be replaced by underscores). + +For example, to set `devdir` equal to `/tmp/.gyp`, you would: + +Run this on Unix: + +```bash +$ export npm_config_devdir=/tmp/.gyp +``` + +Or this on Windows: + +```console +> set npm_config_devdir=c:\temp\.gyp +``` + +### `npm` configuration + +Use the form `OPTION_NAME` for any of the command options listed above. 
+ +For example, to set `devdir` equal to `/tmp/.gyp`, you would run: + +```bash +$ npm config set [--global] devdir /tmp/.gyp +``` + +**Note:** Configuration set via `npm` will only be used when `node-gyp` +is run via `npm`, not when `node-gyp` is run directly. + +## License + +`node-gyp` is available under the MIT license. See the [LICENSE +file](LICENSE) for details. diff --git a/node_modules/node-gyp/addon.gypi b/node_modules/node-gyp/addon.gypi index 55fb3211186f4..6462f539ffb72 100644 --- a/node_modules/node-gyp/addon.gypi +++ b/node_modules/node-gyp/addon.gypi @@ -1,6 +1,7 @@ { 'variables' : { 'node_engine_include_dir%': 'deps/v8/include', + 'node_host_binary%': 'node' }, 'target_defaults': { 'type': 'loadable_module', @@ -55,6 +56,14 @@ 'standalone_static_library': '<(standalone_static_library)' }], + ['_type!="executable"', { + 'conditions': [ + [ 'OS=="android"', { + 'cflags!': [ '-fPIE' ], + }] + ] + }], + ['_win_delay_load_hook=="true"', { # If the addon specifies `'win_delay_load_hook': 'true'` in its # binding.gyp, link a delay-load hook into the DLL. This hook ensures @@ -62,12 +71,13 @@ # is named node.exe, iojs.exe, or something else. 'conditions': [ [ 'OS=="win"', { + 'defines': [ 'HOST_BINARY=\"<(node_host_binary)<(EXECUTABLE_SUFFIX)\"', ], 'sources': [ '<(node_gyp_dir)/src/win_delay_load_hook.cc', ], 'msvs_settings': { 'VCLinkerTool': { - 'DelayLoadDLLs': [ 'iojs.exe', 'node.exe' ], + 'DelayLoadDLLs': [ '<(node_host_binary)<(EXECUTABLE_SUFFIX)' ], # Don't print a linker warning when no imports from either .exe # are used. 'AdditionalOptions': [ '/ignore:4199' ], @@ -96,7 +106,14 @@ 'cflags': [ '-q64', '-Wc,DLL', - '-qlonglong' + '-qlonglong', + '-qenum=int', + '-qxclang=-fexec-charset=ISO8859-1' + ], + 'defines': [ + '_ALL_SOURCE=1', + 'MAP_FAILED=-1', + '_UNIX03_SOURCE=1' ], 'ldflags': [ '-q64', @@ -138,10 +155,10 @@ '_FILE_OFFSET_BITS=64' ], }], - [ 'OS in "freebsd openbsd netbsd solaris" or \ + [ 'OS in "freebsd openbsd netbsd solaris android" or \ (OS=="linux" and target_arch!="ia32")', { 'cflags': [ '-fPIC' ], - }] + }], ] } } diff --git a/node_modules/node-gyp/bin/node-gyp.js b/node_modules/node-gyp/bin/node-gyp.js index 70d7d502628bc..49b5721d02d89 100755 --- a/node_modules/node-gyp/bin/node-gyp.js +++ b/node_modules/node-gyp/bin/node-gyp.js @@ -1,39 +1,33 @@ #!/usr/bin/env node -/** - * Set the title. - */ +'use strict' process.title = 'node-gyp' -/** - * Module dependencies. - */ - -var gyp = require('../') -var log = require('npmlog') -var osenv = require('osenv') -var path = require('path') +const envPaths = require('env-paths') +const gyp = require('../') +const log = require('npmlog') +const os = require('os') /** * Process and execute the selected commands. */ -var prog = gyp() +const prog = gyp() var completed = false prog.parseArgv(process.argv) prog.devDir = prog.opts.devdir -var homeDir = osenv.home() +var homeDir = os.homedir() if (prog.devDir) { prog.devDir = prog.devDir.replace(/^~/, homeDir) } else if (homeDir) { - prog.devDir = path.resolve(homeDir, '.node-gyp') + prog.devDir = envPaths('node-gyp', { suffix: '' }).cache } else { throw new Error( "node-gyp requires that the user's home directory is specified " + - "in either of the environmental variables HOME or USERPROFILE. " + - "Overide with: --devdir /path/to/.node-gyp") + 'in either of the environmental variables HOME or USERPROFILE. 
' + + 'Overide with: --devdir /path/to/.node-gyp') } if (prog.todo.length === 0) { @@ -42,7 +36,7 @@ if (prog.todo.length === 0) { } else { console.log('%s', prog.usage()) } - return process.exit(0) + process.exit(0) } log.info('it worked if it ends with', 'ok') @@ -50,7 +44,6 @@ log.verbose('cli', process.argv) log.info('using', 'node-gyp@%s', prog.version) log.info('using', 'node@%s | %s | %s', process.versions.node, process.platform, process.arch) - /** * Change dir if -C/--directory was passed. */ @@ -92,7 +85,7 @@ function run () { log.error('not ok') return process.exit(1) } - if (command.name == 'list') { + if (command.name === 'list') { var versions = arguments[1] if (versions.length > 0) { versions.forEach(function (version) { @@ -126,11 +119,11 @@ process.on('uncaughtException', function (err) { }) function errorMessage () { - // copied from npm's lib/util/error-handler.js + // copied from npm's lib/utils/error-handler.js var os = require('os') log.error('System', os.type() + ' ' + os.release()) log.error('command', process.argv - .map(JSON.stringify).join(' ')) + .map(JSON.stringify).join(' ')) log.error('cwd', process.cwd()) log.error('node -v', process.version) log.error('node-gyp -v', 'v' + prog.package.version) @@ -138,10 +131,10 @@ function errorMessage () { function issueMessage () { errorMessage() - log.error('', [ 'This is a bug in `node-gyp`.' - , 'Try to update node-gyp and file an Issue if it does not help:' - , ' ' - ].join('\n')) + log.error('', ['This is a bug in `node-gyp`.', + 'Try to update node-gyp and file an Issue if it does not help:', + ' ' + ].join('\n')) } // start running the given commands! diff --git a/node_modules/node-gyp/gyp/PRESUBMIT.py b/node_modules/node-gyp/gyp/PRESUBMIT.py index f6c8a357afe14..e52f9d2d22d60 100644 --- a/node_modules/node-gyp/gyp/PRESUBMIT.py +++ b/node_modules/node-gyp/gyp/PRESUBMIT.py @@ -76,8 +76,7 @@ def _LicenseHeader(input_api): # Accept any year number from 2009 to the current year. current_year = int(input_api.time.strftime('%Y')) - allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1))) - + allowed_years = (str(s) for s in reversed(range(2009, current_year + 1))) years_re = '(' + '|'.join(allowed_years) + ')' # The (c) is deprecated, but tolerate it until it's removed from all files. diff --git a/node_modules/node-gyp/gyp/gyp_main.py b/node_modules/node-gyp/gyp/gyp_main.py index 25a6eba94aae7..f738e8009f71e 100755 --- a/node_modules/node-gyp/gyp/gyp_main.py +++ b/node_modules/node-gyp/gyp/gyp_main.py @@ -6,10 +6,44 @@ import os import sys +import subprocess + +PY3 = bytes != str + +# Below IsCygwin() function copied from pylib/gyp/common.py +def IsCygwin(): + try: + out = subprocess.Popen("uname", + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + stdout, stderr = out.communicate() + if PY3: + stdout = stdout.decode("utf-8") + return "CYGWIN" in str(stdout) + except Exception: + return False + + +def UnixifyPath(path): + try: + if not IsCygwin(): + return path + out = subprocess.Popen(["cygpath", "-u", path], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + stdout, _ = out.communicate() + if PY3: + stdout = stdout.decode("utf-8") + return str(stdout) + except Exception: + return path + # Make sure we're using the version of pylib in this repo, not one installed -# elsewhere on the system. -sys.path.insert(0, os.path.join(os.path.dirname(sys.argv[0]), 'pylib')) +# elsewhere on the system. 
Also convert to Unix style path on Cygwin systems, +# else the 'gyp' library will not be found +path = UnixifyPath(sys.argv[0]) +sys.path.insert(0, os.path.join(os.path.dirname(path), 'pylib')) import gyp if __name__ == '__main__': diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py index 593f0e5b0b2e8..76c4b95c0c3e8 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py @@ -4,22 +4,17 @@ """New implementation of Visual Studio project generation.""" +import hashlib import os import random import gyp.common -# hashlib is supplied as of Python 2.5 as the replacement interface for md5 -# and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if -# available, avoiding a deprecation warning under 2.6. Import md5 otherwise, -# preserving 2.4 compatibility. try: - import hashlib - _new_md5 = hashlib.md5 -except ImportError: - import md5 - _new_md5 = md5.new - + cmp +except NameError: + def cmp(x, y): + return (x > y) - (x < y) # Initialize random number generator random.seed() @@ -50,7 +45,7 @@ def MakeGuid(name, seed='msvs_new'): not change when the project for a target is rebuilt. """ # Calculate a MD5 signature for the seed and name. - d = _new_md5(str(seed) + str(name)).hexdigest().upper() + d = hashlib.md5((str(seed) + str(name)).encode('utf-8')).hexdigest().upper() # Convert most of the signature to GUID form (discard the rest) guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20] + '-' + d[20:32] + '}') @@ -64,6 +59,9 @@ def __cmp__(self, other): # Sort by name then guid (so things are in order on vs2008). return cmp((self.name, self.get_guid()), (other.name, other.get_guid())) + def __lt__(self, other): + return self.__cmp__(other) < 0 + class MSVSFolder(MSVSSolutionEntry): """Folder in a Visual Studio project or solution.""" diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py index a08cc154d7398..065a339a8025e 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py @@ -14,6 +14,10 @@ MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild """ +from __future__ import print_function + +from gyp import string_types + import sys import re @@ -106,11 +110,11 @@ class _String(_Type): """A setting that's just a string.""" def ValidateMSVS(self, value): - if not isinstance(value, basestring): + if not isinstance(value, string_types): raise ValueError('expected string; got %r' % value) def ValidateMSBuild(self, value): - if not isinstance(value, basestring): + if not isinstance(value, string_types): raise ValueError('expected string; got %r' % value) def ConvertToMSBuild(self, value): @@ -122,11 +126,11 @@ class _StringList(_Type): """A settings that's a list of strings.""" def ValidateMSVS(self, value): - if not isinstance(value, basestring) and not isinstance(value, list): + if not isinstance(value, string_types) and not isinstance(value, list): raise ValueError('expected string list; got %r' % value) def ValidateMSBuild(self, value): - if not isinstance(value, basestring) and not isinstance(value, list): + if not isinstance(value, string_types) and not isinstance(value, list): raise ValueError('expected string list; got %r' % value) def ConvertToMSBuild(self, value): @@ -400,7 +404,7 @@ def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr): if unrecognized: # We don't know this setting. 
Give a warning. - print >> stderr, error_msg + print(error_msg, file=stderr) def FixVCMacroSlashes(s): @@ -433,7 +437,7 @@ def ConvertVCMacrosToMSBuild(s): '$(PlatformName)': '$(Platform)', '$(SafeInputName)': '%(Filename)', } - for old, new in replace_map.iteritems(): + for old, new in replace_map.items(): s = s.replace(old, new) s = FixVCMacroSlashes(s) return s @@ -453,17 +457,17 @@ def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr): dictionaries of settings and their values. """ msbuild_settings = {} - for msvs_tool_name, msvs_tool_settings in msvs_settings.iteritems(): + for msvs_tool_name, msvs_tool_settings in msvs_settings.items(): if msvs_tool_name in _msvs_to_msbuild_converters: msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name] - for msvs_setting, msvs_value in msvs_tool_settings.iteritems(): + for msvs_setting, msvs_value in msvs_tool_settings.items(): if msvs_setting in msvs_tool: # Invoke the translation function. try: msvs_tool[msvs_setting](msvs_value, msbuild_settings) - except ValueError, e: - print >> stderr, ('Warning: while converting %s/%s to MSBuild, ' - '%s' % (msvs_tool_name, msvs_setting, e)) + except ValueError as e: + print('Warning: while converting %s/%s to MSBuild, ' + '%s' % (msvs_tool_name, msvs_setting, e), file=stderr) else: _ValidateExclusionSetting(msvs_setting, msvs_tool, @@ -472,8 +476,8 @@ def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr): (msvs_tool_name, msvs_setting)), stderr) else: - print >> stderr, ('Warning: unrecognized tool %s while converting to ' - 'MSBuild.' % msvs_tool_name) + print('Warning: unrecognized tool %s while converting to ' + 'MSBuild.' % msvs_tool_name, file=stderr) return msbuild_settings @@ -513,13 +517,13 @@ def _ValidateSettings(validators, settings, stderr): for tool_name in settings: if tool_name in validators: tool_validators = validators[tool_name] - for setting, value in settings[tool_name].iteritems(): + for setting, value in settings[tool_name].items(): if setting in tool_validators: try: tool_validators[setting](value) - except ValueError, e: - print >> stderr, ('Warning: for %s/%s, %s' % - (tool_name, setting, e)) + except ValueError as e: + print('Warning: for %s/%s, %s' % + (tool_name, setting, e), file=stderr) else: _ValidateExclusionSetting(setting, tool_validators, @@ -528,7 +532,7 @@ def _ValidateSettings(validators, settings, stderr): stderr) else: - print >> stderr, ('Warning: unrecognized tool %s' % tool_name) + print('Warning: unrecognized tool %s' % (tool_name), file=stderr) # MSVS and MBuild names of the tools. @@ -539,6 +543,7 @@ def _ValidateSettings(validators, settings, stderr): _lib = _Tool('VCLibrarianTool', 'Lib') _manifest = _Tool('VCManifestTool', 'Manifest') _masm = _Tool('MASM', 'MASM') +_armasm = _Tool('ARMASM', 'ARMASM') _AddTool(_compile) @@ -548,6 +553,7 @@ def _ValidateSettings(validators, settings, stderr): _AddTool(_lib) _AddTool(_manifest) _AddTool(_masm) +_AddTool(_armasm) # Add sections only found in the MSBuild settings. 
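Aside: the MSVSNew.py hunk above touches two Python 3 breakages at once: `hashlib.md5()` accepting only bytes, and the removal of the `cmp()` builtin that `__cmp__`-based sorting relied on. A standalone sketch of both patterns follows; the `make_guid`/`Entry` names are illustrative, not the gyp code itself.

```python
import hashlib

try:                      # Python 2 still provides the cmp() builtin
    cmp
except NameError:         # Python 3 removed it; recreate the three-way compare
    def cmp(x, y):
        return (x > y) - (x < y)

def make_guid(name, seed='msvs_new'):
    # hashlib.md5() only accepts bytes on Python 3, so encode the text first.
    d = hashlib.md5((str(seed) + str(name)).encode('utf-8')).hexdigest().upper()
    return ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' +
            d[16:20] + '-' + d[20:32] + '}')

class Entry(object):
    def __init__(self, name):
        self.name = name
    def __cmp__(self, other):        # used by Python 2's sort
        return cmp(self.name, other.name)
    def __lt__(self, other):         # Python 3 sorts via rich comparisons
        return self.__cmp__(other) < 0

print(make_guid('my_target'))
print([e.name for e in sorted([Entry('b'), Entry('a')])])
```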
_msbuild_validators[''] = {} _msbuild_validators['ProjectReference'] = {} @@ -969,7 +975,9 @@ def _ValidateSettings(validators, settings, stderr): _Enumeration(['NotSet', 'Win32', # /env win32 'Itanium', # /env ia64 - 'X64'])) # /env x64 + 'X64', # /env x64 + 'ARM64', # /env arm64 + ])) _Same(_midl, 'EnableErrorChecks', _Enumeration(['EnableCustom', 'None', # /error none diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py index bf6ea6b802ff9..ce71c38a9b94f 100755 --- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py @@ -6,7 +6,11 @@ """Unit tests for the MSVSSettings.py file.""" -import StringIO +try: + from cStringIO import StringIO +except ImportError: + from io import StringIO + import unittest import gyp.MSVSSettings as MSVSSettings @@ -14,7 +18,7 @@ class TestSequenceFunctions(unittest.TestCase): def setUp(self): - self.stderr = StringIO.StringIO() + self.stderr = StringIO() def _ExpectedWarnings(self, expected): """Compares recorded lines to expected warnings.""" @@ -1081,6 +1085,7 @@ def testConvertToMSBuildSettings_full_synthetic(self): 'GenerateManifest': 'true', 'IgnoreImportLibrary': 'true', 'LinkIncremental': 'false'}} + self.maxDiff = 9999 # on failure display a long diff actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( msvs_settings, self.stderr) @@ -1472,6 +1477,7 @@ def testConvertToMSBuildSettings_actual(self): 'ResourceOutputFileName': '$(IntDir)$(TargetFileName).embed.manifest.resfdsf'} } + self.maxDiff = 9999 # on failure display a long diff actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( msvs_settings, self.stderr) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py index 6c07e9a893bac..2264d640152a2 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py @@ -91,7 +91,7 @@ def AddDebugSettings(self, config_name, command, environment = {}, if environment and isinstance(environment, dict): env_list = ['%s="%s"' % (key, val) - for (key,val) in environment.iteritems()] + for (key,val) in environment.items()] environment = ' '.join(env_list) else: environment = '' @@ -135,7 +135,7 @@ def AddDebugSettings(self, config_name, command, environment = {}, def WriteIfChanged(self): """Writes the user file.""" configs = ['Configurations'] - for config, spec in sorted(self.configurations.iteritems()): + for config, spec in sorted(self.configurations.items()): configs.append(spec) content = ['VisualStudioUserFile', diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py index 0b32e91180784..c8187eb331447 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py @@ -235,7 +235,7 @@ def InsertLargePdbShims(target_list, target_dicts, vars): # Set up the shim to output its PDB to the same location as the final linker # target. - for config_name, config in shim_dict.get('configurations').iteritems(): + for config_name, config in shim_dict.get('configurations').items(): pdb_path = _GetPdbPath(target_dict, config_name, vars) # A few keys that we don't want to propagate. 
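Aside: two small compatibility idioms recur in the test and MSVSUserFile hunks above: importing a `StringIO` class that exists on both interpreters, and iterating dicts with `.items()` now that `.iteritems()` is gone. A minimal sketch, with a hypothetical `format_environment` helper:

```python
try:
    from cStringIO import StringIO   # Python 2: fast C implementation
except ImportError:
    from io import StringIO          # Python 3: cStringIO is gone

def format_environment(environment):
    # dict.iteritems() no longer exists on Python 3; .items() works on both.
    env_list = ['%s="%s"' % (key, val) for (key, val) in environment.items()]
    return ' '.join(env_list)

buf = StringIO()
buf.write(format_environment({'DEPLOY_VERSION': 'testing'}))
print(buf.getvalue())
```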
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py index d9bfa684fa30c..c7cf68d3a1ba9 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py @@ -12,6 +12,8 @@ import gyp import glob +PY3 = bytes != str + class VisualStudioVersion(object): """Information regarding a version of Visual Studio.""" @@ -132,6 +134,8 @@ def _RegistryQueryBase(sysdir, key, value): # Obtain the stdout from reg.exe, reading to the end so p.returncode is valid # Note that the error text may be in [1] in some cases text = p.communicate()[0] + if PY3: + text = text.decode('utf-8') # Check return code from reg.exe; officially 0==success and 1==error if p.returncode: return None @@ -158,7 +162,7 @@ def _RegistryQuery(key, value=None): text = None try: text = _RegistryQueryBase('Sysnative', key, value) - except OSError, e: + except OSError as e: if e.errno == errno.ENOENT: text = _RegistryQueryBase('System32', key, value) else: @@ -176,12 +180,18 @@ def _RegistryGetValueUsingWinReg(key, value): contents of the registry key's value, or None on failure. Throws ImportError if _winreg is unavailable. """ - import _winreg + try: + # Python 2 + from _winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx + except ImportError: + # Python 3 + from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx + try: root, subkey = key.split('\\', 1) assert root == 'HKLM' # Only need HKLM for now. - with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey: - return _winreg.QueryValueEx(hkey, value)[0] + with OpenKey(HKEY_LOCAL_MACHINE, subkey) as hkey: + return QueryValueEx(hkey, value)[0] except WindowsError: return None @@ -328,6 +338,8 @@ def _ConvertToCygpath(path): if sys.platform == 'cygwin': p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE) path = p.communicate()[0].strip() + if PY3: + path = path.decode('utf-8') return path diff --git a/node_modules/node-gyp/gyp/pylib/gyp/__init__.py b/node_modules/node-gyp/gyp/pylib/gyp/__init__.py index 668f38b60d009..dee834013f414 100755 --- a/node_modules/node-gyp/gyp/pylib/gyp/__init__.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/__init__.py @@ -4,9 +4,11 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +from __future__ import print_function + import copy import gyp.input -import optparse +import argparse import os.path import re import shlex @@ -14,6 +16,13 @@ import traceback from gyp.common import GypError +try: + # Python 2 + string_types = basestring +except NameError: + # Python 3 + string_types = str + # Default debug modes for GYP debug = {} @@ -34,8 +43,8 @@ def DebugOutput(mode, message, *args): pass if args: message %= args - print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]), - ctx[1], ctx[2], message) + print('%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]), + ctx[1], ctx[2], message)) def FindBuildFiles(): extension = '.gyp' @@ -207,7 +216,7 @@ def Noop(value): # We always want to ignore the environment when regenerating, to avoid # duplicate or changed flags in the environment at the time of regeneration. 
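Aside: the MSVSVersion.py hunk above combines three recurring fixes: detecting Python 3 with `bytes != str`, decoding `subprocess` output (which is bytes on Python 3) back to text, and importing the registry module under its old or new name. A self-contained sketch under those same assumptions; `run_and_capture` is a made-up helper, not gyp's code.

```python
import subprocess

PY3 = bytes != str  # True on Python 3, where bytes and str are distinct types

def run_and_capture(cmd):
    # communicate() returns bytes on Python 3; decode so callers always get text.
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    stdout, _ = p.communicate()
    if PY3:
        stdout = stdout.decode('utf-8')
    return p.returncode, stdout

# The registry module was renamed: _winreg (Python 2) became winreg (Python 3).
try:
    from _winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx    # Python 2
except ImportError:
    try:
        from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx  # Python 3
    except ImportError:
        OpenKey = None  # not running on Windows at all

if OpenKey is None:
    print(run_and_capture(['uname'])[1].strip())
else:
    print('winreg is available')
```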
flags = ['--ignore-environment'] - for name, metadata in options._regeneration_metadata.iteritems(): + for name, metadata in options._regeneration_metadata.items(): opt = metadata['opt'] value = getattr(options, name) value_predicate = metadata['type'] == 'path' and FixPath or Noop @@ -226,24 +235,24 @@ def Noop(value): (action == 'store_false' and not value)): flags.append(opt) elif options.use_environment and env_name: - print >>sys.stderr, ('Warning: environment regeneration unimplemented ' + print('Warning: environment regeneration unimplemented ' 'for %s flag %r env_name %r' % (action, opt, - env_name)) + env_name), file=sys.stderr) else: - print >>sys.stderr, ('Warning: regeneration unimplemented for action %r ' - 'flag %r' % (action, opt)) + print('Warning: regeneration unimplemented for action %r ' + 'flag %r' % (action, opt), file=sys.stderr) return flags -class RegeneratableOptionParser(optparse.OptionParser): - def __init__(self): +class RegeneratableOptionParser(argparse.ArgumentParser): + def __init__(self, usage): self.__regeneratable_options = {} - optparse.OptionParser.__init__(self) + argparse.ArgumentParser.__init__(self, usage=usage) - def add_option(self, *args, **kw): + def add_argument(self, *args, **kw): """Add an option to the parser. - This accepts the same arguments as OptionParser.add_option, plus the + This accepts the same arguments as ArgumentParser.add_argument, plus the following: regenerate: can be set to False to prevent this option from being included in regeneration. @@ -260,7 +269,7 @@ def add_option(self, *args, **kw): # it as a string. type = kw.get('type') if type == 'path': - kw['type'] = 'string' + kw['type'] = str self.__regeneratable_options[dest] = { 'action': kw.get('action'), @@ -269,50 +278,50 @@ def add_option(self, *args, **kw): 'opt': args[0], } - optparse.OptionParser.add_option(self, *args, **kw) + argparse.ArgumentParser.add_argument(self, *args, **kw) def parse_args(self, *args): - values, args = optparse.OptionParser.parse_args(self, *args) + values, args = argparse.ArgumentParser.parse_known_args(self, *args) values._regeneration_metadata = self.__regeneratable_options return values, args def gyp_main(args): my_name = os.path.basename(sys.argv[0]) + usage = 'usage: %(prog)s [options ...] [build_file ...]' + - parser = RegeneratableOptionParser() - usage = 'usage: %s [options ...] [build_file ...]' - parser.set_usage(usage.replace('%s', '%prog')) - parser.add_option('--build', dest='configs', action='append', + parser = RegeneratableOptionParser(usage=usage.replace('%s', '%(prog)s')) + parser.add_argument('--build', dest='configs', action='append', help='configuration for build after project generation') - parser.add_option('--check', dest='check', action='store_true', + parser.add_argument('--check', dest='check', action='store_true', help='check format of gyp files') - parser.add_option('--config-dir', dest='config_dir', action='store', + parser.add_argument('--config-dir', dest='config_dir', action='store', env_name='GYP_CONFIG_DIR', default=None, help='The location for configuration files like ' 'include.gypi.') - parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE', + parser.add_argument('-d', '--debug', dest='debug', metavar='DEBUGMODE', action='append', default=[], help='turn on a debugging ' 'mode for debugging GYP. 
Supported modes are "variables", ' '"includes" and "general" or "all" for all of them.') - parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL', + parser.add_argument('-D', dest='defines', action='append', metavar='VAR=VAL', env_name='GYP_DEFINES', help='sets variable VAR to value VAL') - parser.add_option('--depth', dest='depth', metavar='PATH', type='path', + parser.add_argument('--depth', dest='depth', metavar='PATH', type='path', help='set DEPTH gyp variable to a relative path to PATH') - parser.add_option('-f', '--format', dest='formats', action='append', + parser.add_argument('-f', '--format', dest='formats', action='append', env_name='GYP_GENERATORS', regenerate=False, help='output formats to generate') - parser.add_option('-G', dest='generator_flags', action='append', default=[], + parser.add_argument('-G', dest='generator_flags', action='append', default=[], metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS', help='sets generator flag FLAG to VAL') - parser.add_option('--generator-output', dest='generator_output', + parser.add_argument('--generator-output', dest='generator_output', action='store', default=None, metavar='DIR', type='path', env_name='GYP_GENERATOR_OUTPUT', help='puts generated build files under DIR') - parser.add_option('--ignore-environment', dest='use_environment', + parser.add_argument('--ignore-environment', dest='use_environment', action='store_false', default=True, regenerate=False, help='do not read options from environment variables') - parser.add_option('-I', '--include', dest='includes', action='append', + parser.add_argument('-I', '--include', dest='includes', action='append', metavar='INCLUDE', type='path', help='files to include in all loaded .gyp files') # --no-circular-check disables the check for circular relationships between @@ -322,7 +331,7 @@ def gyp_main(args): # option allows the strict behavior to be used on Macs and the lenient # behavior to be used elsewhere. # TODO(mark): Remove this option when http://crbug.com/35878 is fixed. - parser.add_option('--no-circular-check', dest='circular_check', + parser.add_argument('--no-circular-check', dest='circular_check', action='store_false', default=True, regenerate=False, help="don't check for circular relationships between files") # --no-duplicate-basename-check disables the check for duplicate basenames @@ -331,18 +340,18 @@ def gyp_main(args): # when duplicate basenames are passed into Make generator on Mac. # TODO(yukawa): Remove this option when these legacy generators are # deprecated. 
- parser.add_option('--no-duplicate-basename-check', + parser.add_argument('--no-duplicate-basename-check', dest='duplicate_basename_check', action='store_false', default=True, regenerate=False, help="don't check for duplicate basenames") - parser.add_option('--no-parallel', action='store_true', default=False, + parser.add_argument('--no-parallel', action='store_true', default=False, help='Disable multiprocessing') - parser.add_option('-S', '--suffix', dest='suffix', default='', + parser.add_argument('-S', '--suffix', dest='suffix', default='', help='suffix to add to generated files') - parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store', + parser.add_argument('--toplevel-dir', dest='toplevel_dir', action='store', default=None, metavar='DIR', type='path', help='directory to use as the root of the source tree') - parser.add_option('-R', '--root-target', dest='root_targets', + parser.add_argument('-R', '--root-target', dest='root_targets', action='append', metavar='TARGET', help='include only TARGET and its deep dependencies') @@ -410,7 +419,7 @@ def gyp_main(args): for option, value in sorted(options.__dict__.items()): if option[0] == '_': continue - if isinstance(value, basestring): + if isinstance(value, string_types): DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value) else: DebugOutput(DEBUG_GENERAL, " %s: %s", option, value) @@ -432,7 +441,7 @@ def gyp_main(args): build_file_dir = os.path.abspath(os.path.dirname(build_file)) build_file_dir_components = build_file_dir.split(os.path.sep) components_len = len(build_file_dir_components) - for index in xrange(components_len - 1, -1, -1): + for index in range(components_len - 1, -1, -1): if build_file_dir_components[index] == 'src': options.depth = os.path.sep.join(build_file_dir_components) break @@ -475,7 +484,7 @@ def gyp_main(args): if home_dot_gyp != None: default_include = os.path.join(home_dot_gyp, 'include.gypi') if os.path.exists(default_include): - print 'Using overrides found in ' + default_include + print('Using overrides found in ' + default_include) includes.append(default_include) # Command-line --include files come after the default include. @@ -536,7 +545,7 @@ def gyp_main(args): def main(args): try: return gyp_main(args) - except GypError, e: + except GypError as e: sys.stderr.write("gyp: %s\n" % e) return 1 diff --git a/node_modules/node-gyp/gyp/pylib/gyp/common.py b/node_modules/node-gyp/gyp/pylib/gyp/common.py index 501118796f00d..d866e81d40b0a 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/common.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/common.py @@ -2,15 +2,20 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from __future__ import with_statement - -import collections import errno import filecmp import os.path import re import tempfile import sys +import subprocess + +try: + from collections.abc import MutableSet +except ImportError: + from collections import MutableSet + +PY3 = bytes != str # A minimal memoizing decorator. It'll blow up if the args aren't immutable, @@ -339,11 +344,16 @@ def WriteOnDiff(filename): class Writer(object): """Wrapper around file which only covers the target if it differs.""" def __init__(self): + # On Cygwin remove the "dir" argument because `C:` prefixed paths are treated as relative, + # consequently ending up with current dir "/cygdrive/c/..." being prefixed to those, which was + # obviously a non-existent path, for example: "/cygdrive/c//C:\". 
+ # See https://docs.python.org/2/library/tempfile.html#tempfile.mkstemp for more details + base_temp_dir = "" if IsCygwin() else os.path.dirname(filename) # Pick temporary file. tmp_fd, self.tmp_path = tempfile.mkstemp( suffix='.tmp', prefix=os.path.split(filename)[1] + '.gyp.', - dir=os.path.split(filename)[0]) + dir=base_temp_dir) try: self.tmp_file = os.fdopen(tmp_fd, 'wb') except Exception: @@ -363,7 +373,7 @@ def close(self): same = False try: same = filecmp.cmp(self.tmp_path, filename, False) - except OSError, e: + except OSError as e: if e.errno != errno.ENOENT: raise @@ -382,9 +392,9 @@ def close(self): # # No way to get the umask without setting a new one? Set a safe one # and then set it back to the old value. - umask = os.umask(077) + umask = os.umask(0o77) os.umask(umask) - os.chmod(self.tmp_path, 0666 & ~umask) + os.chmod(self.tmp_path, 0o666 & ~umask) if sys.platform == 'win32' and os.path.exists(filename): # NOTE: on windows (but not cygwin) rename will not replace an # existing file, so it must be preceded with a remove. Sadly there @@ -396,6 +406,9 @@ def close(self): os.unlink(self.tmp_path) raise + def write(self, s): + self.tmp_file.write(s.encode('utf-8')) + return Writer() @@ -464,7 +477,7 @@ def CopyTool(flavor, out_path): ''.join([source[0], '# Generated by gyp. Do not edit.\n'] + source[1:])) # Make file executable. - os.chmod(tool_path, 0755) + os.chmod(tool_path, 0o755) # From Alex Martelli, @@ -487,7 +500,7 @@ def uniquer(seq, idfun=None): # Based on http://code.activestate.com/recipes/576694/. -class OrderedSet(collections.MutableSet): +class OrderedSet(MutableSet): def __init__(self, iterable=None): self.end = end = [] end += [None, end, end] # sentinel node for doubly linked list @@ -610,3 +623,16 @@ def CrossCompileRequested(): os.environ.get('AR_target') or os.environ.get('CC_target') or os.environ.get('CXX_target')) + +def IsCygwin(): + try: + out = subprocess.Popen("uname", + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + stdout, stderr = out.communicate() + if PY3: + stdout = stdout.decode("utf-8") + return "CYGWIN" in str(stdout) + except Exception: + return False + diff --git a/node_modules/node-gyp/gyp/pylib/gyp/common_test.py b/node_modules/node-gyp/gyp/pylib/gyp/common_test.py index ad6f9a1438f30..b75bbb8412b6f 100755 --- a/node_modules/node-gyp/gyp/pylib/gyp/common_test.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/common_test.py @@ -59,10 +59,10 @@ def test_platform_default(self): self.assertFlavor('freebsd', 'freebsd9' , {}) self.assertFlavor('freebsd', 'freebsd10', {}) self.assertFlavor('openbsd', 'openbsd5' , {}) - self.assertFlavor('solaris', 'sunos5' , {}); - self.assertFlavor('solaris', 'sunos' , {}); - self.assertFlavor('linux' , 'linux2' , {}); - self.assertFlavor('linux' , 'linux3' , {}); + self.assertFlavor('solaris', 'sunos5' , {}) + self.assertFlavor('solaris', 'sunos' , {}) + self.assertFlavor('linux' , 'linux2' , {}) + self.assertFlavor('linux' , 'linux3' , {}) def test_param(self): self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'}) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py b/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py index 841f31f925b05..1ddd9091756a2 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py @@ -5,6 +5,7 @@ import re import os import locale +from functools import reduce def XmlToString(content, encoding='utf-8', pretty=False): @@ -80,7 +81,7 @@ def _ConstructContentList(xml_parts, specification, pretty, level=0): # 
Optionally in second position is a dictionary of the attributes. rest = specification[1:] if rest and isinstance(rest[0], dict): - for at, val in sorted(rest[0].iteritems()): + for at, val in sorted(rest[0].items()): xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True))) rest = rest[1:] if rest: @@ -119,7 +120,7 @@ def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False, default_encoding = locale.getdefaultlocale()[1] if default_encoding.upper() != encoding.upper(): - xml_string = xml_string.decode(default_encoding).encode(encoding) + xml_string = xml_string.encode(encoding) # Get the old content try: @@ -131,7 +132,7 @@ def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False, # It has changed, write it if existing != xml_string: - f = open(path, 'w') + f = open(path, 'wb') f.write(xml_string) f.close() diff --git a/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py b/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py index df64354982c01..2a80b8a4564aa 100755 --- a/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py @@ -8,13 +8,16 @@ import gyp.easy_xml as easy_xml import unittest -import StringIO +try: + from cStringIO import StringIO +except ImportError: + from io import StringIO class TestSequenceFunctions(unittest.TestCase): def setUp(self): - self.stderr = StringIO.StringIO() + self.stderr = StringIO() def test_EasyXml_simple(self): self.assertEqual( diff --git a/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py b/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py index b38d8660f7285..81fb79d136a0a 100755 --- a/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py @@ -39,7 +39,7 @@ def ExecFlock(self, lockfile, *cmd_list): # where fcntl.flock(fd, LOCK_EX) always fails # with EBADF, that's why we use this F_SETLK # hack instead. - fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666) + fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0o666) if sys.platform.startswith('aix'): # Python on AIX is compiled with LARGEFILE support, which changes the # struct size. diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py index 921c1a6b71432..59d73dbedbd31 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py @@ -62,6 +62,8 @@ then the "all" target includes "b1" and "b2". """ +from __future__ import print_function + import gyp.common import gyp.ninja_syntax as ninja_syntax import json @@ -155,7 +157,7 @@ def _AddSources(sources, base_path, base_path_components, result): continue result.append(base_path + source) if debug: - print 'AddSource', org_source, result[len(result) - 1] + print('AddSource', org_source, result[len(result) - 1]) def _ExtractSourcesFromAction(action, base_path, base_path_components, @@ -185,7 +187,7 @@ def _ExtractSources(target, target_dict, toplevel_dir): base_path += '/' if debug: - print 'ExtractSources', target, base_path + print('ExtractSources', target, base_path) results = [] if 'sources' in target_dict: @@ -278,7 +280,7 @@ def _WasBuildFileModified(build_file, data, files, toplevel_dir): the root of the source tree.""" if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files: if debug: - print 'gyp file modified', build_file + print('gyp file modified', build_file) return True # First element of included_files is the file itself. 
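Aside: the easy_xml.py and flock_tool.py hunks above move to writing explicitly encoded bytes and to the `0o` octal prefix that Python 3 requires. A condensed sketch of that write-if-changed pattern; the path and helper name are made up.

```python
import os

def write_if_changed(path, text, encoding='utf-8'):
    # Encode explicitly and compare/write bytes, so behaviour is identical on
    # Python 2 and 3 (Python 3 refuses to write str to a binary-mode file).
    data = text.encode(encoding)
    try:
        with open(path, 'rb') as f:
            if f.read() == data:
                return False          # unchanged, leave the file alone
    except IOError:
        pass                          # file does not exist yet
    with open(path, 'wb') as f:
        f.write(data)
    # Octal literals need the 0o prefix on Python 3 (plain 0666 is a syntax error).
    os.chmod(path, 0o666)
    return True

print(write_if_changed('/tmp/example.xml', u'<project name="demo"/>\n'))
```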
@@ -291,8 +293,8 @@ def _WasBuildFileModified(build_file, data, files, toplevel_dir): _ToGypPath(gyp.common.UnrelativePath(include_file, build_file)) if _ToLocalPath(toplevel_dir, rel_include_file) in files: if debug: - print 'included gyp file modified, gyp_file=', build_file, \ - 'included file=', rel_include_file + print('included gyp file modified, gyp_file=', build_file, + 'included file=', rel_include_file) return True return False @@ -373,7 +375,7 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, # If a build file (or any of its included files) is modified we assume all # targets in the file are modified. if build_file_in_files[build_file]: - print 'matching target from modified build file', target_name + print('matching target from modified build file', target_name) target.match_status = MATCH_STATUS_MATCHES matching_targets.append(target) else: @@ -381,7 +383,7 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, toplevel_dir) for source in sources: if _ToGypPath(os.path.normpath(source)) in files: - print 'target', target_name, 'matches', source + print('target', target_name, 'matches', source) target.match_status = MATCH_STATUS_MATCHES matching_targets.append(target) break @@ -433,7 +435,7 @@ def _DoesTargetDependOnMatchingTargets(target): for dep in target.deps: if _DoesTargetDependOnMatchingTargets(dep): target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY - print '\t', target.name, 'matches by dep', dep.name + print('\t', target.name, 'matches by dep', dep.name) return True target.match_status = MATCH_STATUS_DOESNT_MATCH return False @@ -445,7 +447,7 @@ def _GetTargetsDependingOnMatchingTargets(possible_targets): supplied as input to analyzer. possible_targets: targets to search from.""" found = [] - print 'Targets that matched by dependency:' + print('Targets that matched by dependency:') for target in possible_targets: if _DoesTargetDependOnMatchingTargets(target): found.append(target) @@ -484,12 +486,12 @@ def _AddCompileTargets(target, roots, add_if_no_ancestor, result): (add_if_no_ancestor or target.requires_build)) or (target.is_static_library and add_if_no_ancestor and not target.is_or_has_linked_ancestor)): - print '\t\tadding to compile targets', target.name, 'executable', \ - target.is_executable, 'added_to_compile_targets', \ - target.added_to_compile_targets, 'add_if_no_ancestor', \ - add_if_no_ancestor, 'requires_build', target.requires_build, \ - 'is_static_library', target.is_static_library, \ - 'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor + print('\t\tadding to compile targets', target.name, 'executable', + target.is_executable, 'added_to_compile_targets', + target.added_to_compile_targets, 'add_if_no_ancestor', + add_if_no_ancestor, 'requires_build', target.requires_build, + 'is_static_library', target.is_static_library, + 'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor) result.add(target) target.added_to_compile_targets = True @@ -500,7 +502,7 @@ def _GetCompileTargets(matching_targets, supplied_targets): supplied_targets: set of targets supplied to analyzer to search from.""" result = set() for target in matching_targets: - print 'finding compile targets for match', target.name + print('finding compile targets for match', target.name) _AddCompileTargets(target, supplied_targets, True, result) return result @@ -508,46 +510,46 @@ def _GetCompileTargets(matching_targets, supplied_targets): def _WriteOutput(params, **values): """Writes the output, either to stdout or a file 
is specified.""" if 'error' in values: - print 'Error:', values['error'] + print('Error:', values['error']) if 'status' in values: - print values['status'] + print(values['status']) if 'targets' in values: values['targets'].sort() - print 'Supplied targets that depend on changed files:' + print('Supplied targets that depend on changed files:') for target in values['targets']: - print '\t', target + print('\t', target) if 'invalid_targets' in values: values['invalid_targets'].sort() - print 'The following targets were not found:' + print('The following targets were not found:') for target in values['invalid_targets']: - print '\t', target + print('\t', target) if 'build_targets' in values: values['build_targets'].sort() - print 'Targets that require a build:' + print('Targets that require a build:') for target in values['build_targets']: - print '\t', target + print('\t', target) if 'compile_targets' in values: values['compile_targets'].sort() - print 'Targets that need to be built:' + print('Targets that need to be built:') for target in values['compile_targets']: - print '\t', target + print('\t', target) if 'test_targets' in values: values['test_targets'].sort() - print 'Test targets:' + print('Test targets:') for target in values['test_targets']: - print '\t', target + print('\t', target) output_path = params.get('generator_flags', {}).get( 'analyzer_output_path', None) if not output_path: - print json.dumps(values) + print(json.dumps(values)) return try: f = open(output_path, 'w') f.write(json.dumps(values) + '\n') f.close() except IOError as e: - print 'Error writing to output file', output_path, str(e) + print('Error writing to output file', output_path, str(e)) def _WasGypIncludeFileModified(params, files): @@ -556,7 +558,7 @@ def _WasGypIncludeFileModified(params, files): if params['options'].includes: for include in params['options'].includes: if _ToGypPath(os.path.normpath(include)) in files: - print 'Include file modified, assuming all changed', include + print('Include file modified, assuming all changed', include) return True return False @@ -613,7 +615,7 @@ def _supplied_target_names(self): def _supplied_target_names_no_all(self): """Returns the supplied test targets without 'all'.""" - result = self._supplied_target_names(); + result = self._supplied_target_names() result.discard('all') return result @@ -638,13 +640,13 @@ def find_matching_test_target_names(self): set(self._root_targets))] else: test_targets = [x for x in test_targets_no_all] - print 'supplied test_targets' + print('supplied test_targets') for target_name in self._test_target_names: - print '\t', target_name - print 'found test_targets' + print('\t', target_name) + print('found test_targets') for target in test_targets: - print '\t', target.name - print 'searching for matching test targets' + print('\t', target.name) + print('searching for matching test targets') matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets) matching_test_targets_contains_all = (test_target_names_contains_all and set(matching_test_targets) & @@ -654,22 +656,22 @@ def find_matching_test_target_names(self): # 'all' is subsequentely added to the matching names below. 
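The analyzer.py hunks above are the same mechanical conversion applied throughout this diff: Python 2 print statements become print() calls. A minimal sketch of the _WriteOutput tail, with an invented result dict and no real generator flags; the multi-argument form only matches the old output because these modules also add from __future__ import print_function (visible at the top of the android.py, cmake.py and make.py hunks below), without which Python 2 would print a tuple:

from __future__ import print_function

import json

values = {'status': 'Found dependency',
          'compile_targets': ['base'],
          'test_targets': []}
output_path = None   # stand-in for the analyzer_output_path generator flag

if not output_path:
    print(json.dumps(values))            # was: print json.dumps(values)
else:
    try:
        f = open(output_path, 'w')
        f.write(json.dumps(values) + '\n')
        f.close()
    except IOError as e:
        # was: print 'Error writing to output file', output_path, str(e)
        print('Error writing to output file', output_path, str(e))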
matching_test_targets = [x for x in (set(matching_test_targets) & set(test_targets_no_all))] - print 'matched test_targets' + print('matched test_targets') for target in matching_test_targets: - print '\t', target.name + print('\t', target.name) matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1] for target in matching_test_targets] if matching_test_targets_contains_all: matching_target_names.append('all') - print '\tall' + print('\tall') return matching_target_names def find_matching_compile_target_names(self): """Returns the set of output compile targets.""" - assert self.is_build_impacted(); + assert self.is_build_impacted() # Compile targets are found by searching up from changed targets. # Reset the visited status for _GetBuildTargets. - for target in self._name_to_target.itervalues(): + for target in self._name_to_target.values(): target.visited = False supplied_targets = _LookupTargets(self._supplied_target_names_no_all(), @@ -677,10 +679,10 @@ def find_matching_compile_target_names(self): if 'all' in self._supplied_target_names(): supplied_targets = [x for x in (set(supplied_targets) | set(self._root_targets))] - print 'Supplied test_targets & compile_targets' + print('Supplied test_targets & compile_targets') for target in supplied_targets: - print '\t', target.name - print 'Finding compile targets' + print('\t', target.name) + print('Finding compile targets') compile_targets = _GetCompileTargets(self._changed_targets, supplied_targets) return [gyp.common.ParseQualifiedTarget(target.name)[1] @@ -699,7 +701,7 @@ def GenerateOutput(target_list, target_dicts, data, params): toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir)) if debug: - print 'toplevel_dir', toplevel_dir + print('toplevel_dir', toplevel_dir) if _WasGypIncludeFileModified(params, config.files): result_dict = { 'status': all_changed_string, diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py index 5b26cc785a80e..cecb28c3660b5 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py @@ -14,6 +14,8 @@ # variables set potentially clash with other Android build system variables. # Try to avoid setting global variables where possible. +from __future__ import print_function + import gyp import gyp.common import gyp.generator.make as make # Reuse global functions from make backend. @@ -250,7 +252,7 @@ def WriteActions(self, actions, extra_sources, extra_outputs): dirs = set() for out in outputs: if not out.startswith('$'): - print ('WARNING: Action for target "%s" writes output to local path ' + print('WARNING: Action for target "%s" writes output to local path ' '"%s".' % (self.target, out)) dir = os.path.split(out)[0] if dir: @@ -355,7 +357,7 @@ def WriteRules(self, rules, extra_sources, extra_outputs): dirs = set() for out in outputs: if not out.startswith('$'): - print ('WARNING: Rule for target %s writes output to local path %s' + print('WARNING: Rule for target %s writes output to local path %s' % (self.target, out)) dir = os.path.dirname(out) if dir: @@ -429,7 +431,7 @@ def WriteCopies(self, copies, extra_outputs): # $(gyp_shared_intermediate_dir). Note that we can't use an assertion # because some of the gyp tests depend on this. 
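The itervalues()/iteritems() calls removed in this hunk (and throughout the generator files below) no longer exist on Python 3 dictionaries; values()/items() work on both versions, returning lists on Python 2 and lightweight views on Python 3, and both iterate the same way. dict.has_key() gets the same treatment further down, replaced by the in operator. A small sketch with invented data rather than real gyp target objects:

settings = {'LOCAL_MODULE': 'foo', 'LOCAL_ARM_MODE': 'arm'}

# was: for k, v in settings.iteritems():
for k, v in settings.items():
    print('%s := %s' % (k, v))

# was: for target in self._name_to_target.itervalues():
for v in settings.values():
    pass

One behavioural difference worth noting: Python 3's views cannot be indexed and do not allow the dict to change size during iteration, which is why a few call sites further down switch to list(...) instead.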
if not copy['destination'].startswith('$'): - print ('WARNING: Copy rule for target %s writes output to ' + print('WARNING: Copy rule for target %s writes output to ' 'local path %s' % (self.target, copy['destination'])) # LocalPathify() calls normpath, stripping trailing slashes. @@ -458,7 +460,7 @@ def WriteSourceFlags(self, spec, configs): Args: spec, configs: input from gyp. """ - for configname, config in sorted(configs.iteritems()): + for configname, config in sorted(configs.items()): extracted_includes = [] self.WriteLn('\n# Flags passed to both C and C++ files.') @@ -548,7 +550,7 @@ def WriteSources(self, spec, configs, extra_sources): # to work properly. If the file has the wrong C++ extension, then we add # a rule to copy that to intermediates and use the new version. final_generated_sources = [] - # If a source file gets copied, we still need to add the orginal source + # If a source file gets copied, we still need to add the original source # directory as header search path, for GCC searches headers in the # directory that contains the source file by default. origin_src_dirs = [] @@ -636,7 +638,7 @@ def ComputeOutputParts(self, spec): elif self.type == 'none': target_ext = '.stamp' elif self.type != 'executable': - print ("ERROR: What output file should be generated?", + print("ERROR: What output file should be generated?", "type", self.type, "target", target) if self.type != 'static_library' and self.type != 'shared_library': @@ -788,7 +790,7 @@ def WriteTargetFlags(self, spec, configs, link_deps): static_libs, dynamic_libs, ldflags_libs = self.FilterLibraries(libraries) if self.type != 'static_library': - for configname, config in sorted(configs.iteritems()): + for configname, config in sorted(configs.items()): ldflags = list(config.get('ldflags', [])) self.WriteLn('') self.WriteList(ldflags, 'LOCAL_LDFLAGS_%s' % configname) @@ -837,7 +839,7 @@ def WriteTarget(self, spec, configs, deps, link_deps, part_of_all, settings = spec.get('aosp_build_settings', {}) if settings: self.WriteLn('### Set directly by aosp_build_settings.') - for k, v in settings.iteritems(): + for k, v in settings.items(): if isinstance(v, list): self.WriteList(v, k) else: @@ -956,7 +958,7 @@ def PerformBuild(data, configurations, params): env = dict(os.environ) env['ONE_SHOT_MAKEFILE'] = makefile arguments = ['make', '-C', os.environ['ANDROID_BUILD_TOP'], 'gyp_all_modules'] - print 'Building: %s' % arguments + print('Building: %s' % arguments) subprocess.check_call(arguments, env=env) @@ -1065,7 +1067,7 @@ def CalculateMakefilePath(build_file, base_name): write_alias_target=write_alias_targets, sdk_version=sdk_version) if android_module in android_modules: - print ('ERROR: Android module names must be unique. The following ' + print('ERROR: Android module names must be unique. The following ' 'targets both generate Android module name %s.\n %s\n %s' % (android_module, android_modules[android_module], qualified_target)) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py index 17f5e6396c630..5601a6657e3d0 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py @@ -28,6 +28,8 @@ CMakeLists.txt file. """ +from __future__ import print_function + import multiprocessing import os import signal @@ -237,7 +239,10 @@ def StringToCMakeTargetName(a): Invalid for make: ':' Invalid for unknown reasons but cause failures: '.' 
""" - return a.translate(string.maketrans(' /():."', '_______')) + try: + return a.translate(str.maketrans(' /():."', '_______')) + except AttributeError: + return a.translate(string.maketrans(' /():."', '_______')) def WriteActions(target_name, actions, extra_sources, extra_deps, @@ -572,7 +577,7 @@ class CMakeNamer(object): """Converts Gyp target names into CMake target names. CMake requires that target names be globally unique. One way to ensure - this is to fully qualify the names of the targets. Unfortunatly, this + this is to fully qualify the names of the targets. Unfortunately, this ends up with all targets looking like "chrome_chrome_gyp_chrome" instead of just "chrome". If this generator were only interested in building, it would be possible to fully qualify all target names, then create @@ -639,8 +644,8 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type) if cmake_target_type is None: - print ('Target %s has unknown target type %s, skipping.' % - ( target_name, target_type ) ) + print('Target %s has unknown target type %s, skipping.' % + ( target_name, target_type )) return SetVariable(output, 'TARGET', target_name) @@ -679,7 +684,7 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, for src in srcs: _, ext = os.path.splitext(src) src_type = COMPILABLE_EXTENSIONS.get(ext, None) - src_norm_path = NormjoinPath(path_from_cmakelists_to_gyp, src); + src_norm_path = NormjoinPath(path_from_cmakelists_to_gyp, src) if src_type == 's': s_sources.append(src_norm_path) @@ -863,7 +868,7 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX'] elif target_type != 'executable': - print ('ERROR: What output file should be generated?', + print('ERROR: What output file should be generated?', 'type', target_type, 'target', target_name) product_prefix = spec.get('product_prefix', default_product_prefix) @@ -1180,11 +1185,11 @@ def PerformBuild(data, configurations, params): output_dir, config_name)) arguments = ['cmake', '-G', 'Ninja'] - print 'Generating [%s]: %s' % (config_name, arguments) + print('Generating [%s]: %s' % (config_name, arguments)) subprocess.check_call(arguments, cwd=build_dir) arguments = ['ninja', '-C', build_dir] - print 'Building [%s]: %s' % (config_name, arguments) + print('Building [%s]: %s' % (config_name, arguments)) subprocess.check_call(arguments) @@ -1212,7 +1217,7 @@ def GenerateOutput(target_list, target_dicts, data, params): arglists.append((target_list, target_dicts, data, params, config_name)) pool.map(CallGenerateOutputForConfig, arglists) - except KeyboardInterrupt, e: + except KeyboardInterrupt as e: pool.terminate() raise e else: diff --git a/node_modules/node-gyp/tools/gyp/pylib/gyp/generator/compile_commands_json.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py similarity index 96% rename from node_modules/node-gyp/tools/gyp/pylib/gyp/generator/compile_commands_json.py rename to node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py index 575db63c4e194..1b8490451f979 100644 --- a/node_modules/node-gyp/tools/gyp/pylib/gyp/generator/compile_commands_json.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py @@ -43,7 +43,7 @@ def CalculateVariables(default_variables, params): def AddCommandsForTarget(cwd, target, params, per_config_commands): output_dir = 
params['generator_flags']['output_dir'] - for configuration_name, configuration in target['configurations'].iteritems(): + for configuration_name, configuration in target['configurations'].items(): builddir_name = os.path.join(output_dir, configuration_name) if IsMac(params): @@ -92,7 +92,7 @@ def resolve(filename): def GenerateOutput(target_list, target_dicts, data, params): per_config_commands = {} - for qualified_target, target in target_dicts.iteritems(): + for qualified_target, target in target_dicts.items(): build_file, target_name, toolset = ( gyp.common.ParseQualifiedTarget(qualified_target)) if IsMac(params): @@ -102,7 +102,7 @@ def GenerateOutput(target_list, target_dicts, data, params): AddCommandsForTarget(cwd, target, params, per_config_commands) output_dir = params['generator_flags']['output_dir'] - for configuration_name, commands in per_config_commands.iteritems(): + for configuration_name, commands in per_config_commands.items(): filename = os.path.join(output_dir, configuration_name, 'compile_commands.json') diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py index 160eafe2efeca..8e4f3168f3e7e 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py @@ -1,3 +1,4 @@ +from __future__ import print_function # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -96,4 +97,4 @@ def GenerateOutput(target_list, target_dicts, data, params): f = open(filename, 'w') json.dump(edges, f) f.close() - print 'Wrote json to %s.' % filename + print('Wrote json to %s.' % filename) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py index 3544347b3bacd..91f187d685ad5 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py @@ -26,6 +26,8 @@ import shlex import xml.etree.cElementTree as ET +PY3 = bytes != str + generator_wants_static_library_dependencies_adjusted = False generator_default_variables = { @@ -97,6 +99,8 @@ def GetAllIncludeDirectories(target_list, target_dicts, proc = subprocess.Popen(args=command, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) output = proc.communicate()[1] + if PY3: + output = output.decode('utf-8') # Extract the list of include dirs from the output, which has this format: # ... # #include "..." search starts here: @@ -141,7 +145,7 @@ def GetAllIncludeDirectories(target_list, target_dicts, compiler_includes_list.append(include_dir) # Find standard gyp include dirs. - if config.has_key('include_dirs'): + if 'include_dirs' in config: include_dirs = config['include_dirs'] for shared_intermediate_dir in shared_intermediate_dirs: for include_dir in include_dirs: @@ -195,7 +199,7 @@ def GetAllDefines(target_list, target_dicts, data, config_name, params, """Calculate the defines for a project. Returns: - A dict that includes explict defines declared in gyp files along with all of + A dict that includes explicit defines declared in gyp files along with all of the default defines that the compiler uses. 
""" @@ -234,6 +238,8 @@ def GetAllDefines(target_list, target_dicts, data, config_name, params, cpp_proc = subprocess.Popen(args=command, cwd='.', stdin=subprocess.PIPE, stdout=subprocess.PIPE) cpp_output = cpp_proc.communicate()[0] + if PY3: + cpp_output = cpp_output.decode('utf-8') cpp_lines = cpp_output.split('\n') for cpp_line in cpp_lines: if not cpp_line.strip(): @@ -272,7 +278,7 @@ def WriteMacros(out, eclipse_langs, defines): out.write(' \n') for lang in eclipse_langs: out.write(' \n' % lang) - for key in sorted(defines.iterkeys()): + for key in sorted(defines): out.write(' %s%s\n' % (escape(key), escape(defines[key]))) out.write(' \n') diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py index 3efdb9966a69a..78eeaa61b2294 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py @@ -88,7 +88,7 @@ def GenerateOutput(target_list, target_dicts, data, params): if not output_file in output_files: output_files[output_file] = input_file - for output_file, input_file in output_files.iteritems(): + for output_file, input_file in output_files.items(): output = open(output_file, 'w') pprint.pprint(data[input_file], output) output.close() diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py index fe801b77ce3b9..196053679492f 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py @@ -12,7 +12,7 @@ # all are sourced by the top-level Makefile. This means that all # variables in .mk-files clobber one another. Be careful to use := # where appropriate for immediate evaluation, and similarly to watch -# that you're not relying on a variable value to last beween different +# that you're not relying on a variable value to last between different # .mk files. # # TODOs: @@ -21,6 +21,8 @@ # toplevel Makefile. It may make sense to generate some .mk files on # the side to keep the files readable. 
+from __future__ import print_function + import os import re import sys @@ -672,14 +674,13 @@ def _ValidateSourcesForOSX(spec, all_sources): basenames.setdefault(basename, []).append(source) error = '' - for basename, files in basenames.iteritems(): + for basename, files in basenames.items(): if len(files) > 1: error += ' %s: %s\n' % (basename, ' '.join(files)) if error: - print('static library %s has several files with the same basename:\n' % - spec['target_name'] + error + 'libtool on OS X will generate' + - ' warnings for them.') + print(('static library %s has several files with the same basename:\n' % spec['target_name']) + + error + 'libtool on OS X will generate' + ' warnings for them.') raise GypError('Duplicate basenames in sources section, see list above') @@ -820,7 +821,7 @@ def Write(self, qualified_target, base_path, output_filename, spec, configs, gyp.xcode_emulation.MacPrefixHeader( self.xcode_settings, lambda p: Sourceify(self.Absolutify(p)), self.Pchify)) - sources = filter(Compilable, all_sources) + sources = list(filter(Compilable, all_sources)) if sources: self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1) extensions = set([os.path.splitext(s)[1] for s in sources]) @@ -949,7 +950,7 @@ def WriteActions(self, actions, extra_sources, extra_outputs, '%s%s' % (name, cd_action, command)) self.WriteLn() - outputs = map(self.Absolutify, outputs) + outputs = [self.Absolutify(output) for output in outputs] # The makefile rules are all relative to the top dir, but the gyp actions # are defined relative to their containing dir. This replaces the obj # variable for the action rule with an absolute version so that the output @@ -973,7 +974,7 @@ def WriteActions(self, actions, extra_sources, extra_outputs, outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] - self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)), + self.WriteDoCmd(outputs, [Sourceify(self.Absolutify(i)) for i in inputs], part_of_all=part_of_all, command=name) # Stuff the outputs in a variable so we can refer to them later. 
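The list(filter(...)) wrappers and the comprehensions replacing map() in the make.py hunks above exist because filter() and map() return lazy iterators on Python 3: an iterator is always truthy even when empty, has no len(), and can only be consumed once, so call sites that test, measure, or re-iterate the result need a real list. A sketch with made-up file names:

def compilable(path):
    return path.endswith(('.c', '.cc', '.cpp', '.m', '.mm', '.s', '.S'))

all_sources = ['foo.cc', 'foo.h', 'bar.c', 'README.md']

sources = list(filter(compilable, all_sources))    # was: filter(compilable, all_sources)
if sources:                                        # an empty list is falsy; an iterator never is
    print('%d compilable sources' % len(sources))  # len() would fail on a bare filter object

# Comprehension form used for most of the map() call sites:
objects = [s.rsplit('.', 1)[0] + '.o' for s in sources]
print(objects)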
@@ -1022,8 +1023,8 @@ def WriteRules(self, rules, extra_sources, extra_outputs, extra_sources += outputs if int(rule.get('process_outputs_as_mac_bundle_resources', False)): extra_mac_bundle_resources += outputs - inputs = map(Sourceify, map(self.Absolutify, [rule_source] + - rule.get('inputs', []))) + inputs = [Sourceify(self.Absolutify(i)) for i + in [rule_source] + rule.get('inputs', [])] actions = ['$(call do_cmd,%s_%d)' % (name, count)] if name == 'resources_grit': @@ -1039,7 +1040,7 @@ def WriteRules(self, rules, extra_sources, extra_outputs, outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] - outputs = map(self.Absolutify, outputs) + outputs = [self.Absolutify(output) for output in outputs] all_outputs += outputs # Only write the 'obj' and 'builddir' rules for the "primary" output # (:1); it's superfluous for the "extra outputs", and this avoids @@ -1146,7 +1147,7 @@ def WriteCopies(self, copies, extra_outputs, part_of_all): path = gyp.xcode_emulation.ExpandEnvVars(path, env) self.WriteDoCmd([output], [path], 'copy', part_of_all) outputs.append(output) - self.WriteLn('%s = %s' % (variable, ' '.join(map(QuoteSpaces, outputs)))) + self.WriteLn('%s = %s' % (variable, ' '.join(QuoteSpaces(o) for o in outputs))) extra_outputs.append('$(%s)' % variable) self.WriteLn() @@ -1157,7 +1158,7 @@ def WriteMacBundleResources(self, resources, bundle_deps): for output, res in gyp.xcode_emulation.GetMacBundleResources( generator_default_variables['PRODUCT_DIR'], self.xcode_settings, - map(Sourceify, map(self.Absolutify, resources))): + [Sourceify(self.Absolutify(r)) for r in resources]): _, ext = os.path.splitext(output) if ext != '.xcassets': # Make does not supports '.xcassets' emulation. @@ -1224,24 +1225,24 @@ def WriteSources(self, configs, deps, sources, cflags_c = config.get('cflags_c') cflags_cc = config.get('cflags_cc') - self.WriteLn("# Flags passed to all source files."); + self.WriteLn("# Flags passed to all source files.") self.WriteList(cflags, 'CFLAGS_%s' % configname) - self.WriteLn("# Flags passed to only C files."); + self.WriteLn("# Flags passed to only C files.") self.WriteList(cflags_c, 'CFLAGS_C_%s' % configname) - self.WriteLn("# Flags passed to only C++ files."); + self.WriteLn("# Flags passed to only C++ files.") self.WriteList(cflags_cc, 'CFLAGS_CC_%s' % configname) if self.flavor == 'mac': - self.WriteLn("# Flags passed to only ObjC files."); + self.WriteLn("# Flags passed to only ObjC files.") self.WriteList(cflags_objc, 'CFLAGS_OBJC_%s' % configname) - self.WriteLn("# Flags passed to only ObjC++ files."); + self.WriteLn("# Flags passed to only ObjC++ files.") self.WriteList(cflags_objcc, 'CFLAGS_OBJCC_%s' % configname) includes = config.get('include_dirs') if includes: - includes = map(Sourceify, map(self.Absolutify, includes)) + includes = [Sourceify(self.Absolutify(i)) for i in includes] self.WriteList(includes, 'INCS_%s' % configname, prefix='-I') - compilable = filter(Compilable, sources) - objs = map(self.Objectify, map(self.Absolutify, map(Target, compilable))) + compilable = list(filter(Compilable, sources)) + objs = [self.Objectify(self.Absolutify(Target(c))) for c in compilable] self.WriteList(objs, 'OBJS') for obj in objs: @@ -1313,7 +1314,7 @@ def WriteSources(self, configs, deps, sources, # If there are any object files in our input file list, link them into our # output. 
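A related readability point in the hunks above: nested map() chains such as map(Sourceify, map(self.Absolutify, resources)) collapse into a single comprehension, and one-shot joins use a generator expression, which avoids building an intermediate list on either Python version. Sketch with stand-in functions for the real helpers:

def absolutify(path):
    return '$(srcdir)/' + path          # stand-in for self.Absolutify

def sourceify(path):
    return path                         # stand-in for Sourceify

resources = ['app.icns', 'Images Folder/icon.png']

# was: map(Sourceify, map(self.Absolutify, resources))
converted = [sourceify(absolutify(r)) for r in resources]

# was: ' '.join(map(QuoteSpaces, outputs))
print(' '.join(r.replace(' ', '\\ ') for r in converted))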
- extra_link_deps += filter(Linkable, sources) + extra_link_deps += list(filter(Linkable, sources)) self.WriteLn() @@ -1381,7 +1382,7 @@ def ComputeOutputBasename(self, spec): elif self.type == 'none': target = '%s.stamp' % target elif self.type != 'executable': - print ("ERROR: What output file should be generated?", + print("ERROR: What output file should be generated?", "type", self.type, "target", target) target_prefix = spec.get('product_prefix', target_prefix) @@ -1546,7 +1547,7 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, # Postbuilds expect to be run in the gyp file's directory, so insert an # implicit postbuild to cd to there. postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path])) - for i in xrange(len(postbuilds)): + for i in range(len(postbuilds)): if not postbuilds[i].startswith('$'): postbuilds[i] = EscapeShellArgument(postbuilds[i]) self.WriteLn('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output)) @@ -1563,7 +1564,7 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, # Bundle dependencies. Note that the code below adds actions to this # target, so if you move these two lines, move the lines below as well. - self.WriteList(map(QuoteSpaces, bundle_deps), 'BUNDLE_DEPS') + self.WriteList([QuoteSpaces(dep) for dep in bundle_deps], 'BUNDLE_DEPS') self.WriteLn('%s: $(BUNDLE_DEPS)' % QuoteSpaces(self.output)) # After the framework is built, package it. Needs to happen before @@ -1597,7 +1598,7 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, if self.type == 'executable': self.WriteLn('%s: LD_INPUTS := %s' % ( QuoteSpaces(self.output_binary), - ' '.join(map(QuoteSpaces, link_deps)))) + ' '.join(QuoteSpaces(dep) for dep in link_deps))) if self.toolset == 'host' and self.flavor == 'android': self.WriteDoCmd([self.output_binary], link_deps, 'link_host', part_of_all, postbuilds=postbuilds) @@ -1619,7 +1620,7 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, elif self.type == 'shared_library': self.WriteLn('%s: LD_INPUTS := %s' % ( QuoteSpaces(self.output_binary), - ' '.join(map(QuoteSpaces, link_deps)))) + ' '.join(QuoteSpaces(dep) for dep in link_deps))) self.WriteDoCmd([self.output_binary], link_deps, 'solink', part_of_all, postbuilds=postbuilds) elif self.type == 'loadable_module': @@ -1638,7 +1639,7 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all, postbuilds=postbuilds) else: - print "WARNING: no output for", self.type, target + print("WARNING: no output for", self.type, self.target) # Add an alias for each target (if there are any outputs). # Installable target aliases are created below. @@ -1745,8 +1746,8 @@ def WriteMakeRule(self, outputs, inputs, actions=None, comment=None, output is just a name to run the rule command: (optional) command name to generate unambiguous labels """ - outputs = map(QuoteSpaces, outputs) - inputs = map(QuoteSpaces, inputs) + outputs = [QuoteSpaces(o) for o in outputs] + inputs = [QuoteSpaces(i) for i in inputs] if comment: self.WriteLn('# ' + comment) @@ -1775,10 +1776,10 @@ def WriteMakeRule(self, outputs, inputs, actions=None, comment=None, # - The multi-output rule will have an do-nothing recipe. # Hash the target name to avoid generating overlong filenames. 
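Two more small fixes sit in the WriteTarget hunk above: xrange() does not exist on Python 3, so the postbuild index loop now uses range() (lazy on 3, a small list on 2, which is harmless here), and the final warning now prints self.target instead of the bare name target. Sketch with invented postbuild commands and a stand-in for EscapeShellArgument:

postbuilds = ['cd src/chrome', './sign_bundle.sh', 'echo done']

for i in range(len(postbuilds)):                 # was: xrange(len(postbuilds))
    if not postbuilds[i].startswith('$'):
        postbuilds[i] = "'%s'" % postbuilds[i]   # stand-in for EscapeShellArgument

print(postbuilds)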
- cmddigest = hashlib.sha1(command if command else self.target).hexdigest() + cmddigest = hashlib.sha1((command or self.target).encode('utf-8')).hexdigest() intermediate = "%s.intermediate" % cmddigest self.WriteLn('%s: %s' % (' '.join(outputs), intermediate)) - self.WriteLn('\t%s' % '@:'); + self.WriteLn('\t%s' % '@:') self.WriteLn('%s: %s' % ('.INTERMEDIATE', intermediate)) self.WriteLn('%s: %s%s' % (intermediate, ' '.join(inputs), force_append)) @@ -1835,7 +1836,7 @@ def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps): default_cpp_ext = ext self.WriteLn('LOCAL_CPP_EXTENSION := ' + default_cpp_ext) - self.WriteList(map(self.Absolutify, filter(Compilable, all_sources)), + self.WriteList(list(map(self.Absolutify, filter(Compilable, all_sources))), 'LOCAL_SRC_FILES') # Filter out those which do not match prefix and suffix and produce @@ -1978,7 +1979,7 @@ def WriteAutoRegenerationRule(params, root_makefile, makefile_name, "%(makefile_name)s: %(deps)s\n" "\t$(call do_cmd,regen_makefile)\n\n" % { 'makefile_name': makefile_name, - 'deps': ' '.join(map(SourceifyAndQuoteSpaces, build_files)), + 'deps': ' '.join(SourceifyAndQuoteSpaces(bf) for bf in build_files), 'cmd': gyp.common.EncodePOSIXShellList( [gyp_binary, '-fmake'] + gyp.RegenerateFlags(options) + @@ -1992,7 +1993,7 @@ def PerformBuild(data, configurations, params): if options.toplevel_dir and options.toplevel_dir != '.': arguments += '-C', options.toplevel_dir arguments.append('BUILDTYPE=' + config) - print 'Building [%s]: %s' % (config, arguments) + print('Building [%s]: %s' % (config, arguments)) subprocess.check_call(arguments) @@ -2058,6 +2059,14 @@ def CalculateMakefilePath(build_file, base_name): 'srcdir': srcdir, 'copy_archive_args': copy_archive_arguments, 'makedep_args': makedep_arguments, + 'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'), + 'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'), + 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'), + 'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'), + 'CC.host': GetEnvironFallback(('CC_host', 'CC'), 'gcc'), + 'AR.host': GetEnvironFallback(('AR_host', 'AR'), 'ar'), + 'CXX.host': GetEnvironFallback(('CXX_host', 'CXX'), 'g++'), + 'LINK.host': GetEnvironFallback(('LINK_host', 'LINK'), '$(CXX.host)'), } if flavor == 'mac': flock_command = './gyp-mac-tool flock' @@ -2078,6 +2087,10 @@ def CalculateMakefilePath(build_file, base_name): 'copy_archive_args': copy_archive_arguments, 'makedep_args': makedep_arguments, 'link_commands': LINK_COMMANDS_OS390, + 'CC.target': GetEnvironFallback(('CC_target', 'CC'), 'njsc'), + 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), 'njsc++'), + 'CC.host': GetEnvironFallback(('CC_host', 'CC'), 'njsc'), + 'CXX.host': GetEnvironFallback(('CXX_host', 'CXX'), 'njsc++'), }) elif flavor == 'solaris': header_params.update({ @@ -2103,17 +2116,6 @@ def CalculateMakefilePath(build_file, base_name): 'flock_index': 2, }) - header_params.update({ - 'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'), - 'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'), - 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'), - 'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'), - 'CC.host': GetEnvironFallback(('CC_host', 'CC'), 'gcc'), - 'AR.host': GetEnvironFallback(('AR_host', 'AR'), 'ar'), - 'CXX.host': GetEnvironFallback(('CXX_host', 'CXX'), 'g++'), - 'LINK.host': GetEnvironFallback(('LINK_host', 'LINK'), '$(CXX.host)'), - }) - build_file, _, 
_ = gyp.common.ParseQualifiedTarget(target_list[0]) make_global_settings_array = data[build_file].get('make_global_settings', []) wrappers = {} diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py index 3901ba94161e6..8dbe0dc05b86b 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py @@ -2,6 +2,8 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +from __future__ import print_function + import copy import ntpath import os @@ -10,6 +12,8 @@ import subprocess import sys +from collections import OrderedDict + import gyp.common import gyp.easy_xml as easy_xml import gyp.generator.ninja as ninja_generator @@ -23,15 +27,7 @@ from gyp.common import GypError from gyp.common import OrderedSet -# TODO: Remove once bots are on 2.7, http://crbug.com/241769 -def _import_OrderedDict(): - import collections - try: - return collections.OrderedDict - except AttributeError: - import gyp.ordered_dict - return gyp.ordered_dict.OrderedDict -OrderedDict = _import_OrderedDict() +PY3 = bytes != str # Regular expression for validating Visual Studio GUIDs. If the GUID @@ -86,6 +82,7 @@ def _import_OrderedDict(): 'msvs_enable_winrt', 'msvs_requires_importlibrary', 'msvs_enable_winphone', + 'msvs_enable_marmasm', 'msvs_application_type_revision', 'msvs_target_platform_version', 'msvs_target_platform_minversion', @@ -121,6 +118,8 @@ def _GetDomainAndUserName(): call = subprocess.Popen(['net', 'config', 'Workstation'], stdout=subprocess.PIPE) config = call.communicate()[0] + if PY3: + config = config.decode('utf-8') username_re = re.compile(r'^User name\s+(\S+)', re.MULTILINE) username_match = username_re.search(config) if username_match: @@ -162,7 +161,7 @@ def _FixPath(path): Returns: The path with all slashes made into backslashes. """ - if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$': + if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$' and not _IsWindowsAbsPath(path): path = os.path.join(fixpath_prefix, path) path = path.replace('/', '\\') path = _NormalizedSource(path) @@ -171,6 +170,16 @@ def _FixPath(path): return path +def _IsWindowsAbsPath(path): + r""" + On Cygwin systems Python needs a little help determining if a path is an absolute Windows path or not, so that + it does not treat those as relative, which results in bad paths like: + + '..\C:\\some_source_code_file.cc' + """ + return path.startswith('c:') or path.startswith('C:') + + def _FixPaths(paths): """Fix each of the paths of the list.""" return [_FixPath(i) for i in paths] @@ -449,7 +458,7 @@ def _AddCustomBuildToolForMSVS(p, spec, primary_input, 'CommandLine': cmd, }) # Add to the properties of primary input for each config. - for config_name, c_data in spec['configurations'].iteritems(): + for config_name, c_data in spec['configurations'].items(): p.AddFileConfig(_FixPath(primary_input), _ConfigFullName(config_name, c_data), tools=[tool]) @@ -755,8 +764,8 @@ def _Replace(match): # the VCProj but cause the same problem on the final command-line. Moving # the item to the end of the list does works, but that's only possible if # there's only one such item. Let's just warn the user. 
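Back in the make.py WriteMakeRule hunk above, the digest used to name the .intermediate file is now computed from an encoded string: Python 3's hashlib only accepts bytes, and (command or self.target) is the usual short-circuit spelling of the old conditional expression. Minimal sketch with invented values:

import hashlib

command = None
target = 'chrome_gyp_resources'

# was: hashlib.sha1(command if command else target).hexdigest()
cmddigest = hashlib.sha1((command or target).encode('utf-8')).hexdigest()
print('%s.intermediate' % cmddigest)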
- print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' + - 'quotes in ' + s) + print('Warning: MSVS may misinterpret the odd number of ' + + 'quotes in ' + s, file=sys.stderr) return s @@ -969,13 +978,13 @@ def _ValidateSourcesForMSVSProject(spec, version): basenames.setdefault(basename, []).append(source) error = '' - for basename, files in basenames.iteritems(): + for basename, files in basenames.items(): if len(files) > 1: error += ' %s: %s\n' % (basename, ' '.join(files)) if error: - print('static library %s has several files with the same basename:\n' % - spec['target_name'] + error + 'MSVC08 cannot handle that.') + print('static library %s has several files with the same basename:\n' % spec['target_name'] + + error + 'MSVC08 cannot handle that.') raise GypError('Duplicate basenames in sources section, see list above') @@ -1001,7 +1010,7 @@ def _GenerateMSVSProject(project, options, version, generator_flags): relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir) config_type = _GetMSVSConfigurationType(spec, project.build_file) - for config_name, config in spec['configurations'].iteritems(): + for config_name, config in spec['configurations'].items(): _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config) # MSVC08 and prior version cannot handle duplicate basenames in the same @@ -1367,10 +1376,10 @@ def _ConvertToolsToExpectedForm(tools): A list of Tool objects. """ tool_list = [] - for tool, settings in tools.iteritems(): + for tool, settings in tools.items(): # Collapse settings with lists. settings_fixed = {} - for setting, value in settings.iteritems(): + for setting, value in settings.items(): if type(value) == list: if ((tool == 'VCLinkerTool' and setting == 'AdditionalDependencies') or @@ -1545,7 +1554,7 @@ def _IdlFilesHandledNonNatively(spec, sources): def _GetPrecompileRelatedFiles(spec): # Gather a list of precompiled header related sources. precompiled_related = [] - for _, config in spec['configurations'].iteritems(): + for _, config in spec['configurations'].items(): for k in precomp_keys: f = config.get(k) if f: @@ -1556,7 +1565,7 @@ def _GetPrecompileRelatedFiles(spec): def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, list_excluded): exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl) - for file_name, excluded_configs in exclusions.iteritems(): + for file_name, excluded_configs in exclusions.items(): if (not list_excluded and len(excluded_configs) == len(spec['configurations'])): # If we're not listing excluded files, then they won't appear in the @@ -1573,7 +1582,7 @@ def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl): # Exclude excluded sources from being built. for f in excluded_sources: excluded_configs = [] - for config_name, config in spec['configurations'].iteritems(): + for config_name, config in spec['configurations'].items(): precomped = [_FixPath(config.get(i, '')) for i in precomp_keys] # Don't do this for ones that are precompiled header related. if f not in precomped: @@ -1583,7 +1592,7 @@ def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl): # Exclude them now. 
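The _FixPath/_IsWindowsAbsPath change in the msvs.py hunk above is not a straight 2-to-3 conversion: per its docstring, a Cygwin Python does not treat 'C:\...' as absolute, so without the extra drive-letter check the relative fixup prefix gets glued on and produces paths like '..\C:\some_source_code_file.cc'. A trimmed-down sketch (the real function also normalises the source path):

import os

fixpath_prefix = '..'    # invented value; gyp derives it from the project layout

def is_windows_abs_path(path):
    return path.startswith('c:') or path.startswith('C:')

def fix_path(path):
    if fixpath_prefix and path and not os.path.isabs(path) \
            and not path[0] == '$' and not is_windows_abs_path(path):
        path = os.path.join(fixpath_prefix, path)
    return path.replace('/', '\\')

print(fix_path('C:/src/foo.cc'))   # drive-letter path: only slashes are converted
print(fix_path('src/foo.cc'))      # relative path: gets the '..' prefix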
for f in excluded_idl: excluded_configs = [] - for config_name, config in spec['configurations'].iteritems(): + for config_name, config in spec['configurations'].items(): excluded_configs.append((config_name, config)) exclusions[f] = excluded_configs return exclusions @@ -1592,7 +1601,7 @@ def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl): def _AddToolFilesToMSVS(p, spec): # Add in tool files (rules). tool_files = OrderedSet() - for _, config in spec['configurations'].iteritems(): + for _, config in spec['configurations'].items(): for f in config.get('msvs_tool_files', []): tool_files.add(f) for f in tool_files: @@ -1605,7 +1614,7 @@ def _HandlePreCompiledHeaders(p, sources, spec): # kind (i.e. C vs. C++) as the precompiled header source stub needs # to have use of precompiled headers disabled. extensions_excluded_from_precompile = [] - for config_name, config in spec['configurations'].iteritems(): + for config_name, config in spec['configurations'].items(): source = config.get('msvs_precompiled_source') if source: source = _FixPath(source) @@ -1626,7 +1635,7 @@ def DisableForSourceTree(source_tree): else: basename, extension = os.path.splitext(source) if extension in extensions_excluded_from_precompile: - for config_name, config in spec['configurations'].iteritems(): + for config_name, config in spec['configurations'].items(): tool = MSVSProject.Tool('VCCLCompilerTool', {'UsePrecompiledHeader': '0', 'ForcedIncludeFiles': '$(NOINHERIT)'}) @@ -1677,7 +1686,7 @@ def _WriteMSVSUserFile(project_path, version, spec): return # Nothing to add # Write out the user file. user_file = _CreateMSVSUserFile(project_path, version, spec) - for config_name, c_data in spec['configurations'].iteritems(): + for config_name, c_data in spec['configurations'].items(): user_file.AddDebugSettings(_ConfigFullName(config_name, c_data), action, environment, working_directory) user_file.WriteIfChanged() @@ -1728,7 +1737,7 @@ def _GetPathDict(root, path): def _DictsToFolders(base_path, bucket, flat): # Convert to folders recursively. children = [] - for folder, contents in bucket.iteritems(): + for folder, contents in bucket.items(): if type(contents) == dict: folder_children = _DictsToFolders(os.path.join(base_path, folder), contents, flat) @@ -1750,8 +1759,8 @@ def _CollapseSingles(parent, node): # such projects up one level. if (type(node) == dict and len(node) == 1 and - node.keys()[0] == parent + '.vcproj'): - return node[node.keys()[0]] + list(node)[0] == parent + '.vcproj'): + return node[list(node)[0]] if type(node) != dict: return node for child in node: @@ -1770,8 +1779,8 @@ def _GatherSolutionFolders(sln_projects, project_objects, flat): # Walk down from the top until we hit a folder that has more than one entry. # In practice, this strips the top-level "src/" dir from the hierarchy in # the solution. - while len(root) == 1 and type(root[root.keys()[0]]) == dict: - root = root[root.keys()[0]] + while len(root) == 1 and type(root[list(root)[0]]) == dict: + root = root[list(root)[0]] # Collapse singles. root = _CollapseSingles('', root) # Merge buckets until everything is a root entry. @@ -1800,7 +1809,7 @@ def _GetPlatformOverridesOfProject(spec): # Prepare a dict indicating which project configurations are used for which # solution configurations for this target. 
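A little further up in msvs.py, the warning about unbalanced quotes switches from print >> sys.stderr to a keyword argument: the >> form is a syntax error on Python 3, and with the print_function import at the top of the module file=sys.stderr does the same job on both versions. Sketch with an abbreviated message:

from __future__ import print_function

import sys

s = 'cmd /c echo "unbalanced'
print('Warning: MSVS may misinterpret the odd number of '
      'quotes in ' + s, file=sys.stderr)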
config_platform_overrides = {} - for config_name, c in spec['configurations'].iteritems(): + for config_name, c in spec['configurations'].items(): config_fullname = _ConfigFullName(config_name, c) platform = c.get('msvs_target_platform', _ConfigPlatform(c)) fixed_config_fullname = '%s|%s' % ( @@ -1885,6 +1894,8 @@ def _InitNinjaFlavor(params, target_list, target_dicts): configuration = '$(Configuration)' if params.get('target_arch') == 'x64': configuration += '_x64' + if params.get('target_arch') == 'arm64': + configuration += '_arm64' spec['msvs_external_builder_out_dir'] = os.path.join( gyp.common.RelativePath(params['options'].toplevel_dir, gyp_dir), ninja_generator.ComputeOutputDir(params), @@ -1939,7 +1950,7 @@ def PerformBuild(data, configurations, params): msvs_version = params['msvs_version'] devenv = os.path.join(msvs_version.path, 'Common7', 'IDE', 'devenv.com') - for build_file, build_file_dict in data.iteritems(): + for build_file, build_file_dict in data.items(): (build_file_root, build_file_ext) = os.path.splitext(build_file) if build_file_ext != '.gyp': continue @@ -1949,7 +1960,7 @@ def PerformBuild(data, configurations, params): for config in configurations: arguments = [devenv, sln_path, '/Build', config] - print 'Building [%s]: %s' % (config, arguments) + print('Building [%s]: %s' % (config, arguments)) rtn = subprocess.check_call(arguments) @@ -1988,7 +1999,7 @@ def GenerateOutput(target_list, target_dicts, data, params): configs = set() for qualified_target in target_list: spec = target_dicts[qualified_target] - for config_name, config in spec['configurations'].iteritems(): + for config_name, config in spec['configurations'].items(): configs.add(_ConfigFullName(config_name, config)) configs = list(configs) @@ -2031,11 +2042,12 @@ def GenerateOutput(target_list, target_dicts, data, params): if generator_flags.get('msvs_error_on_missing_sources', False): raise GypError(error_message) else: - print >> sys.stdout, "Warning: " + error_message + print("Warning: " + error_message, file=sys.stdout) def _GenerateMSBuildFiltersFile(filters_path, source_files, - rule_dependencies, extension_to_rule_name): + rule_dependencies, extension_to_rule_name, + platforms): """Generate the filters file. This file is used by Visual Studio to organize the presentation of source @@ -2049,7 +2061,8 @@ def _GenerateMSBuildFiltersFile(filters_path, source_files, filter_group = [] source_group = [] _AppendFiltersForMSBuild('', source_files, rule_dependencies, - extension_to_rule_name, filter_group, source_group) + extension_to_rule_name, platforms, + filter_group, source_group) if filter_group: content = ['Project', {'ToolsVersion': '4.0', @@ -2065,7 +2078,7 @@ def _GenerateMSBuildFiltersFile(filters_path, source_files, def _AppendFiltersForMSBuild(parent_filter_name, sources, rule_dependencies, - extension_to_rule_name, + extension_to_rule_name, platforms, filter_group, source_group): """Creates the list of filters and sources to be added in the filter file. @@ -2091,11 +2104,12 @@ def _AppendFiltersForMSBuild(parent_filter_name, sources, rule_dependencies, # Recurse and add its dependents. _AppendFiltersForMSBuild(filter_name, source.contents, rule_dependencies, extension_to_rule_name, - filter_group, source_group) + platforms, filter_group, source_group) else: # It's a source. Create a source entry. 
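The _CollapseSingles and _GatherSolutionFolders hunks above replace node.keys()[0] with list(node)[0]: on Python 3, dict.keys() returns a view that cannot be indexed, while iterating the dict (which is what list() does) yields its keys on both versions. Sketch with an invented single-entry node:

node = {'chrome.vcproj': {'guid': '{1234-ABCD}'}}

if isinstance(node, dict) and len(node) == 1:
    only_key = list(node)[0]          # was: node.keys()[0]
    print('%s -> %s' % (only_key, node[only_key]))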
_, element = _MapFileToMsBuildSourceType(source, rule_dependencies, - extension_to_rule_name) + extension_to_rule_name, + platforms) source_entry = [element, {'Include': source}] # Specify the filter it is part of, if any. if parent_filter_name: @@ -2104,7 +2118,7 @@ def _AppendFiltersForMSBuild(parent_filter_name, sources, rule_dependencies, def _MapFileToMsBuildSourceType(source, rule_dependencies, - extension_to_rule_name): + extension_to_rule_name, platforms): """Returns the group and element type of the source file. Arguments: @@ -2130,6 +2144,9 @@ def _MapFileToMsBuildSourceType(source, rule_dependencies, elif ext == '.asm': group = 'masm' element = 'MASM' + for platform in platforms: + if platform.lower() in ['arm', 'arm64']: + element = 'MARMASM' elif ext == '.idl': group = 'midl' element = 'Midl' @@ -2628,7 +2645,7 @@ def _GetConfigurationCondition(name, settings): def _GetMSBuildProjectConfigurations(configurations): group = ['ItemGroup', {'Label': 'ProjectConfigurations'}] - for (name, settings) in sorted(configurations.iteritems()): + for (name, settings) in sorted(configurations.items()): configuration, platform = _GetConfigurationAndPlatform(name, settings) designation = '%s|%s' % (configuration, platform) group.append( @@ -2680,7 +2697,7 @@ def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name): platform_name = None msvs_windows_target_platform_version = None - for configuration in spec['configurations'].itervalues(): + for configuration in spec['configurations'].values(): platform_name = platform_name or _ConfigPlatform(configuration) msvs_windows_target_platform_version = \ msvs_windows_target_platform_version or \ @@ -2698,7 +2715,7 @@ def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name): def _GetMSBuildConfigurationDetails(spec, build_file): properties = {} - for name, settings in spec['configurations'].iteritems(): + for name, settings in spec['configurations'].items(): msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file) condition = _GetConfigurationCondition(name, settings) character_set = msbuild_attributes.get('CharacterSet') @@ -2727,9 +2744,9 @@ def _GetMSBuildPropertySheets(configurations): user_props = r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props' additional_props = {} props_specified = False - for name, settings in sorted(configurations.iteritems()): + for name, settings in sorted(configurations.items()): configuration = _GetConfigurationCondition(name, settings) - if settings.has_key('msbuild_props'): + if 'msbuild_props' in settings: additional_props[configuration] = _FixPaths(settings['msbuild_props']) props_specified = True else: @@ -2749,7 +2766,7 @@ def _GetMSBuildPropertySheets(configurations): ] else: sheets = [] - for condition, props in additional_props.iteritems(): + for condition, props in additional_props.items(): import_group = [ 'ImportGroup', {'Label': 'PropertySheets', @@ -2782,7 +2799,7 @@ def _ConvertMSVSBuildAttributes(spec, config, build_file): elif a == 'ConfigurationType': msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a]) else: - print 'Warning: Do not know how to convert MSVS attribute ' + a + print('Warning: Do not know how to convert MSVS attribute ' + a) return msbuild_attributes @@ -2876,7 +2893,7 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file): new_paths = '$(ExecutablePath);' + ';'.join(new_paths) properties = {} - for (name, configuration) in sorted(configurations.iteritems()): + for (name, configuration) in 
sorted(configurations.items()): condition = _GetConfigurationCondition(name, configuration) attributes = _GetMSBuildAttributes(spec, configuration, build_file) msbuild_settings = configuration['finalized_msbuild_settings'] @@ -2901,7 +2918,7 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file): _AddConditionalProperty(properties, condition, 'ExecutablePath', new_paths) tool_settings = msbuild_settings.get('', {}) - for name, value in sorted(tool_settings.iteritems()): + for name, value in sorted(tool_settings.items()): formatted_value = _GetValueFormattedForMSBuild('', name, value) _AddConditionalProperty(properties, condition, name, formatted_value) return _GetMSBuildPropertyGroup(spec, None, properties) @@ -2970,7 +2987,7 @@ def GetEdges(node): # NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG)) for name in reversed(properties_ordered): values = properties[name] - for value, conditions in sorted(values.iteritems()): + for value, conditions in sorted(values.items()): if len(conditions) == num_configurations: # If the value is the same all configurations, # just add one unconditional entry. @@ -2983,18 +3000,18 @@ def GetEdges(node): def _GetMSBuildToolSettingsSections(spec, configurations): groups = [] - for (name, configuration) in sorted(configurations.iteritems()): + for (name, configuration) in sorted(configurations.items()): msbuild_settings = configuration['finalized_msbuild_settings'] group = ['ItemDefinitionGroup', {'Condition': _GetConfigurationCondition(name, configuration)} ] - for tool_name, tool_settings in sorted(msbuild_settings.iteritems()): + for tool_name, tool_settings in sorted(msbuild_settings.items()): # Skip the tool named '' which is a holder of global settings handled # by _GetMSBuildConfigurationGlobalProperties. if tool_name: if tool_settings: tool = [tool_name] - for name, value in sorted(tool_settings.iteritems()): + for name, value in sorted(tool_settings.items()): formatted_value = _GetValueFormattedForMSBuild(tool_name, name, value) tool.append([name, formatted_value]) @@ -3027,7 +3044,7 @@ def _FinalizeMSBuildSettings(spec, configuration): for ignored_setting in ignored_settings: value = configuration.get(ignored_setting) if value: - print ('Warning: The automatic conversion to MSBuild does not handle ' + print('Warning: The automatic conversion to MSBuild does not handle ' '%s. Ignoring setting of %s' % (ignored_setting, str(value))) defines = [_EscapeCppDefineForMSBuild(d) for d in defines] @@ -3048,7 +3065,7 @@ def _FinalizeMSBuildSettings(spec, configuration): _ToolAppend(msbuild_settings, 'ResourceCompile', 'AdditionalIncludeDirectories', resource_include_dirs) # Add in libraries, note that even for empty libraries, we want this - # set, to prevent inheriting default libraries from the enviroment. + # set, to prevent inheriting default libraries from the environment. 
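Besides the 2-to-3 cleanups, the msvs.py hunks above and below carry a functional change: a platforms collection (from _GetUniquePlatforms(spec)) is threaded through the filters file and into _MapFileToMsBuildSourceType, so that .asm sources are emitted as MARMASM items, with the marmasm.props/.targets build customisations imported below when msvs_enable_marmasm is set, whenever the project targets arm or arm64 instead of the default MASM. A stand-alone sketch that mirrors only that selection logic, with the non-.asm cases simplified:

import os

def msbuild_element_for(source, platforms):
    ext = os.path.splitext(source)[1]
    if ext != '.asm':
        return 'ClCompile' if ext in ('.c', '.cc', '.cpp', '.cxx') else 'None'
    element = 'MASM'
    for platform in platforms:                   # mirrors the loop added in the diff
        if platform.lower() in ('arm', 'arm64'):
            element = 'MARMASM'
    return element

print(msbuild_element_for('cpu_features.asm', ['ARM64']))   # MARMASM
print(msbuild_element_for('cpu_features.asm', ['x64']))     # MASM
print(msbuild_element_for('main.cc', ['x64']))              # ClCompile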
_ToolSetOrAppend(msbuild_settings, 'Link', 'AdditionalDependencies', libraries) _ToolAppend(msbuild_settings, 'Link', 'AdditionalLibraryDirectories', @@ -3194,7 +3211,7 @@ def _AddSources2(spec, sources, exclusions, grouped_sources, {'Condition': condition}, 'true']) # Add precompile if needed - for config_name, configuration in spec['configurations'].iteritems(): + for config_name, configuration in spec['configurations'].items(): precompiled_source = configuration.get('msvs_precompiled_source', '') if precompiled_source != '': precompiled_source = _FixPath(precompiled_source) @@ -3223,7 +3240,8 @@ def _AddSources2(spec, sources, exclusions, grouped_sources, detail.append(['ForcedIncludeFiles', '']) group, element = _MapFileToMsBuildSourceType(source, rule_dependencies, - extension_to_rule_name) + extension_to_rule_name, + _GetUniquePlatforms(spec)) grouped_sources[group].append([element, {'Include': source}] + detail) @@ -3240,7 +3258,7 @@ def _GetMSBuildProjectReferences(project): ['Project', guid], ['ReferenceOutputAssembly', 'false'] ] - for config in dependency.spec.get('configurations', {}).itervalues(): + for config in dependency.spec.get('configurations', {}).values(): if config.get('msvs_use_library_dependency_inputs', 0): project_ref.append(['UseLibraryDependencyInputs', 'true']) break @@ -3306,10 +3324,10 @@ def _GenerateMSBuildProject(project, options, version, generator_flags): _GenerateMSBuildFiltersFile(project.path + '.filters', sources, rule_dependencies, - extension_to_rule_name) + extension_to_rule_name, _GetUniquePlatforms(spec)) missing_sources = _VerifySourcesExist(sources, project_dir) - for configuration in configurations.itervalues(): + for configuration in configurations.values(): _FinalizeMSBuildSettings(spec, configuration) # Add attributes to root element @@ -3326,6 +3344,12 @@ def _GenerateMSBuildProject(project, options, version, generator_flags): import_masm_targets_section = [ ['Import', {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.targets'}]] + import_marmasm_props_section = [ + ['Import', + {'Project': r'$(VCTargetsPath)\BuildCustomizations\marmasm.props'}]] + import_marmasm_targets_section = [ + ['Import', + {'Project': r'$(VCTargetsPath)\BuildCustomizations\marmasm.targets'}]] macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]] content = [ @@ -3345,6 +3369,8 @@ def _GenerateMSBuildProject(project, options, version, generator_flags): content += _GetMSBuildLocalProperties(project.msbuild_toolset) content += import_cpp_props_section content += import_masm_props_section + if spec.get('msvs_enable_marmasm'): + content += import_marmasm_props_section content += _GetMSBuildExtensions(props_files_of_rules) content += _GetMSBuildPropertySheets(configurations) content += macro_section @@ -3357,6 +3383,8 @@ def _GenerateMSBuildProject(project, options, version, generator_flags): content += _GetMSBuildProjectReferences(project) content += import_cpp_targets_section content += import_masm_targets_section + if spec.get('msvs_enable_marmasm'): + content += import_marmasm_targets_section content += _GetMSBuildExtensionTargets(targets_files_of_rules) if spec.get('msvs_external_builder'): @@ -3434,7 +3462,7 @@ def _GenerateActionsForMSBuild(spec, actions_to_add): """ sources_handled_by_action = OrderedSet() actions_spec = [] - for primary_input, actions in actions_to_add.iteritems(): + for primary_input, actions in actions_to_add.items(): inputs = OrderedSet() outputs = OrderedSet() descriptions = [] diff --git 
a/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py index c0b021df502bf..daf4f411bc481 100755 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py @@ -7,13 +7,16 @@ import gyp.generator.msvs as msvs import unittest -import StringIO +try: + from cStringIO import StringIO +except ImportError: + from io import StringIO class TestSequenceFunctions(unittest.TestCase): def setUp(self): - self.stderr = StringIO.StringIO() + self.stderr = StringIO() def test_GetLibraries(self): self.assertEqual( diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py index a00573ebf2345..33cc253aba67b 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py @@ -1,3 +1,4 @@ +from __future__ import print_function # Copyright (c) 2013 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -18,7 +19,10 @@ import gyp.msvs_emulation import gyp.MSVSUtil as MSVSUtil import gyp.xcode_emulation -from cStringIO import StringIO +try: + from cStringIO import StringIO +except ImportError: + from io import StringIO from gyp.common import GetEnvironFallback import gyp.ninja_syntax as ninja_syntax @@ -472,8 +476,8 @@ def WriteSpec(self, spec, config_name, generator_flags): if self.flavor != 'mac' or len(self.archs) == 1: link_deps += [self.GypPathToNinja(o) for o in obj_outputs] else: - print "Warning: Actions/rules writing object files don't work with " \ - "multiarch targets, dropping. (target %s)" % spec['target_name'] + print("Warning: Actions/rules writing object files don't work with " \ + "multiarch targets, dropping. (target %s)" % spec['target_name']) elif self.flavor == 'mac' and len(self.archs) > 1: link_deps = collections.defaultdict(list) @@ -722,7 +726,7 @@ def cygwin_munge(path): elif var == 'name': extra_bindings.append(('name', cygwin_munge(basename))) else: - assert var == None, repr(var) + assert var is None, repr(var) outputs = [self.GypPathToNinja(o, env) for o in outputs] if self.flavor == 'win': @@ -796,7 +800,7 @@ def WriteMacXCassets(self, xcassets, bundle_depends): 'XCASSETS_LAUNCH_IMAGE': 'launch-image', } settings = self.xcode_settings.xcode_settings[self.config_name] - for settings_key, arg_name in settings_to_arg.iteritems(): + for settings_key, arg_name in settings_to_arg.items(): value = settings.get(settings_key) if value: extra_arguments[arg_name] = value @@ -1817,7 +1821,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, # - The priority from low to high is gcc/g++, the 'make_global_settings' in # gyp, the environment variable. # - If there is no 'make_global_settings' for CC.host/CXX.host or - # 'CC_host'/'CXX_host' enviroment variable, cc_host/cxx_host should be set + # 'CC_host'/'CXX_host' environment variable, cc_host/cxx_host should be set # to cc/cxx. if flavor == 'win': ar = 'lib.exe' @@ -1889,7 +1893,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value) # Support wrappers from environment variables too. 
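The StringIO changes above (msvs_test.py, and ninja.py just below) use the same import shim: cStringIO is gone on Python 3, where io.StringIO is the in-memory text stream, and the try/except keeps the faster C implementation on Python 2. Sketch:

try:
    from cStringIO import StringIO   # Python 2
except ImportError:
    from io import StringIO          # Python 3

buf = StringIO()
buf.write('rule cc\n  command = $cc -c $in -o $out\n')
print(buf.getvalue())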
- for key, value in os.environ.iteritems(): + for key, value in os.environ.items(): if key.lower().endswith('_wrapper'): key_prefix = key[:-len('_wrapper')] key_prefix = re.sub(r'\.HOST$', '.host', key_prefix) @@ -1905,7 +1909,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, configs, generator_flags) cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles( toplevel_build, generator_flags, shared_system_includes, OpenOutput) - for arch, path in cl_paths.iteritems(): + for arch, path in cl_paths.items(): if clang_cl: # If we have selected clang-cl, use that instead. path = clang_cl @@ -2376,7 +2380,7 @@ def PerformBuild(data, configurations, params): for config in configurations: builddir = os.path.join(options.toplevel_dir, 'out', config) arguments = ['ninja', '-C', builddir] - print 'Building [%s]: %s' % (config, arguments) + print('Building [%s]: %s' % (config, arguments)) subprocess.check_call(arguments) @@ -2413,7 +2417,7 @@ def GenerateOutput(target_list, target_dicts, data, params): arglists.append( (target_list, target_dicts, data, params, config_name)) pool.map(CallGenerateOutputForConfig, arglists) - except KeyboardInterrupt, e: + except KeyboardInterrupt as e: pool.terminate() raise e else: diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py index 1767b2f45a04c..5ecfbdf004d21 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py @@ -8,9 +8,7 @@ import gyp.generator.ninja as ninja import unittest -import StringIO import sys -import TestCommon class TestPrefixesAndSuffixes(unittest.TestCase): diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py index 0e3fb9301ecb9..6317d04c70302 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py @@ -1,3 +1,4 @@ +from __future__ import print_function # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -128,7 +129,7 @@ def __init__(self, gyp_path, path, build_file_dict): try: os.makedirs(self.path) self.created_dir = True - except OSError, e: + except OSError as e: if e.errno != errno.EEXIST: raise @@ -182,7 +183,7 @@ def Finalize1(self, xcode_targets, serialize_all_tests): # the tree tree view for UI display. # Any values set globally are applied to all configurations, then any # per-configuration values are applied. 
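Every .iteritems()/.itervalues() in these files becomes .items()/.values(), which exists on both interpreters (a list on Python 2, a view on Python 3), so callers that only iterate are unaffected. A small sketch of the wrapper-scanning loop shown above, using a stand-in dict instead of os.environ so it runs anywhere:

import re

fake_environ = {'CC_WRAPPER': 'ccache', 'CXX.HOST_WRAPPER': 'distcc', 'PATH': '/usr/bin'}

wrappers = {}
for key, value in fake_environ.items():   # .iteritems() would fail on Python 3
    if key.lower().endswith('_wrapper'):
        key_prefix = key[:-len('_wrapper')]
        key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
        wrappers[key_prefix] = value

assert wrappers == {'CC': 'ccache', 'CXX.host': 'distcc'}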
- for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems(): + for xck, xcv in self.build_file_dict.get('xcode_settings', {}).items(): xccl.SetBuildSetting(xck, xcv) if 'xcode_config_file' in self.build_file_dict: config_ref = self.project.AddOrGetFileInRootGroup( @@ -196,7 +197,7 @@ def Finalize1(self, xcode_targets, serialize_all_tests): if build_file_configuration_named: xcc = xccl.ConfigurationNamed(config_name) for xck, xcv in build_file_configuration_named.get('xcode_settings', - {}).iteritems(): + {}).items(): xcc.SetBuildSetting(xck, xcv) if 'xcode_config_file' in build_file_configuration_named: config_ref = self.project.AddOrGetFileInRootGroup( @@ -245,7 +246,7 @@ def Finalize1(self, xcode_targets, serialize_all_tests): targets_for_all.append(xcode_target) if target_name.lower() == 'all': - has_custom_all = True; + has_custom_all = True # If this target has a 'run_as' attribute, add its target to the # targets, and add it to the test targets. @@ -272,7 +273,7 @@ def Finalize1(self, xcode_targets, serialize_all_tests): script = script + "\n".join( ['export %s="%s"' % (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val)) - for (key, val) in command.get('environment').iteritems()]) + "\n" + for (key, val) in command.get('environment').items()]) + "\n" # Some test end up using sockets, files on disk, etc. and can get # confused if more then one test runs at a time. The generator @@ -453,7 +454,7 @@ def Write(self): same = False try: same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False) - except OSError, e: + except OSError as e: if e.errno != errno.ENOENT: raise @@ -472,10 +473,10 @@ def Write(self): # # No way to get the umask without setting a new one? Set a safe one # and then set it back to the old value. - umask = os.umask(077) + umask = os.umask(0o77) os.umask(umask) - os.chmod(new_pbxproj_path, 0666 & ~umask) + os.chmod(new_pbxproj_path, 0o666 & ~umask) os.rename(new_pbxproj_path, pbxproj_path) except Exception: @@ -538,7 +539,7 @@ def ExpandXcodeVariables(string, expansions): """ matches = _xcode_variable_re.findall(string) - if matches == None: + if matches is None: return string matches.reverse() @@ -565,7 +566,7 @@ def EscapeXcodeDefine(s): def PerformBuild(data, configurations, params): options = params['options'] - for build_file, build_file_dict in data.iteritems(): + for build_file, build_file_dict in data.items(): (build_file_root, build_file_ext) = os.path.splitext(build_file) if build_file_ext != '.gyp': continue @@ -576,7 +577,7 @@ def PerformBuild(data, configurations, params): for config in configurations: arguments = ['xcodebuild', '-project', xcodeproj_path] arguments += ['-configuration', config] - print "Building [%s]: %s" % (config, arguments) + print("Building [%s]: %s" % (config, arguments)) subprocess.check_call(arguments) @@ -624,7 +625,7 @@ def GenerateOutput(target_list, target_dicts, data, params): skip_excluded_files = \ not generator_flags.get('xcode_list_excluded_files', True) xcode_projects = {} - for build_file, build_file_dict in data.iteritems(): + for build_file, build_file_dict in data.items(): (build_file_root, build_file_ext) = os.path.splitext(build_file) if build_file_ext != '.gyp': continue @@ -636,7 +637,7 @@ def GenerateOutput(target_list, target_dicts, data, params): pbxp = xcp.project # Set project-level attributes from multiple options - project_attributes = {}; + project_attributes = {} if parallel_builds: project_attributes['BuildIndependentTargetsInParallel'] = 'YES' if upgrade_check_project_version: @@ 
-736,7 +737,7 @@ def GenerateOutput(target_list, target_dicts, data, params): xctarget_type = gyp.xcodeproj_file.PBXNativeTarget try: target_properties['productType'] = _types[type_bundle_key] - except KeyError, e: + except KeyError as e: gyp.common.ExceptionAppend(e, "-- unknown product type while " "writing target %s" % target_name) raise @@ -775,7 +776,7 @@ def GenerateOutput(target_list, target_dicts, data, params): # logic all happens in ninja. Don't bother creating the extra targets in # that case. if type != 'none' and (spec_actions or spec_rules) and not ninja_wrapper: - support_xccl = CreateXCConfigurationList(configuration_names); + support_xccl = CreateXCConfigurationList(configuration_names) support_target_suffix = generator_flags.get( 'support_target_suffix', ' Support') support_target_properties = { @@ -997,7 +998,7 @@ def GenerateOutput(target_list, target_dicts, data, params): actions.append(action) if len(concrete_outputs_all) > 0: - # TODO(mark): There's a possibilty for collision here. Consider + # TODO(mark): There's a possibility for collision here. Consider # target "t" rule "A_r" and target "t_A" rule "r". makefile_name = '%s.make' % re.sub( '[^a-zA-Z0-9_]', '_' , '%s_%s' % (target_name, rule['rule_name'])) @@ -1013,7 +1014,7 @@ def GenerateOutput(target_list, target_dicts, data, params): # target. makefile.write('all: \\\n') for concrete_output_index in \ - xrange(0, len(concrete_outputs_by_rule_source)): + range(0, len(concrete_outputs_by_rule_source)): # Only list the first (index [0]) concrete output of each input # in the "all" target. Otherwise, a parallel make (-j > 1) would # attempt to process each input multiple times simultaneously. @@ -1036,7 +1037,7 @@ def GenerateOutput(target_list, target_dicts, data, params): # rule source. Collect the names of the directories that are # required. concrete_output_dirs = [] - for concrete_output_index in xrange(0, len(concrete_outputs)): + for concrete_output_index in range(0, len(concrete_outputs)): concrete_output = concrete_outputs[concrete_output_index] if concrete_output_index == 0: bol = '' @@ -1055,7 +1056,7 @@ def GenerateOutput(target_list, target_dicts, data, params): # the set of additional rule inputs, if any. 
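xrange() no longer exists on Python 3; plain range() works on both versions (it merely builds a list on Python 2, which is harmless for the short sequences these loops walk). A tiny illustration of the "first concrete output only" loop above, with made-up file names:

concrete_outputs_by_rule_source = [['a.h', 'a.cc'], ['b.h', 'b.cc']]

first_outputs = []
for concrete_output_index in range(0, len(concrete_outputs_by_rule_source)):
    # Mirror the makefile rule: only the first (index [0]) output per source.
    first_outputs.append(concrete_outputs_by_rule_source[concrete_output_index][0])

assert first_outputs == ['a.h', 'b.h']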
prerequisites = [rule_source] prerequisites.extend(rule.get('inputs', [])) - for prerequisite_index in xrange(0, len(prerequisites)): + for prerequisite_index in range(0, len(prerequisites)): prerequisite = prerequisites[prerequisite_index] if prerequisite_index == len(prerequisites) - 1: eol = '' @@ -1170,7 +1171,7 @@ def GenerateOutput(target_list, target_dicts, data, params): dest = '$(SRCROOT)/' + dest code_sign = int(copy_group.get('xcode_code_sign', 0)) - settings = (None, '{ATTRIBUTES = (CodeSignOnCopy, ); }')[code_sign]; + settings = (None, '{ATTRIBUTES = (CodeSignOnCopy, ); }')[code_sign] # Coalesce multiple "copies" sections in the same target with the same # "destination" property into the same PBXCopyFilesBuildPhase, otherwise @@ -1277,7 +1278,7 @@ def GenerateOutput(target_list, target_dicts, data, params): set_define = EscapeXcodeDefine(define) xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define) if 'xcode_settings' in configuration: - for xck, xcv in configuration['xcode_settings'].iteritems(): + for xck, xcv in configuration['xcode_settings'].items(): xcbc.SetBuildSetting(xck, xcv) if 'xcode_config_file' in configuration: config_ref = pbxp.AddOrGetFileInRootGroup( @@ -1285,7 +1286,7 @@ def GenerateOutput(target_list, target_dicts, data, params): xcbc.SetBaseConfiguration(config_ref) build_files = [] - for build_file, build_file_dict in data.iteritems(): + for build_file, build_file_dict in data.items(): if build_file.endswith('.gyp'): build_files.append(build_file) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/input.py b/node_modules/node-gyp/gyp/pylib/gyp/input.py index 10f6e0dba16de..2973c078fc82b 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/input.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/input.py @@ -2,14 +2,10 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -from compiler.ast import Const -from compiler.ast import Dict -from compiler.ast import Discard -from compiler.ast import List -from compiler.ast import Module -from compiler.ast import Node -from compiler.ast import Stmt -import compiler +from __future__ import print_function + +import ast + import gyp.common import gyp.simple_copy import multiprocessing @@ -26,6 +22,7 @@ from gyp.common import GypError from gyp.common import OrderedSet +PY3 = bytes != str # A list of types that are treated as linkable. linkable_types = [ @@ -159,7 +156,7 @@ def GetIncludedBuildFiles(build_file_path, aux_data, included=None): in the list will be relative to the current directory. """ - if included == None: + if included is None: included = [] if build_file_path in included: @@ -182,43 +179,39 @@ def CheckedEval(file_contents): Note that this is slower than eval() is. 
""" - ast = compiler.parse(file_contents) - assert isinstance(ast, Module) - c1 = ast.getChildren() - assert c1[0] is None - assert isinstance(c1[1], Stmt) - c2 = c1[1].getChildren() - assert isinstance(c2[0], Discard) - c3 = c2[0].getChildren() - assert len(c3) == 1 - return CheckNode(c3[0], []) + syntax_tree = ast.parse(file_contents) + assert isinstance(syntax_tree, ast.Module) + c1 = syntax_tree.body + assert len(c1) == 1 + c2 = c1[0] + assert isinstance(c2, ast.Expr) + return CheckNode(c2.value, []) def CheckNode(node, keypath): - if isinstance(node, Dict): + if isinstance(node, ast.Dict): c = node.getChildren() dict = {} - for n in range(0, len(c), 2): - assert isinstance(c[n], Const) - key = c[n].getChildren()[0] + for key, value in zip(node.keys, node.values): + assert isinstance(key, ast.Str) + key = key.s if key in dict: raise GypError("Key '" + key + "' repeated at level " + repr(len(keypath) + 1) + " with key path '" + '.'.join(keypath) + "'") kp = list(keypath) # Make a copy of the list for descending this node. kp.append(key) - dict[key] = CheckNode(c[n + 1], kp) + dict[key] = CheckNode(value, kp) return dict - elif isinstance(node, List): - c = node.getChildren() + elif isinstance(node, ast.List): children = [] - for index, child in enumerate(c): + for index, child in enumerate(node.elts): kp = list(keypath) # Copy list. kp.append(repr(index)) children.append(CheckNode(child, kp)) return children - elif isinstance(node, Const): - return node.getChildren()[0] + elif isinstance(node, ast.Str): + return node.s else: raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) + "': " + repr(node)) @@ -247,12 +240,12 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes, if check: build_file_data = CheckedEval(build_file_contents) else: - build_file_data = eval(build_file_contents, {'__builtins__': None}, + build_file_data = eval(build_file_contents, {'__builtins__': {}}, None) - except SyntaxError, e: + except SyntaxError as e: e.filename = build_file_path raise - except Exception, e: + except Exception as e: gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path) raise @@ -272,7 +265,7 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes, else: LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, aux_data, None, check) - except Exception, e: + except Exception as e: gyp.common.ExceptionAppend(e, 'while reading includes of ' + build_file_path) raise @@ -309,7 +302,7 @@ def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data, subdict_path, include) # Recurse into subdictionaries. - for k, v in subdict.iteritems(): + for k, v in subdict.items(): if type(v) is dict: LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, None, check) @@ -474,7 +467,7 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, try: LoadTargetBuildFile(dependency, data, aux_data, variables, includes, depth, check, load_dependencies) - except Exception, e: + except Exception as e: gyp.common.ExceptionAppend( e, 'while loading dependencies of %s' % build_file_path) raise @@ -495,7 +488,7 @@ def CallLoadTargetBuildFile(global_flags, signal.signal(signal.SIGINT, signal.SIG_IGN) # Apply globals so that the worker process behaves the same. 
- for key, value in global_flags.iteritems(): + for key, value in global_flags.items(): globals()[key] = value SetGeneratorGlobals(generator_input_info) @@ -517,12 +510,12 @@ def CallLoadTargetBuildFile(global_flags, return (build_file_path, build_file_data, dependencies) - except GypError, e: + except GypError as e: sys.stderr.write("gyp: %s\n" % e) return None - except Exception, e: - print >>sys.stderr, 'Exception:', e - print >>sys.stderr, traceback.format_exc() + except Exception as e: + print('Exception:', e, file=sys.stderr) + print(traceback.format_exc(), file=sys.stderr) return None @@ -612,7 +605,7 @@ def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth, args = (global_flags, dependency, variables, includes, depth, check, generator_input_info), callback = parallel_state.LoadTargetBuildFileCallback) - except KeyboardInterrupt, e: + except KeyboardInterrupt as e: parallel_state.pool.terminate() raise e @@ -912,11 +905,14 @@ def ExpandVariables(input, phase, variables, build_file): stderr=subprocess.PIPE, stdin=subprocess.PIPE, cwd=build_file_dir) - except Exception, e: + except Exception as e: raise GypError("%s while executing command '%s' in %s" % (e, contents, build_file)) p_stdout, p_stderr = p.communicate('') + if PY3: + p_stdout = p_stdout.decode('utf-8') + p_stderr = p_stderr.decode('utf-8') if p.wait() != 0 or p_stderr: sys.stderr.write(p_stderr) @@ -952,8 +948,12 @@ def ExpandVariables(input, phase, variables, build_file): else: replacement = variables[contents] + if isinstance(replacement, bytes) and not isinstance(replacement, str): + replacement = replacement.decode("utf-8") # done on Python 3 only if type(replacement) is list: for item in replacement: + if isinstance(item, bytes) and not isinstance(item, str): + item = item.decode("utf-8") # done on Python 3 only if not contents[-1] == '/' and type(item) not in (str, int): raise GypError('Variable ' + contents + ' must expand to a string or list of strings; ' + @@ -1026,7 +1026,7 @@ def ExpandVariables(input, phase, variables, build_file): # Convert all strings that are canonically-represented integers into integers. if type(output) is list: - for index in xrange(0, len(output)): + for index in range(0, len(output)): if IsStrCanonicalInt(output[index]): output[index] = int(output[index]) elif IsStrCanonicalInt(output): @@ -1066,7 +1066,7 @@ def EvalCondition(condition, conditions_key, phase, variables, build_file): else: false_dict = None i = i + 2 - if result == None: + if result is None: result = EvalSingleCondition( cond_expr, true_dict, false_dict, phase, variables, build_file) @@ -1077,7 +1077,7 @@ def EvalSingleCondition( cond_expr, true_dict, false_dict, phase, variables, build_file): """Returns true_dict if cond_expr evaluates to true, and false_dict otherwise.""" - # Do expansions on the condition itself. Since the conditon can naturally + # Do expansions on the condition itself. Since the condition can naturally # contain variable references without needing to resort to GYP expansion # syntax, this is of dubious value for variables, but someone might want to # use a command expansion directly inside a condition. 
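The new PY3 = bytes != str flag captures the one behavioural difference these hunks care about: subprocess pipes yield bytes on Python 3 but str on Python 2, so output is decoded only when running under 3. The idiom in isolation (the child command is arbitrary):

import subprocess
import sys

PY3 = bytes != str   # False on Python 2, True on Python 3

p = subprocess.Popen([sys.executable, '-c', 'print("hello from child")'],
                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
if PY3:
    out = out.decode('utf-8')   # bytes -> str, matching Python 2 behaviour
    err = err.decode('utf-8')

assert 'hello from child' in out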
@@ -1094,16 +1094,16 @@ def EvalSingleCondition( else: ast_code = compile(cond_expr_expanded, '', 'eval') cached_conditions_asts[cond_expr_expanded] = ast_code - if eval(ast_code, {'__builtins__': None}, variables): + if eval(ast_code, {'__builtins__': {}}, variables): return true_dict return false_dict - except SyntaxError, e: + except SyntaxError as e: syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s ' 'at character %d.' % (str(e.args[0]), e.text, build_file, e.offset), e.filename, e.lineno, e.offset, e.text) raise syntax_error - except NameError, e: + except NameError as e: gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' % (cond_expr_expanded, build_file)) raise GypError(e) @@ -1158,7 +1158,7 @@ def ProcessConditionsInDict(the_dict, phase, variables, build_file): def LoadAutomaticVariablesFromDict(variables, the_dict): # Any keys with plain string values in the_dict become automatic variables. # The variable name is the key name with a "_" character prepended. - for key, value in the_dict.iteritems(): + for key, value in the_dict.items(): if type(value) in (str, int, list): variables['_' + key] = value @@ -1171,7 +1171,7 @@ def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key): # the_dict in the_dict's parent dict. If the_dict's parent is not a dict # (it could be a list or it could be parentless because it is a root dict), # the_dict_key will be None. - for key, value in the_dict.get('variables', {}).iteritems(): + for key, value in the_dict.get('variables', {}).items(): if type(value) not in (str, int, list): continue @@ -1180,9 +1180,9 @@ def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key): if variable_name in variables: # If the variable is already set, don't set it. continue - if the_dict_key is 'variables' and variable_name in the_dict: + if the_dict_key == 'variables' and variable_name in the_dict: # If the variable is set without a % in the_dict, and the_dict is a - # variables dict (making |variables| a varaibles sub-dict of a + # variables dict (making |variables| a variables sub-dict of a # variables dict), use the_dict's definition. value = the_dict[variable_name] else: @@ -1210,7 +1210,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, # list before we process them so that you can reference one # variable from another. They will be fully expanded by recursion # in ExpandVariables. - for key, value in the_dict['variables'].iteritems(): + for key, value in the_dict['variables'].items(): variables[key] = value # Handle the associated variables dict first, so that any variable @@ -1223,7 +1223,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - for key, value in the_dict.iteritems(): + for key, value in the_dict.items(): # Skip "variables", which was already processed if present. if key != 'variables' and type(value) is str: expanded = ExpandVariables(value, phase, variables, build_file) @@ -1281,7 +1281,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, # Recurse into child dicts, or process child lists which may result in # further recursion into descendant dicts. - for key, value in the_dict.iteritems(): + for key, value in the_dict.items(): # Skip "variables" and string values, which were already processed if # present. 
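One change above is a genuine bug fix rather than a port: `the_dict_key is 'variables'` tested object identity and only worked because CPython happens to intern short string literals; `==` tests the value and is guaranteed. A contrived example of why identity is the wrong check (the strings are arbitrary):

key = ''.join(['vari', 'ables'])   # equal to 'variables', but a distinct object

assert key == 'variables'          # value comparison: always true here
# "key is 'variables'" depends on interning and can legitimately be False,
# which is why the hunk switches the comparison to ==.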
if key == 'variables' or type(value) is str: @@ -1378,12 +1378,12 @@ def QualifyDependencies(targets): for dep in dependency_sections for op in ('', '!', '/')] - for target, target_dict in targets.iteritems(): + for target, target_dict in targets.items(): target_build_file = gyp.common.BuildFile(target) toolset = target_dict['toolset'] for dependency_key in all_dependency_sections: dependencies = target_dict.get(dependency_key, []) - for index in xrange(0, len(dependencies)): + for index in range(0, len(dependencies)): dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget( target_build_file, dependencies[index], toolset) if not multiple_toolsets: @@ -1418,13 +1418,13 @@ def ExpandWildcardDependencies(targets, data): dependency list, must be qualified when this function is called. """ - for target, target_dict in targets.iteritems(): + for target, target_dict in targets.items(): toolset = target_dict['toolset'] target_build_file = gyp.common.BuildFile(target) for dependency_key in dependency_sections: dependencies = target_dict.get(dependency_key, []) - # Loop this way instead of "for dependency in" or "for index in xrange" + # Loop this way instead of "for dependency in" or "for index in range" # because the dependencies list will be modified within the loop body. index = 0 while index < len(dependencies): @@ -1480,7 +1480,7 @@ def Unify(l): def RemoveDuplicateDependencies(targets): """Makes sure every dependency appears only once in all targets's dependency lists.""" - for target_name, target_dict in targets.iteritems(): + for target_name, target_dict in targets.items(): for dependency_key in dependency_sections: dependencies = target_dict.get(dependency_key, []) if dependencies: @@ -1496,7 +1496,7 @@ def Filter(l, item): def RemoveSelfDependencies(targets): """Remove self dependencies from targets that have the prune_self_dependency variable set.""" - for target_name, target_dict in targets.iteritems(): + for target_name, target_dict in targets.items(): for dependency_key in dependency_sections: dependencies = target_dict.get(dependency_key, []) if dependencies: @@ -1509,7 +1509,7 @@ def RemoveSelfDependencies(targets): def RemoveLinkDependenciesFromNoneTargets(targets): """Remove dependencies having the 'link_dependency' attribute from the 'none' targets.""" - for target_name, target_dict in targets.iteritems(): + for target_name, target_dict in targets.items(): for dependency_key in dependency_sections: dependencies = target_dict.get(dependency_key, []) if dependencies: @@ -1607,7 +1607,7 @@ def Visit(node, path): def DirectDependencies(self, dependencies=None): """Returns a list of just direct dependencies.""" - if dependencies == None: + if dependencies is None: dependencies = [] for dependency in self.dependencies: @@ -1635,7 +1635,7 @@ def _AddImportedDependencies(self, targets, dependencies=None): public entry point. """ - if dependencies == None: + if dependencies is None: dependencies = [] index = 0 @@ -1795,14 +1795,14 @@ def BuildDependencyList(targets): # Create a DependencyGraphNode for each target. Put it into a dict for easy # access. dependency_nodes = {} - for target, spec in targets.iteritems(): + for target, spec in targets.items(): if target not in dependency_nodes: dependency_nodes[target] = DependencyGraphNode(target) # Set up the dependency links. Targets that have no dependencies are treated # as dependent on root_node. 
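targets.iterkeys() disappears the same way: iterating a dict directly yields its keys on both interpreters, so `for target in targets:` is the portable spelling used in VerifyNoGYPFileCircularDependencies. A stand-in sketch (the target names and the split are illustrative, not gyp.common.BuildFile):

targets = {'a.gyp:lib#target': {}, 'b.gyp:app#target': {}}

build_files = set()
for target in targets:                 # iterates keys; no iterkeys() needed
    build_files.add(target.split(':')[0])

assert build_files == set(['a.gyp', 'b.gyp'])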
root_node = DependencyGraphNode(None) - for target, spec in targets.iteritems(): + for target, spec in targets.items(): target_node = dependency_nodes[target] target_build_file = gyp.common.BuildFile(target) dependencies = spec.get('dependencies') @@ -1845,20 +1845,20 @@ def VerifyNoGYPFileCircularDependencies(targets): # Create a DependencyGraphNode for each gyp file containing a target. Put # it into a dict for easy access. dependency_nodes = {} - for target in targets.iterkeys(): + for target in targets: build_file = gyp.common.BuildFile(target) if not build_file in dependency_nodes: dependency_nodes[build_file] = DependencyGraphNode(build_file) # Set up the dependency links. - for target, spec in targets.iteritems(): + for target, spec in targets.items(): build_file = gyp.common.BuildFile(target) build_file_node = dependency_nodes[build_file] target_dependencies = spec.get('dependencies', []) for dependency in target_dependencies: try: dependency_build_file = gyp.common.BuildFile(dependency) - except GypError, e: + except GypError as e: gyp.common.ExceptionAppend( e, 'while computing dependencies of .gyp file %s' % build_file) raise @@ -1868,7 +1868,7 @@ def VerifyNoGYPFileCircularDependencies(targets): continue dependency_node = dependency_nodes.get(dependency_build_file) if not dependency_node: - raise GypError("Dependancy '%s' not found" % dependency_build_file) + raise GypError("Dependency '%s' not found" % dependency_build_file) if dependency_node not in build_file_node.dependencies: build_file_node.dependencies.append(dependency_node) dependency_node.dependents.append(build_file_node) @@ -1876,7 +1876,7 @@ def VerifyNoGYPFileCircularDependencies(targets): # Files that have no dependencies are treated as dependent on root_node. root_node = DependencyGraphNode(None) - for build_file_node in dependency_nodes.itervalues(): + for build_file_node in dependency_nodes.values(): if len(build_file_node.dependencies) == 0: build_file_node.dependencies.append(root_node) root_node.dependents.append(build_file_node) @@ -2116,7 +2116,7 @@ def is_in_set_or_list(x, s, l): def MergeDicts(to, fro, to_file, fro_file): # I wanted to name the parameter "from" but it's a Python keyword... - for k, v in fro.iteritems(): + for k, v in fro.items(): # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give # copy semantics. Something else may want to merge from the |fro| dict # later, and having the same dict ref pointed to twice in the tree isn't @@ -2251,13 +2251,13 @@ def SetUpConfigurations(target, target_dict): if not 'configurations' in target_dict: target_dict['configurations'] = {'Default': {}} if not 'default_configuration' in target_dict: - concrete = [i for (i, config) in target_dict['configurations'].iteritems() + concrete = [i for (i, config) in target_dict['configurations'].items() if not config.get('abstract')] target_dict['default_configuration'] = sorted(concrete)[0] merged_configurations = {} configs = target_dict['configurations'] - for (configuration, old_configuration_dict) in configs.iteritems(): + for (configuration, old_configuration_dict) in configs.items(): # Skip abstract configurations (saves work only). if old_configuration_dict.get('abstract'): continue @@ -2265,7 +2265,7 @@ def SetUpConfigurations(target, target_dict): # Get the inheritance relationship right by making a copy of the target # dict. 
new_configuration_dict = {} - for (key, target_val) in target_dict.iteritems(): + for (key, target_val) in target_dict.items(): key_ext = key[-1:] if key_ext in key_suffixes: key_base = key[:-1] @@ -2349,7 +2349,7 @@ def ProcessListFiltersInDict(name, the_dict): lists = [] del_lists = [] - for key, value in the_dict.iteritems(): + for key, value in the_dict.items(): operation = key[-1] if operation != '!' and operation != '/': continue @@ -2397,7 +2397,7 @@ def ProcessListFiltersInDict(name, the_dict): exclude_key = list_key + '!' if exclude_key in the_dict: for exclude_item in the_dict[exclude_key]: - for index in xrange(0, len(the_list)): + for index in range(0, len(the_list)): if exclude_item == the_list[index]: # This item matches the exclude_item, so set its action to 0 # (exclude). @@ -2423,7 +2423,7 @@ def ProcessListFiltersInDict(name, the_dict): raise ValueError('Unrecognized action ' + action + ' in ' + name + \ ' key ' + regex_key) - for index in xrange(0, len(the_list)): + for index in range(0, len(the_list)): list_item = the_list[index] if list_actions[index] == action_value: # Even if the regex matches, nothing will change so continue (regex @@ -2454,7 +2454,7 @@ def ProcessListFiltersInDict(name, the_dict): # the indices of items that haven't been seen yet don't shift. That means # that things need to be prepended to excluded_list to maintain them in the # same order that they existed in the_list. - for index in xrange(len(list_actions) - 1, -1, -1): + for index in range(len(list_actions) - 1, -1, -1): if list_actions[index] == 0: # Dump anything with action 0 (exclude). Keep anything with action 1 # (include) or -1 (no include or exclude seen for the item). @@ -2467,7 +2467,7 @@ def ProcessListFiltersInDict(name, the_dict): the_dict[excluded_key] = excluded_list # Now recurse into subdicts and lists that may contain dicts. - for key, value in the_dict.iteritems(): + for key, value in the_dict.items(): if type(value) is dict: ProcessListFiltersInDict(key, value) elif type(value) is list: @@ -2524,13 +2524,13 @@ def ValidateSourcesInTarget(target, target_dict, build_file, basenames.setdefault(basename, []).append(source) error = '' - for basename, files in basenames.iteritems(): + for basename, files in basenames.items(): if len(files) > 1: error += ' %s: %s\n' % (basename, ' '.join(files)) if error: - print('static library %s has several files with the same basename:\n' % - target + error + 'libtool on Mac cannot handle that. Use ' + print('static library %s has several files with the same basename:\n' % target + + error + 'libtool on Mac cannot handle that. Use ' '--no-duplicate-basename-check to disable this validation.') raise GypError('Duplicate basenames in sources section, see list above') @@ -2644,7 +2644,7 @@ def ValidateActionsInTarget(target, target_dict, build_file): def TurnIntIntoStrInDict(the_dict): """Given dict the_dict, recursively converts all integers into strings. """ - # Use items instead of iteritems because there's no need to try to look at + # Use items instead of items because there's no need to try to look at # reinserted keys and their associated values. for k, v in the_dict.items(): if type(v) is int: @@ -2663,7 +2663,7 @@ def TurnIntIntoStrInDict(the_dict): def TurnIntIntoStrInList(the_list): """Given list the_list, recursively converts all integers into strings. 
""" - for index in xrange(0, len(the_list)): + for index in range(0, len(the_list)): item = the_list[index] if type(item) is int: the_list[index] = str(item) @@ -2781,7 +2781,7 @@ def Load(build_files, variables, includes, depth, generator_input_info, check, try: LoadTargetBuildFile(build_file, data, aux_data, variables, includes, depth, check, True) - except Exception, e: + except Exception as e: gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) raise @@ -2803,7 +2803,7 @@ def Load(build_files, variables, includes, depth, generator_input_info, check, RemoveLinkDependenciesFromNoneTargets(targets) # Apply exclude (!) and regex (/) list filters only for dependency_sections. - for target_name, target_dict in targets.iteritems(): + for target_name, target_dict in targets.items(): tmp_dict = {} for key_base in dependency_sections: for op in ('', '!', '/'): diff --git a/node_modules/node-gyp/gyp/pylib/gyp/input_test.py b/node_modules/node-gyp/gyp/pylib/gyp/input_test.py index 4234fbb8302eb..1bc5e3d3081ea 100755 --- a/node_modules/node-gyp/gyp/pylib/gyp/input_test.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/input_test.py @@ -22,38 +22,38 @@ def _create_dependency(self, dependent, dependency): dependency.dependents.append(dependent) def test_no_cycle_empty_graph(self): - for label, node in self.nodes.iteritems(): - self.assertEquals([], node.FindCycles()) + for label, node in self.nodes.items(): + self.assertEqual([], node.FindCycles()) def test_no_cycle_line(self): self._create_dependency(self.nodes['a'], self.nodes['b']) self._create_dependency(self.nodes['b'], self.nodes['c']) self._create_dependency(self.nodes['c'], self.nodes['d']) - for label, node in self.nodes.iteritems(): - self.assertEquals([], node.FindCycles()) + for label, node in self.nodes.items(): + self.assertEqual([], node.FindCycles()) def test_no_cycle_dag(self): self._create_dependency(self.nodes['a'], self.nodes['b']) self._create_dependency(self.nodes['a'], self.nodes['c']) self._create_dependency(self.nodes['b'], self.nodes['c']) - for label, node in self.nodes.iteritems(): - self.assertEquals([], node.FindCycles()) + for label, node in self.nodes.items(): + self.assertEqual([], node.FindCycles()) def test_cycle_self_reference(self): self._create_dependency(self.nodes['a'], self.nodes['a']) - self.assertEquals([[self.nodes['a'], self.nodes['a']]], + self.assertEqual([[self.nodes['a'], self.nodes['a']]], self.nodes['a'].FindCycles()) def test_cycle_two_nodes(self): self._create_dependency(self.nodes['a'], self.nodes['b']) self._create_dependency(self.nodes['b'], self.nodes['a']) - self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]], + self.assertEqual([[self.nodes['a'], self.nodes['b'], self.nodes['a']]], self.nodes['a'].FindCycles()) - self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]], + self.assertEqual([[self.nodes['b'], self.nodes['a'], self.nodes['b']]], self.nodes['b'].FindCycles()) def test_two_cycles(self): @@ -68,7 +68,7 @@ def test_two_cycles(self): [self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles) self.assertTrue( [self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles) - self.assertEquals(2, len(cycles)) + self.assertEqual(2, len(cycles)) def test_big_cycle(self): self._create_dependency(self.nodes['a'], self.nodes['b']) @@ -77,7 +77,7 @@ def test_big_cycle(self): self._create_dependency(self.nodes['d'], self.nodes['e']) self._create_dependency(self.nodes['e'], self.nodes['a']) - self.assertEquals([[self.nodes['a'], + 
self.assertEqual([[self.nodes['a'], self.nodes['b'], self.nodes['c'], self.nodes['d'], diff --git a/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py b/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py index eeeaceb0c7aa2..222befb982a9d 100755 --- a/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py @@ -8,6 +8,8 @@ These functions are executed via gyp-mac-tool when using the Makefile generator. """ +from __future__ import print_function + import fcntl import fnmatch import glob @@ -21,6 +23,8 @@ import sys import tempfile +PY3 = bytes != str + def main(args): executor = MacTool() @@ -125,7 +129,7 @@ def _DetectInputEncoding(self, file_name): fp = open(file_name, 'rb') try: header = fp.read(3) - except e: + except Exception: fp.close() return None fp.close() @@ -241,9 +245,11 @@ def ExecFilterLibtool(self, *cmd_list): env['ZERO_AR_DATE'] = '1' libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) _, err = libtoolout.communicate() + if PY3: + err = err.decode('utf-8') for line in err.splitlines(): if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line + print(line, file=sys.stderr) # Unconditionally touch the output .a file on the command line if present # and the command succeeded. A bit hacky. if not libtoolout.returncode: @@ -324,7 +330,7 @@ def ExecCompileXcassets(self, keys, *inputs): ]) if keys: keys = json.loads(keys) - for key, value in keys.iteritems(): + for key, value in keys.items(): arg_name = '--' + key if isinstance(value, bool): if value: @@ -440,8 +446,7 @@ def _FindProvisioningProfile(self, profile, bundle_identifier): profiles_dir = os.path.join( os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) + print('cannot find mobile provisioning for %s' % (bundle_identifier), file=sys.stderr) sys.exit(1) provisioning_profiles = None if profile: @@ -462,8 +467,7 @@ def _FindProvisioningProfile(self, profile, bundle_identifier): valid_provisioning_profiles[app_id_pattern] = ( profile_path, profile_data, team_identifier) if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) + print('cannot find mobile provisioning for %s' % (bundle_identifier), file=sys.stderr) sys.exit(1) # If the user has multiple provisioning profiles installed that can be # used for ${bundle_identifier}, pick the most specific one (ie. the @@ -487,7 +491,7 @@ def _LoadProvisioningProfile(self, profile_path): def _MergePlist(self, merged_plist, plist): """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): + for key, value in plist.items(): if isinstance(value, dict): merged_value = merged_plist.get(key, {}) if isinstance(merged_value, dict): @@ -597,7 +601,7 @@ def _ExpandVariables(self, data, substitutions): the key was not found. 
""" if isinstance(data, str): - for key, value in substitutions.iteritems(): + for key, value in substitutions.items(): data = data.replace('$(%s)' % key, value) return data if isinstance(data, list): diff --git a/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py b/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py index ca67b122f0b9b..9f1f547b90775 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py @@ -16,6 +16,7 @@ import gyp.MSVSUtil import gyp.MSVSVersion +PY3 = bytes != str windows_quoter_regex = re.compile(r'(\\*)"') @@ -126,7 +127,10 @@ def _FindDirectXInstallation(): # Setup params to pass to and attempt to launch reg.exe. cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s'] p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - for line in p.communicate()[0].splitlines(): + stdout = p.communicate()[0] + if PY3: + stdout = stdout.decode('utf-8') + for line in stdout.splitlines(): if 'InstallPath' in line: dxsdk_dir = line.split(' ')[3] + "\\" @@ -205,7 +209,7 @@ def __init__(self, spec, generator_flags): configs = spec['configurations'] for field, default in supported_fields: setattr(self, field, {}) - for configname, config in configs.iteritems(): + for configname, config in configs.items(): getattr(self, field)[configname] = config.get(field, default()) self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.']) @@ -237,7 +241,11 @@ def GetExtension(self): def GetVSMacroEnv(self, base_to_build=None, config=None): """Get a dict of variables mapping internal VS macro names to their gyp equivalents.""" - target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64' + target_arch = self.GetArch(config) + if target_arch == 'x86': + target_platform = 'Win32' + else: + target_platform = target_arch target_name = self.spec.get('product_prefix', '') + \ self.spec.get('product_name', self.spec['target_name']) target_dir = base_to_build + '\\' if base_to_build else '' @@ -299,7 +307,7 @@ def GetArch(self, config): if not platform: # If no specific override, use the configuration's. platform = configuration_platform # Map from platform to architecture. - return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86') + return {'Win32': 'x86', 'x64': 'x64', 'ARM64': 'arm64'}.get(platform, 'x86') def _TargetConfig(self, config): """Returns the target-specific configuration.""" @@ -371,7 +379,7 @@ def GetCompilerPdbName(self, config, expand_special): return pdbname def GetMapFileName(self, config, expand_special): - """Gets the explicitly overriden map file name for a target or returns None + """Gets the explicitly overridden map file name for a target or returns None if it's not set.""" config = self._TargetConfig(config) map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config) @@ -563,7 +571,10 @@ def GetLdflags(self, config, gyp_to_build_path, expand_special, 'VCLinkerTool', append=ldflags) self._GetDefFileAsLdflags(ldflags, gyp_to_build_path) ld('GenerateDebugInformation', map={'true': '/DEBUG'}) - ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'}, + # TODO: These 'map' values come from machineTypeOption enum, + # and does not have an official value for ARM64 in VS2017 (yet). + # It needs to verify the ARM64 value when machineTypeOption is updated. 
+ ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM', '18': 'ARM64'}, prefix='/MACHINE:') ldflags.extend(self._GetAdditionalLibraryDirectories( 'VCLinkerTool', config, gyp_to_build_path)) @@ -860,7 +871,9 @@ def midl(name, default=None): ('iid', iid), ('proxy', proxy)] # TODO(scottmg): Are there configuration settings to set these flags? - target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64' + target_platform = self.GetArch(config) + if target_platform == 'x86': + target_platform = 'win32' flags = ['/char', 'signed', '/env', target_platform, '/Oicf'] return outdir, output, variables, flags @@ -941,7 +954,7 @@ def ExpandMacros(string, expansions): """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv for the canonical way to retrieve a suitable dict.""" if '$' in string: - for old, new in expansions.iteritems(): + for old, new in expansions.items(): assert '$(' not in new, new string = string.replace(old, new) return string @@ -985,7 +998,7 @@ def _FormatAsEnvironmentBlock(envvar_dict): CreateProcess documentation for more details.""" block = '' nul = '\0' - for key, value in envvar_dict.iteritems(): + for key, value in envvar_dict.items(): block += key + '=' + value + nul block += nul return block @@ -1029,6 +1042,8 @@ def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, popen = subprocess.Popen( args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) variables, _ = popen.communicate() + if PY3: + variables = variables.decode('utf-8') env = _ExtractImportantEnvironment(variables) # Inject system includes from gyp files into INCLUDE. @@ -1048,6 +1063,8 @@ def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, 'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i')) popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE) output, _ = popen.communicate() + if PY3: + output = output.decode('utf-8') cl_paths[arch] = _ExtractCLPath(output) return cl_paths diff --git a/node_modules/node-gyp/gyp/pylib/gyp/ordered_dict.py b/node_modules/node-gyp/gyp/pylib/gyp/ordered_dict.py deleted file mode 100644 index a1e89f9199087..0000000000000 --- a/node_modules/node-gyp/gyp/pylib/gyp/ordered_dict.py +++ /dev/null @@ -1,289 +0,0 @@ -# Unmodified from http://code.activestate.com/recipes/576693/ -# other than to add MIT license header (as specified on page, but not in code). -# Linked from Python documentation here: -# http://docs.python.org/2/library/collections.html#collections.OrderedDict -# -# This should be deleted once Py2.7 is available on all bots, see -# http://crbug.com/241769. -# -# Copyright (c) 2009 Raymond Hettinger. -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. - -# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. -# Passes Python2.7's test suite and incorporates all the latest updates. - -try: - from thread import get_ident as _get_ident -except ImportError: - from dummy_thread import get_ident as _get_ident - -try: - from _abcoll import KeysView, ValuesView, ItemsView -except ImportError: - pass - - -class OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as for regular dictionaries. - - # The internal self.__map dictionary maps keys to links in a doubly linked list. - # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # Each link is stored as a list of length three: [PREV, NEXT, KEY]. - - def __init__(self, *args, **kwds): - '''Initialize an ordered dictionary. Signature is the same as for - regular dictionaries, but keyword arguments are not recommended - because their insertion order is arbitrary. - - ''' - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__root - except AttributeError: - self.__root = root = [] # sentinel node - root[:] = [root, root, None] - self.__map = {} - self.__update(*args, **kwds) - - def __setitem__(self, key, value, dict_setitem=dict.__setitem__): - 'od.__setitem__(i, y) <==> od[i]=y' - # Setting a new item creates a new link which goes at the end of the linked - # list, and the inherited dictionary is updated with the new key/value pair. - if key not in self: - root = self.__root - last = root[0] - last[1] = root[0] = self.__map[key] = [last, root, key] - dict_setitem(self, key, value) - - def __delitem__(self, key, dict_delitem=dict.__delitem__): - 'od.__delitem__(y) <==> del od[y]' - # Deleting an existing item uses self.__map to find the link which is - # then removed by updating the links in the predecessor and successor nodes. - dict_delitem(self, key) - link_prev, link_next, key = self.__map.pop(key) - link_prev[1] = link_next - link_next[0] = link_prev - - def __iter__(self): - 'od.__iter__() <==> iter(od)' - root = self.__root - curr = root[1] - while curr is not root: - yield curr[2] - curr = curr[1] - - def __reversed__(self): - 'od.__reversed__() <==> reversed(od)' - root = self.__root - curr = root[0] - while curr is not root: - yield curr[2] - curr = curr[0] - - def clear(self): - 'od.clear() -> None. Remove all items from od.' - try: - for node in self.__map.itervalues(): - del node[:] - root = self.__root - root[:] = [root, root, None] - self.__map.clear() - except AttributeError: - pass - dict.clear(self) - - def popitem(self, last=True): - '''od.popitem() -> (k, v), return and remove a (key, value) pair. - Pairs are returned in LIFO order if last is true or FIFO order if false. 
- - ''' - if not self: - raise KeyError('dictionary is empty') - root = self.__root - if last: - link = root[0] - link_prev = link[0] - link_prev[1] = root - root[0] = link_prev - else: - link = root[1] - link_next = link[1] - root[1] = link_next - link_next[0] = root - key = link[2] - del self.__map[key] - value = dict.pop(self, key) - return key, value - - # -- the following methods do not depend on the internal structure -- - - def keys(self): - 'od.keys() -> list of keys in od' - return list(self) - - def values(self): - 'od.values() -> list of values in od' - return [self[key] for key in self] - - def items(self): - 'od.items() -> list of (key, value) pairs in od' - return [(key, self[key]) for key in self] - - def iterkeys(self): - 'od.iterkeys() -> an iterator over the keys in od' - return iter(self) - - def itervalues(self): - 'od.itervalues -> an iterator over the values in od' - for k in self: - yield self[k] - - def iteritems(self): - 'od.iteritems -> an iterator over the (key, value) items in od' - for k in self: - yield (k, self[k]) - - # Suppress 'OrderedDict.update: Method has no argument': - # pylint: disable=E0211 - def update(*args, **kwds): - '''od.update(E, **F) -> None. Update od from dict/iterable E and F. - - If E is a dict instance, does: for k in E: od[k] = E[k] - If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] - Or if E is an iterable of items, does: for k, v in E: od[k] = v - In either case, this is followed by: for k, v in F.items(): od[k] = v - - ''' - if len(args) > 2: - raise TypeError('update() takes at most 2 positional ' - 'arguments (%d given)' % (len(args),)) - elif not args: - raise TypeError('update() takes at least 1 argument (0 given)') - self = args[0] - # Make progressively weaker assumptions about "other" - other = () - if len(args) == 2: - other = args[1] - if isinstance(other, dict): - for key in other: - self[key] = other[key] - elif hasattr(other, 'keys'): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value - for key, value in kwds.items(): - self[key] = value - - __update = update # let subclasses override update without breaking __init__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. - - ''' - if key in self: - result = self[key] - del self[key] - return result - if default is self.__marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' - if key in self: - return self[key] - self[key] = default - return default - - def __repr__(self, _repr_running={}): - 'od.__repr__() <==> repr(od)' - call_key = id(self), _get_ident() - if call_key in _repr_running: - return '...' 
- _repr_running[call_key] = 1 - try: - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, self.items()) - finally: - del _repr_running[call_key] - - def __reduce__(self): - 'Return state information for pickling' - items = [[k, self[k]] for k in self] - inst_dict = vars(self).copy() - for k in vars(OrderedDict()): - inst_dict.pop(k, None) - if inst_dict: - return (self.__class__, (items,), inst_dict) - return self.__class__, (items,) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S - and values equal to v (which defaults to None). - - ''' - d = cls() - for key in iterable: - d[key] = value - return d - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. - - ''' - if isinstance(other, OrderedDict): - return len(self)==len(other) and self.items() == other.items() - return dict.__eq__(self, other) - - def __ne__(self, other): - return not self == other - - # -- the following methods are only used in Python 2.7 -- - - def viewkeys(self): - "od.viewkeys() -> a set-like object providing a view on od's keys" - return KeysView(self) - - def viewvalues(self): - "od.viewvalues() -> an object providing a view on od's values" - return ValuesView(self) - - def viewitems(self): - "od.viewitems() -> a set-like object providing a view on od's items" - return ItemsView(self) - diff --git a/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py b/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py index 74c98c5a79594..94a6f17dab5d1 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py @@ -28,8 +28,12 @@ def deepcopy(x): def _deepcopy_atomic(x): return x -for x in (type(None), int, long, float, - bool, str, unicode, type): +try: + types = bool, float, int, str, type, type(None), long, unicode +except NameError: # Python 3 + types = bool, float, int, str, type, type(None) + +for x in types: d[x] = _deepcopy_atomic def _deepcopy_list(x): @@ -38,7 +42,7 @@ def _deepcopy_list(x): def _deepcopy_dict(x): y = {} - for key, value in x.iteritems(): + for key, value in x.items(): y[deepcopy(key)] = deepcopy(value) return y d[dict] = _deepcopy_dict diff --git a/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py b/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py index bb6f1ea436f25..0a6daf203931e 100755 --- a/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py @@ -9,6 +9,8 @@ These functions are executed via gyp-win-tool when using the ninja generator. """ +from __future__ import print_function + import os import re import shutil @@ -18,6 +20,7 @@ import sys BASE_DIR = os.path.dirname(os.path.abspath(__file__)) +PY3 = bytes != str # A regex matching an argument corresponding to the output filename passed to # link.exe. 
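simple_copy.py builds its table of atomic types inside try/except because long and unicode raise NameError on Python 3; the fallback tuple simply omits them. The same trick in isolation, with a toy copier around it (the function is illustrative):

try:
    atomic_types = (bool, float, int, str, type, type(None), long, unicode)  # Python 2
except NameError:                                                            # Python 3
    atomic_types = (bool, float, int, str, type, type(None))

def copy_atomic(value):
    # Atomic values can be returned as-is; anything else needs a real deepcopy.
    if type(value) in atomic_types:
        return value
    raise TypeError('no copier for %r' % type(value))

assert copy_atomic('x') == 'x'
assert copy_atomic(None) is None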
@@ -122,11 +125,13 @@ def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args): stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out, _ = link.communicate() + if PY3: + out = out.decode('utf-8') for line in out.splitlines(): if (not line.startswith(' Creating library ') and not line.startswith('Generating code') and not line.startswith('Finished generating code')): - print line + print(line) return link.returncode def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname, @@ -213,9 +218,11 @@ def ExecManifestWrapper(self, arch, *args): popen = subprocess.Popen(args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out, _ = popen.communicate() + if PY3: + out = out.decode('utf-8') for line in out.splitlines(): if line and 'manifest authoring warning 81010002' not in line: - print line + print(line) return popen.returncode def ExecManifestToRc(self, arch, *args): @@ -245,6 +252,8 @@ def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl, popen = subprocess.Popen(args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out, _ = popen.communicate() + if PY3: + out = out.decode('utf-8') # Filter junk out of stdout, and write filtered versions. Output we want # to filter is pairs of lines that look like this: # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl @@ -255,7 +264,7 @@ def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl, for x in lines if x.startswith(prefixes)) for line in lines: if not line.startswith(prefixes) and line not in processing: - print line + print(line) return popen.returncode def ExecAsmWrapper(self, arch, *args): @@ -264,12 +273,14 @@ def ExecAsmWrapper(self, arch, *args): popen = subprocess.Popen(args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out, _ = popen.communicate() + if PY3: + out = out.decode('utf-8') for line in out.splitlines(): if (not line.startswith('Copyright (C) Microsoft Corporation') and not line.startswith('Microsoft (R) Macro Assembler') and not line.startswith(' Assembling: ') and line): - print line + print(line) return popen.returncode def ExecRcWrapper(self, arch, *args): @@ -279,11 +290,13 @@ def ExecRcWrapper(self, arch, *args): popen = subprocess.Popen(args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out, _ = popen.communicate() + if PY3: + out = out.decode('utf-8') for line in out.splitlines(): if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and not line.startswith('Copyright (C) Microsoft Corporation') and line): - print line + print(line) return popen.returncode def ExecActionWrapper(self, arch, rspfile, *dir): @@ -292,7 +305,7 @@ def ExecActionWrapper(self, arch, rspfile, *dir): env = self._GetEnv(arch) # TODO(scottmg): This is a temporary hack to get some specific variables # through to actions that are set after gyp-time. http://crbug.com/333738. - for k, v in os.environ.iteritems(): + for k, v in os.environ.items(): if k not in env: env[k] = v args = open(rspfile).read() diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py b/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py index 69f7d97cfa69a..60171649900aa 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py @@ -7,6 +7,8 @@ other build systems, such as make and ninja. 
""" +from __future__ import print_function + import copy import gyp.common import os @@ -18,6 +20,8 @@ import tempfile from gyp.common import GypError +PY3 = bytes != str + # Populated lazily by XcodeVersion, for efficiency, and to fix an issue when # "xcodebuild" is called too quickly (it has been found to return incorrect # version number). @@ -73,7 +77,7 @@ def _ExpandArchs(self, archs, sdkroot): if arch not in expanded_archs: expanded_archs.append(arch) except KeyError as e: - print 'Warning: Ignoring unsupported variable "%s".' % variable + print('Warning: Ignoring unsupported variable "%s".' % variable) elif arch not in expanded_archs: expanded_archs.append(arch) return expanded_archs @@ -168,7 +172,7 @@ def __init__(self, spec): # the same for all configs are implicitly per-target settings. self.xcode_settings = {} configs = spec['configurations'] - for configname, config in configs.iteritems(): + for configname, config in configs.items(): self.xcode_settings[configname] = config.get('xcode_settings', {}) self._ConvertConditionalKeys(configname) if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET', @@ -194,8 +198,8 @@ def _ConvertConditionalKeys(self, configname): new_key = key.split("[")[0] settings[new_key] = settings[key] else: - print 'Warning: Conditional keys not implemented, ignoring:', \ - ' '.join(conditional_keys) + print('Warning: Conditional keys not implemented, ignoring:', \ + ' '.join(conditional_keys)) del settings[key] def _Settings(self): @@ -213,7 +217,7 @@ def _Appendf(self, lst, test_key, format_str, default=None): def _WarnUnimplemented(self, test_key): if test_key in self._Settings(): - print 'Warning: Ignoring not yet implemented key "%s".' % test_key + print('Warning: Ignoring not yet implemented key "%s".' % test_key) def IsBinaryOutputFormat(self, configname): default = "binary" if self.isIOS else "xml" @@ -433,7 +437,7 @@ def _GetSdkVersionInfoItem(self, sdk, infoitem): # most sensible route and should still do the right thing. try: return GetStdoutQuiet(['xcodebuild', '-version', '-sdk', sdk, infoitem]) - except: + except GypError: pass def _SdkRoot(self, configname): @@ -776,7 +780,7 @@ def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None): product_dir: The directory where products such static and dynamic libraries are placed. This is added to the library search path. gyp_to_build_path: A function that converts paths relative to the - current gyp file to paths relative to the build direcotry. + current gyp file to paths relative to the build directory. """ self.configname = configname ldflags = [] @@ -842,7 +846,7 @@ def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None): if self._IsXCTest(): platform_root = self._XcodePlatformPath(configname) if platform_root: - cflags.append('-F' + platform_root + '/Developer/Library/Frameworks/') + ldflags.append('-F' + platform_root + '/Developer/Library/Frameworks/') is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension() if sdk_root and is_extension: @@ -851,7 +855,8 @@ def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None): # These flags reflect the compilation options used by xcode to compile # extensions. 
ldflags.append('-lpkstart') - if XcodeVersion() < '0900': + xcode_version, _ = XcodeVersion() + if xcode_version < '0900': ldflags.append(sdk_root + '/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit') ldflags.append('-fapplication-extension') @@ -889,7 +894,7 @@ def GetPerTargetSettings(self): result = dict(self.xcode_settings[configname]) first_pass = False else: - for key, value in self.xcode_settings[configname].iteritems(): + for key, value in self.xcode_settings[configname].items(): if key not in result: continue elif result[key] != value: @@ -921,7 +926,7 @@ def GetPerTargetSetting(self, setting, default=None): def _GetStripPostbuilds(self, configname, output_binary, quiet): """Returns a list of shell commands that contain the shell commands - neccessary to strip this target's binary. These should be run as postbuilds + necessary to strip this target's binary. These should be run as postbuilds before the actual postbuilds run.""" self.configname = configname @@ -955,7 +960,7 @@ def _GetStripPostbuilds(self, configname, output_binary, quiet): def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet): """Returns a list of shell commands that contain the shell commands - neccessary to massage this target's debug information. These should be run + necessary to massage this target's debug information. These should be run as postbuilds before the actual postbuilds run.""" self.configname = configname @@ -997,8 +1002,8 @@ def _GetIOSPostbuilds(self, configname, output_binary): unimpl = ['OTHER_CODE_SIGN_FLAGS'] unimpl = set(unimpl) & set(self.xcode_settings[configname].keys()) if unimpl: - print 'Warning: Some codesign keys not implemented, ignoring: %s' % ( - ', '.join(sorted(unimpl))) + print('Warning: Some codesign keys not implemented, ignoring: %s' % ( + ', '.join(sorted(unimpl)))) return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % ( os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key, @@ -1050,7 +1055,7 @@ def _AdjustLibrary(self, library, config_name=None): # "/usr/lib" libraries, is do "-L/usr/lib -lname" which is dependent on the # library order and cause collision when building Chrome. # - # Instead substitude ".tbd" to ".dylib" in the generated project when the + # Instead substitute ".tbd" to ".dylib" in the generated project when the # following conditions are both true: # - library is referenced in the gyp file as "$(SDKROOT)/**/*.dylib", # - the ".dylib" file does not exists but a ".tbd" file do. @@ -1084,15 +1089,15 @@ def GetExtraPlistItems(self, configname=None): cache = {} cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild() - xcode, xcode_build = XcodeVersion() - cache['DTXcode'] = xcode + xcode_version, xcode_build = XcodeVersion() + cache['DTXcode'] = xcode_version cache['DTXcodeBuild'] = xcode_build sdk_root = self._SdkRoot(configname) if not sdk_root: sdk_root = self._DefaultSdkRoot() cache['DTSDKName'] = sdk_root - if xcode >= '0430': + if xcode_version >= '0430': cache['DTSDKBuild'] = self._GetSdkVersionInfoItem( sdk_root, 'ProductBuildVersion') else: @@ -1122,7 +1127,7 @@ def _DefaultSdkRoot(self): project, then the environment variable was empty. Starting with this version, Xcode uses the name of the newest SDK installed. 
""" - xcode_version, xcode_build = XcodeVersion() + xcode_version, _ = XcodeVersion() if xcode_version < '0500': return '' default_sdk_path = self._XcodeSdkPath('') @@ -1131,7 +1136,7 @@ def _DefaultSdkRoot(self): return default_sdk_root try: all_sdks = GetStdout(['xcodebuild', '-showsdks']) - except: + except GypError: # If xcodebuild fails, there will be no valid SDKs return '' for line in all_sdks.splitlines(): @@ -1259,10 +1264,12 @@ def XcodeVersion(): # Xcode 3.2.6 # Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0 # BuildVersion: 10M2518 - # Convert that to '0463', '4H1503'. + # Convert that to ('0463', '4H1503') or ('0326', '10M2518'). global XCODE_VERSION_CACHE if XCODE_VERSION_CACHE: return XCODE_VERSION_CACHE + version = "" + build = "" try: version_list = GetStdoutQuiet(['xcodebuild', '-version']).splitlines() # In some circumstances xcodebuild exits 0 but doesn't return @@ -1272,21 +1279,16 @@ def XcodeVersion(): # checking that version. if len(version_list) < 2: raise GypError("xcodebuild returned unexpected results") - except: - version = CLTVersion() - if version: - version = re.match(r'(\d+\.\d+\.?\d*)', version).groups()[0] - else: + version = version_list[0].split()[-1] # Last word on first line + build = version_list[-1].split()[-1] # Last word on last line + except GypError: # Xcode not installed so look for XCode Command Line Tools + version = CLTVersion() # macOS Catalina returns 11.0.0.0.1.1567737322 + if not version: raise GypError("No Xcode or CLT version detected!") - # The CLT has no build information, so we return an empty string. - version_list = [version, ''] - version = version_list[0] - build = version_list[-1] - # Be careful to convert "4.2" to "0420": - version = version.split()[-1].replace('.', '') - version = (version + '0' * (3 - len(version))).zfill(4) - if build: - build = build.split()[-1] + # Be careful to convert "4.2.3" to "0423" and "11.0.0" to "1100": + version = version.split(".")[:3] # Just major, minor, micro + version[0] = version[0].zfill(2) # Add a leading zero if major is one digit + version = ("".join(version) + "00")[:4] # Limit to exactly four characters XCODE_VERSION_CACHE = (version, build) return XCODE_VERSION_CACHE @@ -1310,7 +1312,7 @@ def CLTVersion(): try: output = GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key]) return re.search(regex, output).groupdict()['version'] - except: + except GypError: continue @@ -1320,6 +1322,8 @@ def GetStdoutQuiet(cmdlist): Raises |GypError| if the command return with a non-zero return code.""" job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out = job.communicate()[0] + if PY3: + out = out.decode("utf-8") if job.returncode != 0: raise GypError('Error %d running %s' % (job.returncode, cmdlist[0])) return out.rstrip('\n') @@ -1330,6 +1334,8 @@ def GetStdout(cmdlist): Raises |GypError| if the command return with a non-zero return code.""" job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE) out = job.communicate()[0] + if PY3: + out = out.decode("utf-8") if job.returncode != 0: sys.stderr.write(out + '\n') raise GypError('Error %d running %s' % (job.returncode, cmdlist[0])) @@ -1339,7 +1345,7 @@ def GetStdout(cmdlist): def MergeGlobalXcodeSettingsToSpec(global_dict, spec): """Merges the global xcode_settings dictionary into each configuration of the target represented by spec. For keys that are both in the global and the local - xcode_settings dict, the local key gets precendence. + xcode_settings dict, the local key gets precedence. 
""" # The xcode generator special-cases global xcode_settings and does something # that amounts to merging in the global xcode_settings into each local @@ -1382,7 +1388,7 @@ def GetMacBundleResources(product_dir, xcode_settings, resources): output = dest # The make generator doesn't support it, so forbid it everywhere - # to keep the generators more interchangable. + # to keep the generators more interchangeable. assert ' ' not in res, ( "Spaces in resource filenames not supported (%s)" % res) @@ -1424,14 +1430,14 @@ def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path): relative to the build directory. xcode_settings: The XcodeSettings of the current target. gyp_to_build_path: A function that converts paths relative to the - current gyp file to paths relative to the build direcotry. + current gyp file to paths relative to the build directory. """ info_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE') if not info_plist: return None, None, [], {} # The make generator doesn't support it, so forbid it everywhere - # to keep the generators more interchangable. + # to keep the generators more interchangeable. assert ' ' not in info_plist, ( "Spaces in Info.plist filenames not supported (%s)" % info_plist) @@ -1516,7 +1522,8 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration, install_name_base = xcode_settings.GetInstallNameBase() if install_name_base: env['DYLIB_INSTALL_NAME_BASE'] = install_name_base - if XcodeVersion() >= '0500' and not env.get('SDKROOT'): + xcode_version, _ = XcodeVersion() + if xcode_version >= '0500' and not env.get('SDKROOT'): sdk_root = xcode_settings._SdkRoot(configuration) if not sdk_root: sdk_root = xcode_settings._XcodeSdkPath('') @@ -1597,7 +1604,7 @@ def GetEdges(node): order = gyp.common.TopologicallySorted(env.keys(), GetEdges) order.reverse() return order - except gyp.common.CycleError, e: + except gyp.common.CycleError as e: raise GypError( 'Xcode environment variables are cyclically dependent: ' + str(e.nodes)) @@ -1634,10 +1641,10 @@ def _HasIOSTarget(targets): def _AddIOSDeviceConfigurations(targets): """Clone all targets and append -iphoneos to the name. 
Configure these targets to build for iOS devices and use correct architectures for those builds.""" - for target_dict in targets.itervalues(): + for target_dict in targets.values(): toolset = target_dict['toolset'] configs = target_dict['configurations'] - for config_name, config_dict in dict(configs).iteritems(): + for config_name, config_dict in dict(configs).items(): iphoneos_config_dict = copy.deepcopy(config_dict) configs[config_name + '-iphoneos'] = iphoneos_config_dict configs[config_name + '-iphonesimulator'] = config_dict diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py b/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py index 3820d6bf04817..d70eddc90a57d 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py @@ -28,7 +28,7 @@ def _WriteWorkspace(main_gyp, sources_gyp, params): workspace_path = os.path.join(options.generator_output, workspace_path) try: os.makedirs(workspace_path) - except OSError, e: + except OSError as e: if e.errno != errno.EEXIST: raise output_string = '\n' + \ @@ -85,7 +85,7 @@ def _TargetFromSpec(old_spec, params): "%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel if 'configurations' in old_spec: - for config in old_spec['configurations'].iterkeys(): + for config in old_spec['configurations']: old_xcode_settings = \ old_spec['configurations'][config].get('xcode_settings', {}) if 'IPHONEOS_DEPLOYMENT_TARGET' in old_xcode_settings: @@ -161,7 +161,7 @@ def CreateWrapper(target_list, target_dicts, data, params): params: Dict of global options for gyp. """ orig_gyp = params['build_files'][0] - for gyp_name, gyp_dict in data.iteritems(): + for gyp_name, gyp_dict in data.items(): if gyp_name == orig_gyp: depth = gyp_dict['_DEPTH'] @@ -228,7 +228,7 @@ def CreateWrapper(target_list, target_dicts, data, params): sources_target['configurations'] = {'Default': { 'include_dirs': [ depth ] } } sources = [] - for target, target_dict in target_dicts.iteritems(): + for target, target_dict in target_dicts.items(): base = os.path.dirname(target) files = target_dict.get('sources', []) + \ target_dict.get('mac_bundle_resources', []) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py b/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py index d08b7f777002f..93ffca7c90156 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py @@ -138,21 +138,18 @@ """ import gyp.common +import hashlib import posixpath import re import struct import sys -# hashlib is supplied as of Python 2.5 as the replacement interface for sha -# and other secure hashes. In 2.6, sha is deprecated. Import hashlib if -# available, avoiding a deprecation warning under 2.6. Import sha otherwise, -# preserving 2.4 compatibility. try: - import hashlib - _new_sha1 = hashlib.sha1 -except ImportError: - import sha - _new_sha1 = sha.new + basestring, cmp, unicode +except NameError: # Python 3 + basestring = unicode = str + def cmp(x, y): + return (x > y) - (x < y) # See XCObject._EncodeString. This pattern is used to determine when a string @@ -223,7 +220,7 @@ class XCObject(object): an empty string ("", in the case of property_type str) or list ([], in the case of is_list True) from being set for the property. - default: Optional. If is_requried is True, default may be set + default: Optional. If is_required is True, default may be set to provide a default value for objects that do not supply their own value. 
If is_required is True and default is not provided, users of the class must supply their own @@ -314,7 +311,7 @@ def Copy(self): """ that = self.__class__(id=self.id, parent=self.parent) - for key, value in self._properties.iteritems(): + for key, value in self._properties.items(): is_strong = self._schema[key][2] if isinstance(value, XCObject): @@ -324,8 +321,7 @@ def Copy(self): that._properties[key] = new_value else: that._properties[key] = value - elif isinstance(value, str) or isinstance(value, unicode) or \ - isinstance(value, int): + elif isinstance(value, (basestring, int)): that._properties[key] = value elif isinstance(value, list): if is_strong: @@ -422,7 +418,7 @@ def _HashUpdate(hash, data): hash.update(data) if seed_hash is None: - seed_hash = _new_sha1() + seed_hash = hashlib.sha1() hash = seed_hash.copy() @@ -452,7 +448,7 @@ def _HashUpdate(hash, data): digest_int_count = hash.digest_size / 4 digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest()) id_ints = [0, 0, 0] - for index in xrange(0, digest_int_count): + for index in range(0, digest_int_count): id_ints[index % 3] ^= digest_ints[index] self.id = '%08X%08X%08X' % tuple(id_ints) @@ -475,7 +471,7 @@ def Children(self): """Returns a list of all of this object's owned (strong) children.""" children = [] - for property, attributes in self._schema.iteritems(): + for property, attributes in self._schema.items(): (is_list, property_type, is_strong) = attributes[0:3] if is_strong and property in self._properties: if not is_list: @@ -622,7 +618,7 @@ def _XCPrintableValue(self, tabs, value, flatten_list=False): printable += end_tabs + ')' elif isinstance(value, dict): printable = '{' + sep - for item_key, item_value in sorted(value.iteritems()): + for item_key, item_value in sorted(value.items()): printable += element_tabs + \ self._XCPrintableValue(tabs + 1, item_key, flatten_list) + ' = ' + \ self._XCPrintableValue(tabs + 1, item_value, flatten_list) + ';' + \ @@ -691,7 +687,7 @@ def _XCKVPrint(self, file, tabs, key, value): printable_value[0] == '"' and printable_value[-1] == '"': printable_value = printable_value[1:-1] printable += printable_key + ' = ' + printable_value + ';' + after_kv - except TypeError, e: + except TypeError as e: gyp.common.ExceptionAppend(e, 'while printing key "%s"' % key) raise @@ -730,7 +726,7 @@ def Print(self, file=sys.stdout): self._XCKVPrint(file, 3, 'isa', self.__class__.__name__) # The remaining elements of an object dictionary are sorted alphabetically. - for property, value in sorted(self._properties.iteritems()): + for property, value in sorted(self._properties.items()): self._XCKVPrint(file, 3, property, value) # End the object. @@ -752,7 +748,7 @@ def UpdateProperties(self, properties, do_copy=False): if properties is None: return - for property, value in properties.iteritems(): + for property, value in properties.items(): # Make sure the property is in the schema. if not property in self._schema: raise KeyError(property + ' not in ' + self.__class__.__name__) @@ -788,8 +784,7 @@ def UpdateProperties(self, properties, do_copy=False): self._properties[property] = value.Copy() else: self._properties[property] = value - elif isinstance(value, str) or isinstance(value, unicode) or \ - isinstance(value, int): + elif isinstance(value, (basestring, int)): self._properties[property] = value elif isinstance(value, list): if is_strong: @@ -865,7 +860,7 @@ def VerifyHasRequiredProperties(self): # TODO(mark): A stronger verification mechanism is needed. 
Some # subclasses need to perform validation beyond what the schema can enforce. - for property, attributes in self._schema.iteritems(): + for property, attributes in self._schema.items(): (is_list, property_type, is_strong, is_required) = attributes[0:4] if is_required and not property in self._properties: raise KeyError(self.__class__.__name__ + ' requires ' + property) @@ -875,7 +870,7 @@ def _SetDefaultsFromSchema(self): overwrite properties that have already been set.""" defaults = {} - for property, attributes in self._schema.iteritems(): + for property, attributes in self._schema.items(): (is_list, property_type, is_strong, is_required) = attributes[0:4] if is_required and len(attributes) >= 5 and \ not property in self._properties: @@ -1426,7 +1421,7 @@ def PathHashables(self): xche = self while xche != None and isinstance(xche, XCHierarchicalElement): xche_hashables = xche.Hashables() - for index in xrange(0, len(xche_hashables)): + for index in range(0, len(xche_hashables)): hashables.insert(index, xche_hashables[index]) xche = xche.parent return hashables @@ -2401,7 +2396,7 @@ def HeadersPhase(self): # The headers phase should come before the resources, sources, and # frameworks phases, if any. insert_at = len(self._properties['buildPhases']) - for index in xrange(0, len(self._properties['buildPhases'])): + for index in range(0, len(self._properties['buildPhases'])): phase = self._properties['buildPhases'][index] if isinstance(phase, PBXResourcesBuildPhase) or \ isinstance(phase, PBXSourcesBuildPhase) or \ @@ -2422,7 +2417,7 @@ def ResourcesPhase(self): # The resources phase should come before the sources and frameworks # phases, if any. insert_at = len(self._properties['buildPhases']) - for index in xrange(0, len(self._properties['buildPhases'])): + for index in range(0, len(self._properties['buildPhases'])): phase = self._properties['buildPhases'][index] if isinstance(phase, PBXSourcesBuildPhase) or \ isinstance(phase, PBXFrameworksBuildPhase): @@ -2844,7 +2839,7 @@ def CompareProducts(x, y, remote_products): # determine the sort order. return cmp(x_index, y_index) - for other_pbxproject, ref_dict in self._other_pbxprojects.iteritems(): + for other_pbxproject, ref_dict in self._other_pbxprojects.items(): # Build up a list of products in the remote project file, ordered the # same as the targets that produce them. remote_products = [] @@ -2889,7 +2884,7 @@ def Print(self, file=sys.stdout): self._XCPrint(file, 0, '{ ') else: self._XCPrint(file, 0, '{\n') - for property, value in sorted(self._properties.iteritems(), + for property, value in sorted(self._properties.items(), cmp=lambda x, y: cmp(x, y)): if property == 'objects': self._PrintObjects(file) diff --git a/node_modules/node-gyp/gyp/tools/graphviz.py b/node_modules/node-gyp/gyp/tools/graphviz.py index 326ae221cf8e8..538b059da4a7e 100755 --- a/node_modules/node-gyp/gyp/tools/graphviz.py +++ b/node_modules/node-gyp/gyp/tools/graphviz.py @@ -8,6 +8,8 @@ generate input suitable for graphviz to render a dependency graph of targets.""" +from __future__ import print_function + import collections import json import sys @@ -50,9 +52,9 @@ def WriteGraph(edges): build_file, target_name, toolset = ParseTarget(src) files[build_file].append(src) - print 'digraph D {' - print ' fontsize=8' # Used by subgraphs. - print ' node [fontsize=8]' + print('digraph D {') + print(' fontsize=8') # Used by subgraphs. + print(' node [fontsize=8]') # Output nodes by file. 
We must first write out each node within # its file grouping before writing out any edges that may refer @@ -63,31 +65,31 @@ def WriteGraph(edges): # the display by making it a box without an internal node. target = targets[0] build_file, target_name, toolset = ParseTarget(target) - print ' "%s" [shape=box, label="%s\\n%s"]' % (target, filename, - target_name) + print(' "%s" [shape=box, label="%s\\n%s"]' % (target, filename, + target_name)) else: # Group multiple nodes together in a subgraph. - print ' subgraph "cluster_%s" {' % filename - print ' label = "%s"' % filename + print(' subgraph "cluster_%s" {' % filename) + print(' label = "%s"' % filename) for target in targets: build_file, target_name, toolset = ParseTarget(target) - print ' "%s" [label="%s"]' % (target, target_name) - print ' }' + print(' "%s" [label="%s"]' % (target, target_name)) + print(' }') # Now that we've placed all the nodes within subgraphs, output all # the edges between nodes. for src, dsts in edges.items(): for dst in dsts: - print ' "%s" -> "%s"' % (src, dst) + print(' "%s" -> "%s"' % (src, dst)) - print '}' + print('}') def main(): if len(sys.argv) < 2: - print >>sys.stderr, __doc__ - print >>sys.stderr - print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0]) + print(__doc__, file=sys.stderr) + print(file=sys.stderr) + print('usage: %s target1 target2...' % (sys.argv[0]), file=sys.stderr) return 1 edges = LoadEdges('dump.json', sys.argv[1:]) diff --git a/node_modules/node-gyp/gyp/tools/pretty_gyp.py b/node_modules/node-gyp/gyp/tools/pretty_gyp.py index c51d35872cce6..633048a59ad28 100755 --- a/node_modules/node-gyp/gyp/tools/pretty_gyp.py +++ b/node_modules/node-gyp/gyp/tools/pretty_gyp.py @@ -6,6 +6,8 @@ """Pretty-prints the contents of a GYP file.""" +from __future__ import print_function + import sys import re @@ -16,7 +18,7 @@ # Regex to remove quoted strings when we're counting braces. # It takes into account quoted quotes, and makes sure that the quotes match. # NOTE: It does not handle quotes that span more than one line, or -# cases where an escaped quote is preceeded by an escaped backslash. +# cases where an escaped quote is preceded by an escaped backslash. QUOTE_RE_STR = r'(?P[\'"])(.*?)(? 0: (brace_diff, after) = count_braces(line) if brace_diff != 0: if after: - print " " * (basic_offset * indent) + line + print(" " * (basic_offset * indent) + line) indent += brace_diff else: indent += brace_diff - print " " * (basic_offset * indent) + line + print(" " * (basic_offset * indent) + line) else: - print " " * (basic_offset * indent) + line + print(" " * (basic_offset * indent) + line) else: - print "" + print("") last_line = line diff --git a/node_modules/node-gyp/gyp/tools/pretty_sln.py b/node_modules/node-gyp/gyp/tools/pretty_sln.py index ca8cf4ad3fb83..196566fb9e447 100755 --- a/node_modules/node-gyp/gyp/tools/pretty_sln.py +++ b/node_modules/node-gyp/gyp/tools/pretty_sln.py @@ -12,13 +12,15 @@ Then it outputs a possible build order. """ -__author__ = 'nsylvain (Nicolas Sylvain)' +from __future__ import print_function import os import re import sys import pretty_vcproj +__author__ = 'nsylvain (Nicolas Sylvain)' + def BuildProject(project, built, projects, deps): # if all dependencies are done, we can build it, otherwise we try to build the # dependency. 
@@ -26,7 +28,7 @@ def BuildProject(project, built, projects, deps): for dep in deps[project]: if dep not in built: BuildProject(dep, built, projects, deps) - print project + print(project) built.append(project) def ParseSolution(solution_file): @@ -100,44 +102,44 @@ def ParseSolution(solution_file): return (projects, dependencies) def PrintDependencies(projects, deps): - print "---------------------------------------" - print "Dependencies for all projects" - print "---------------------------------------" - print "-- --" + print("---------------------------------------") + print("Dependencies for all projects") + print("---------------------------------------") + print("-- --") for (project, dep_list) in sorted(deps.items()): - print "Project : %s" % project - print "Path : %s" % projects[project][0] + print("Project : %s" % project) + print("Path : %s" % projects[project][0]) if dep_list: for dep in dep_list: - print " - %s" % dep - print "" + print(" - %s" % dep) + print("") - print "-- --" + print("-- --") def PrintBuildOrder(projects, deps): - print "---------------------------------------" - print "Build order " - print "---------------------------------------" - print "-- --" + print("---------------------------------------") + print("Build order ") + print("---------------------------------------") + print("-- --") built = [] for (project, _) in sorted(deps.items()): if project not in built: BuildProject(project, built, projects, deps) - print "-- --" + print("-- --") def PrintVCProj(projects): for project in projects: - print "-------------------------------------" - print "-------------------------------------" - print project - print project - print project - print "-------------------------------------" - print "-------------------------------------" + print("-------------------------------------") + print("-------------------------------------") + print(project) + print(project) + print(project) + print("-------------------------------------") + print("-------------------------------------") project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]), projects[project][2])) @@ -153,7 +155,7 @@ def PrintVCProj(projects): def main(): # check if we have exactly 1 parameter. if len(sys.argv) < 2: - print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0] + print('Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]) return 1 (projects, deps) = ParseSolution(sys.argv[1]) diff --git a/node_modules/node-gyp/gyp/tools/pretty_vcproj.py b/node_modules/node-gyp/gyp/tools/pretty_vcproj.py index 6099bd7cc4d8c..24e99282da9d9 100755 --- a/node_modules/node-gyp/gyp/tools/pretty_vcproj.py +++ b/node_modules/node-gyp/gyp/tools/pretty_vcproj.py @@ -12,7 +12,7 @@ It outputs the resulting xml to stdout. 
""" -__author__ = 'nsylvain (Nicolas Sylvain)' +from __future__ import print_function import os import sys @@ -20,6 +20,14 @@ from xml.dom.minidom import parse from xml.dom.minidom import Node +__author__ = 'nsylvain (Nicolas Sylvain)' + +try: + cmp +except NameError: + def cmp(x, y): + return (x > y) - (x < y) + REPLACEMENTS = dict() ARGUMENTS = None @@ -61,7 +69,7 @@ def get_string(node): def PrettyPrintNode(node, indent=0): if node.nodeType == Node.TEXT_NODE: if node.data.strip(): - print '%s%s' % (' '*indent, node.data.strip()) + print('%s%s' % (' '*indent, node.data.strip())) return if node.childNodes: @@ -73,23 +81,23 @@ def PrettyPrintNode(node, indent=0): # Print the main tag if attr_count == 0: - print '%s<%s>' % (' '*indent, node.nodeName) + print('%s<%s>' % (' '*indent, node.nodeName)) else: - print '%s<%s' % (' '*indent, node.nodeName) + print('%s<%s' % (' '*indent, node.nodeName)) all_attributes = [] for (name, value) in node.attributes.items(): all_attributes.append((name, value)) all_attributes.sort(CmpTuple()) for (name, value) in all_attributes: - print '%s %s="%s"' % (' '*indent, name, value) - print '%s>' % (' '*indent) + print('%s %s="%s"' % (' '*indent, name, value)) + print('%s>' % (' '*indent)) if node.nodeValue: - print '%s %s' % (' '*indent, node.nodeValue) + print('%s %s' % (' '*indent, node.nodeValue)) for sub_node in node.childNodes: PrettyPrintNode(sub_node, indent=indent+2) - print '%s' % (' '*indent, node.nodeName) + print('%s' % (' '*indent, node.nodeName)) def FlattenFilter(node): @@ -257,7 +265,7 @@ def MergeAttributes(node1, node2): if value1 != value2: node1.setAttribute(name, ';'.join([value1, value2])) else: - # The attribute does nto exist in the main node. We append this one. + # The attribute does not exist in the main node. We append this one. node1.setAttribute(name, value2) # If the attribute was a property sheet attributes, we remove it, since @@ -283,8 +291,8 @@ def main(argv): # check if we have exactly 1 parameter. 
if len(argv) < 2: - print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] ' - '[key2=value2]' % argv[0]) + print(('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] ' + '[key2=value2]' % argv[0])) return 1 # Parse the keys diff --git a/node_modules/node-gyp/lib/Find-VS2017.cs b/node_modules/node-gyp/lib/Find-VS2017.cs deleted file mode 100644 index 6e7429b771385..0000000000000 --- a/node_modules/node-gyp/lib/Find-VS2017.cs +++ /dev/null @@ -1,273 +0,0 @@ -// Copyright 2017 - Refael Ackermann -// Distributed under MIT style license -// See accompanying file LICENSE at https://github.com/node4good/windows-autoconf - -// Usage: -// powershell -ExecutionPolicy Unrestricted -Version "2.0" -Command "&{Add-Type -Path Find-VS2017.cs; [VisualStudioConfiguration.Main]::Query()}" -using System; -using System.Text; -using System.Runtime.InteropServices; - -namespace VisualStudioConfiguration -{ - [Flags] - public enum InstanceState : uint - { - None = 0, - Local = 1, - Registered = 2, - NoRebootRequired = 4, - NoErrors = 8, - Complete = 4294967295, - } - - [Guid("6380BCFF-41D3-4B2E-8B2E-BF8A6810C848")] - [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] - [ComImport] - public interface IEnumSetupInstances - { - - void Next([MarshalAs(UnmanagedType.U4), In] int celt, - [MarshalAs(UnmanagedType.LPArray, ArraySubType = UnmanagedType.Interface), Out] ISetupInstance[] rgelt, - [MarshalAs(UnmanagedType.U4)] out int pceltFetched); - - void Skip([MarshalAs(UnmanagedType.U4), In] int celt); - - void Reset(); - - [return: MarshalAs(UnmanagedType.Interface)] - IEnumSetupInstances Clone(); - } - - [Guid("42843719-DB4C-46C2-8E7C-64F1816EFD5B")] - [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] - [ComImport] - public interface ISetupConfiguration - { - } - - [Guid("26AAB78C-4A60-49D6-AF3B-3C35BC93365D")] - [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] - [ComImport] - public interface ISetupConfiguration2 : ISetupConfiguration - { - - [return: MarshalAs(UnmanagedType.Interface)] - IEnumSetupInstances EnumInstances(); - - [return: MarshalAs(UnmanagedType.Interface)] - ISetupInstance GetInstanceForCurrentProcess(); - - [return: MarshalAs(UnmanagedType.Interface)] - ISetupInstance GetInstanceForPath([MarshalAs(UnmanagedType.LPWStr), In] string path); - - [return: MarshalAs(UnmanagedType.Interface)] - IEnumSetupInstances EnumAllInstances(); - } - - [Guid("B41463C3-8866-43B5-BC33-2B0676F7F42E")] - [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] - [ComImport] - public interface ISetupInstance - { - } - - [Guid("89143C9A-05AF-49B0-B717-72E218A2185C")] - [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] - [ComImport] - public interface ISetupInstance2 : ISetupInstance - { - [return: MarshalAs(UnmanagedType.BStr)] - string GetInstanceId(); - - [return: MarshalAs(UnmanagedType.Struct)] - System.Runtime.InteropServices.ComTypes.FILETIME GetInstallDate(); - - [return: MarshalAs(UnmanagedType.BStr)] - string GetInstallationName(); - - [return: MarshalAs(UnmanagedType.BStr)] - string GetInstallationPath(); - - [return: MarshalAs(UnmanagedType.BStr)] - string GetInstallationVersion(); - - [return: MarshalAs(UnmanagedType.BStr)] - string GetDisplayName([MarshalAs(UnmanagedType.U4), In] int lcid); - - [return: MarshalAs(UnmanagedType.BStr)] - string GetDescription([MarshalAs(UnmanagedType.U4), In] int lcid); - - [return: MarshalAs(UnmanagedType.BStr)] - string ResolvePath([MarshalAs(UnmanagedType.LPWStr), In] string pwszRelativePath); - - [return: MarshalAs(UnmanagedType.U4)] - InstanceState 
GetState(); - - [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_UNKNOWN)] - ISetupPackageReference[] GetPackages(); - - ISetupPackageReference GetProduct(); - - [return: MarshalAs(UnmanagedType.BStr)] - string GetProductPath(); - - [return: MarshalAs(UnmanagedType.VariantBool)] - bool IsLaunchable(); - - [return: MarshalAs(UnmanagedType.VariantBool)] - bool IsComplete(); - - [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_UNKNOWN)] - ISetupPropertyStore GetProperties(); - - [return: MarshalAs(UnmanagedType.BStr)] - string GetEnginePath(); - } - - [Guid("DA8D8A16-B2B6-4487-A2F1-594CCCCD6BF5")] - [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] - [ComImport] - public interface ISetupPackageReference - { - - [return: MarshalAs(UnmanagedType.BStr)] - string GetId(); - - [return: MarshalAs(UnmanagedType.BStr)] - string GetVersion(); - - [return: MarshalAs(UnmanagedType.BStr)] - string GetChip(); - - [return: MarshalAs(UnmanagedType.BStr)] - string GetLanguage(); - - [return: MarshalAs(UnmanagedType.BStr)] - string GetBranch(); - - [return: MarshalAs(UnmanagedType.BStr)] - string GetType(); - - [return: MarshalAs(UnmanagedType.BStr)] - string GetUniqueId(); - - [return: MarshalAs(UnmanagedType.VariantBool)] - bool GetIsExtension(); - } - - [Guid("c601c175-a3be-44bc-91f6-4568d230fc83")] - [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] - [ComImport] - public interface ISetupPropertyStore - { - - [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_BSTR)] - string[] GetNames(); - - object GetValue([MarshalAs(UnmanagedType.LPWStr), In] string pwszName); - } - - [Guid("42843719-DB4C-46C2-8E7C-64F1816EFD5B")] - [CoClass(typeof(SetupConfigurationClass))] - [ComImport] - public interface SetupConfiguration : ISetupConfiguration2, ISetupConfiguration - { - } - - [Guid("177F0C4A-1CD3-4DE7-A32C-71DBBB9FA36D")] - [ClassInterface(ClassInterfaceType.None)] - [ComImport] - public class SetupConfigurationClass - { - } - - public static class Main - { - public static void Query() - { - ISetupConfiguration query = new SetupConfiguration(); - ISetupConfiguration2 query2 = (ISetupConfiguration2)query; - IEnumSetupInstances e = query2.EnumAllInstances(); - - int pceltFetched; - ISetupInstance2[] rgelt = new ISetupInstance2[1]; - StringBuilder log = new StringBuilder(); - while (true) - { - e.Next(1, rgelt, out pceltFetched); - if (pceltFetched <= 0) - { - Console.WriteLine(String.Format("{{\"log\":\"{0}\"}}", log.ToString())); - return; - } - if (CheckInstance(rgelt[0], ref log)) - return; - } - } - - private static bool CheckInstance(ISetupInstance2 setupInstance2, ref StringBuilder log) - { - // Visual Studio Community 2017 component directory: - // https://www.visualstudio.com/en-us/productinfo/vs2017-install-product-Community.workloads - - string path = setupInstance2.GetInstallationPath().Replace("\\", "\\\\"); - log.Append(String.Format("Found installation at: {0}\\n", path)); - - bool hasMSBuild = false; - bool hasVCTools = false; - uint Win10SDKVer = 0; - bool hasWin8SDK = false; - - foreach (ISetupPackageReference package in setupInstance2.GetPackages()) - { - const string Win10SDKPrefix = "Microsoft.VisualStudio.Component.Windows10SDK."; - - string id = package.GetId(); - if (id == "Microsoft.VisualStudio.VC.MSBuild.Base") - hasMSBuild = true; - else if (id == "Microsoft.VisualStudio.Component.VC.Tools.x86.x64") - hasVCTools = true; - else if (id.StartsWith(Win10SDKPrefix)) { - string[] parts = 
id.Substring(Win10SDKPrefix.Length).Split('.'); - if (parts.Length > 1 && parts[1] != "Desktop") - continue; - uint foundSdkVer; - if (UInt32.TryParse(parts[0], out foundSdkVer)) - Win10SDKVer = Math.Max(Win10SDKVer, foundSdkVer); - } else if (id == "Microsoft.VisualStudio.Component.Windows81SDK") - hasWin8SDK = true; - else - continue; - - log.Append(String.Format(" - Found {0}\\n", id)); - } - - if (!hasMSBuild) - log.Append(" - Missing Visual Studio C++ core features (Microsoft.VisualStudio.VC.MSBuild.Base)\\n"); - if (!hasVCTools) - log.Append(" - Missing VC++ 2017 v141 toolset (x86,x64) (Microsoft.VisualStudio.Component.VC.Tools.x86.x64)\\n"); - if ((Win10SDKVer == 0) && (!hasWin8SDK)) - log.Append(" - Missing a Windows SDK (Microsoft.VisualStudio.Component.Windows10SDK.* or Microsoft.VisualStudio.Component.Windows81SDK)\\n"); - - if (hasMSBuild && hasVCTools) - { - if (Win10SDKVer > 0) - { - log.Append(" - Using this installation with Windows 10 SDK"/*\\n*/); - Console.WriteLine(String.Format("{{\"log\":\"{0}\",\"path\":\"{1}\",\"sdk\":\"10.0.{2}.0\"}}", log.ToString(), path, Win10SDKVer)); - return true; - } - else if (hasWin8SDK) - { - log.Append(" - Using this installation with Windows 8.1 SDK"/*\\n*/); - Console.WriteLine(String.Format("{{\"log\":\"{0}\",\"path\":\"{1}\",\"sdk\":\"8.1\"}}", log.ToString(), path)); - return true; - } - } - - log.Append(" - Some required components are missing, not using this installation\\n"); - return false; - } - } -} diff --git a/node_modules/node-gyp/lib/Find-VisualStudio.cs b/node_modules/node-gyp/lib/Find-VisualStudio.cs new file mode 100644 index 0000000000000..0a533f42d6a4f --- /dev/null +++ b/node_modules/node-gyp/lib/Find-VisualStudio.cs @@ -0,0 +1,243 @@ +// Copyright 2017 - Refael Ackermann +// Distributed under MIT style license +// See accompanying file LICENSE at https://github.com/node4good/windows-autoconf + +// Usage: +// powershell -ExecutionPolicy Unrestricted -Command "Add-Type -Path Find-VisualStudio.cs; [VisualStudioConfiguration.Main]::PrintJson()" +// This script needs to be compatible with PowerShell v2 to run on Windows 2008R2 and Windows 7. 
+ +using System; +using System.Text; +using System.Runtime.InteropServices; +using System.Collections.Generic; + +namespace VisualStudioConfiguration +{ + [Flags] + public enum InstanceState : uint + { + None = 0, + Local = 1, + Registered = 2, + NoRebootRequired = 4, + NoErrors = 8, + Complete = 4294967295, + } + + [Guid("6380BCFF-41D3-4B2E-8B2E-BF8A6810C848")] + [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] + [ComImport] + public interface IEnumSetupInstances + { + + void Next([MarshalAs(UnmanagedType.U4), In] int celt, + [MarshalAs(UnmanagedType.LPArray, ArraySubType = UnmanagedType.Interface), Out] ISetupInstance[] rgelt, + [MarshalAs(UnmanagedType.U4)] out int pceltFetched); + + void Skip([MarshalAs(UnmanagedType.U4), In] int celt); + + void Reset(); + + [return: MarshalAs(UnmanagedType.Interface)] + IEnumSetupInstances Clone(); + } + + [Guid("42843719-DB4C-46C2-8E7C-64F1816EFD5B")] + [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] + [ComImport] + public interface ISetupConfiguration + { + } + + [Guid("26AAB78C-4A60-49D6-AF3B-3C35BC93365D")] + [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] + [ComImport] + public interface ISetupConfiguration2 : ISetupConfiguration + { + + [return: MarshalAs(UnmanagedType.Interface)] + IEnumSetupInstances EnumInstances(); + + [return: MarshalAs(UnmanagedType.Interface)] + ISetupInstance GetInstanceForCurrentProcess(); + + [return: MarshalAs(UnmanagedType.Interface)] + ISetupInstance GetInstanceForPath([MarshalAs(UnmanagedType.LPWStr), In] string path); + + [return: MarshalAs(UnmanagedType.Interface)] + IEnumSetupInstances EnumAllInstances(); + } + + [Guid("B41463C3-8866-43B5-BC33-2B0676F7F42E")] + [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] + [ComImport] + public interface ISetupInstance + { + } + + [Guid("89143C9A-05AF-49B0-B717-72E218A2185C")] + [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] + [ComImport] + public interface ISetupInstance2 : ISetupInstance + { + [return: MarshalAs(UnmanagedType.BStr)] + string GetInstanceId(); + + [return: MarshalAs(UnmanagedType.Struct)] + System.Runtime.InteropServices.ComTypes.FILETIME GetInstallDate(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetInstallationName(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetInstallationPath(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetInstallationVersion(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetDisplayName([MarshalAs(UnmanagedType.U4), In] int lcid); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetDescription([MarshalAs(UnmanagedType.U4), In] int lcid); + + [return: MarshalAs(UnmanagedType.BStr)] + string ResolvePath([MarshalAs(UnmanagedType.LPWStr), In] string pwszRelativePath); + + [return: MarshalAs(UnmanagedType.U4)] + InstanceState GetState(); + + [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_UNKNOWN)] + ISetupPackageReference[] GetPackages(); + + ISetupPackageReference GetProduct(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetProductPath(); + + [return: MarshalAs(UnmanagedType.VariantBool)] + bool IsLaunchable(); + + [return: MarshalAs(UnmanagedType.VariantBool)] + bool IsComplete(); + + [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_UNKNOWN)] + ISetupPropertyStore GetProperties(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetEnginePath(); + } + + [Guid("DA8D8A16-B2B6-4487-A2F1-594CCCCD6BF5")] + [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] + [ComImport] + public interface 
ISetupPackageReference + { + + [return: MarshalAs(UnmanagedType.BStr)] + string GetId(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetVersion(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetChip(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetLanguage(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetBranch(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetType(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetUniqueId(); + + [return: MarshalAs(UnmanagedType.VariantBool)] + bool GetIsExtension(); + } + + [Guid("c601c175-a3be-44bc-91f6-4568d230fc83")] + [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] + [ComImport] + public interface ISetupPropertyStore + { + + [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_BSTR)] + string[] GetNames(); + + object GetValue([MarshalAs(UnmanagedType.LPWStr), In] string pwszName); + } + + [Guid("42843719-DB4C-46C2-8E7C-64F1816EFD5B")] + [CoClass(typeof(SetupConfigurationClass))] + [ComImport] + public interface SetupConfiguration : ISetupConfiguration2, ISetupConfiguration + { + } + + [Guid("177F0C4A-1CD3-4DE7-A32C-71DBBB9FA36D")] + [ClassInterface(ClassInterfaceType.None)] + [ComImport] + public class SetupConfigurationClass + { + } + + public static class Main + { + public static void PrintJson() + { + ISetupConfiguration query = new SetupConfiguration(); + ISetupConfiguration2 query2 = (ISetupConfiguration2)query; + IEnumSetupInstances e = query2.EnumAllInstances(); + + int pceltFetched; + ISetupInstance2[] rgelt = new ISetupInstance2[1]; + List<string> instances = new List<string>(); + while (true) + { + e.Next(1, rgelt, out pceltFetched); + if (pceltFetched <= 0) + { + Console.WriteLine(String.Format("[{0}]", string.Join(",", instances.ToArray()))); + return; + } + + instances.Add(InstanceJson(rgelt[0])); + } + } + + private static string JsonString(string s) + { + return "\"" + s.Replace("\\", "\\\\").Replace("\"", "\\\"") + "\""; + } + + private static string InstanceJson(ISetupInstance2 setupInstance2) + { + // Visual Studio component directory: + // https://docs.microsoft.com/en-us/visualstudio/install/workload-and-component-ids + + StringBuilder json = new StringBuilder(); + json.Append("{"); + + string path = JsonString(setupInstance2.GetInstallationPath()); + json.Append(String.Format("\"path\":{0},", path)); + + string version = JsonString(setupInstance2.GetInstallationVersion()); + json.Append(String.Format("\"version\":{0},", version)); + + List<string> packages = new List<string>(); + foreach (ISetupPackageReference package in setupInstance2.GetPackages()) + { + string id = JsonString(package.GetId()); + packages.Add(id); + } + json.Append(String.Format("\"packages\":[{0}]", string.Join(",", packages.ToArray()))); + + json.Append("}"); + return json.ToString(); + } + } +} diff --git a/node_modules/node-gyp/lib/build.js b/node_modules/node-gyp/lib/build.js index 2f8e14c37429d..c2388fb348cc5 100644 --- a/node_modules/node-gyp/lib/build.js +++ b/node_modules/node-gyp/lib/build.js @@ -1,21 +1,11 @@ +'use strict' -module.exports = exports = build - -/** - * Module dependencies. - */ - -var fs = require('graceful-fs') - , rm = require('rimraf') - , path = require('path') - , glob = require('glob') - , log = require('npmlog') - , which = require('which') - , exec = require('child_process').exec - , processRelease = require('./process-release') - , win = process.platform === 'win32' - -exports.usage = 'Invokes `' + (win ?
'msbuild' : 'make') + '` and builds the module' +const fs = require('graceful-fs') +const path = require('path') +const glob = require('glob') +const log = require('npmlog') +const which = require('which') +const win = process.platform === 'win32' function build (gyp, argv, callback) { var platformMake = 'make' @@ -24,21 +14,19 @@ function build (gyp, argv, callback) { } else if (process.platform.indexOf('bsd') !== -1) { platformMake = 'gmake' } else if (win && argv.length > 0) { - argv = argv.map(function(target) { + argv = argv.map(function (target) { return '/t:' + target }) } - var release = processRelease(argv, gyp, process.version, process.release) - , makeCommand = gyp.opts.make || process.env.MAKE || platformMake - , command = win ? 'msbuild' : makeCommand - , buildDir = path.resolve('build') - , configPath = path.resolve(buildDir, 'config.gypi') - , jobs = gyp.opts.jobs || process.env.JOBS - , buildType - , config - , arch - , nodeDir + var makeCommand = gyp.opts.make || process.env.MAKE || platformMake + var command = win ? 'msbuild' : makeCommand + var jobs = gyp.opts.jobs || process.env.JOBS + var buildType + var config + var arch + var nodeDir + var guessedSolution loadConfigGypi() @@ -47,16 +35,18 @@ function build (gyp, argv, callback) { */ function loadConfigGypi () { + var configPath = path.resolve('build', 'config.gypi') + fs.readFile(configPath, 'utf8', function (err, data) { if (err) { - if (err.code == 'ENOENT') { + if (err.code === 'ENOENT') { callback(new Error('You must run `node-gyp configure` first!')) } else { callback(err) } return } - config = JSON.parse(data.replace(/\#.+\n/, '')) + config = JSON.parse(data.replace(/#.+\n/, '')) // get the 'arch', 'buildType', and 'nodeDir' vars from the config buildType = config.target_defaults.default_configuration @@ -88,7 +78,9 @@ function build (gyp, argv, callback) { function findSolutionFile () { glob('build/*.sln', function (err, files) { - if (err) return callback(err) + if (err) { + return callback(err) + } if (files.length === 0) { return callback(new Error('Could not find *.sln file. Did you run "configure"?')) } @@ -103,102 +95,42 @@ function build (gyp, argv, callback) { */ function doWhich () { - // First make sure we have the build command in the PATH - which(command, function (err, execPath) { - if (err) { - if (win && /not found/.test(err.message)) { - // On windows and no 'msbuild' found. Let's guess where it is - findMsbuild() - } else { - // Some other error or 'make' not found on Unix, report that to the user - callback(err) - } - return + // On Windows use msbuild provided by node-gyp configure + if (win) { + if (!config.variables.msbuild_path) { + return callback(new Error( + 'MSBuild is not set, please run `node-gyp configure`.')) } - log.verbose('`which` succeeded for `' + command + '`', execPath) - doBuild() - }) - } - - /** - * Search for the location of "msbuild.exe" file on Windows. - */ - - function findMsbuild () { - if (config.variables.msbuild_path) { command = config.variables.msbuild_path log.verbose('using MSBuild:', command) doBuild() return } - - log.verbose('could not find "msbuild.exe" in PATH - finding location in registry') - var notfoundErr = 'Can\'t find "msbuild.exe". Do you have Microsoft Visual Studio C++ 2008+ installed?' 
- var cmd = 'reg query "HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions" /s' - if (process.arch !== 'ia32') - cmd += ' /reg:32' - exec(cmd, function (err, stdout, stderr) { + // First make sure we have the build command in the PATH + which(command, function (err, execPath) { if (err) { - return callback(new Error(err.message + '\n' + notfoundErr)) + // Some other error or 'make' not found on Unix, report that to the user + callback(err) + return } - var reVers = /ToolsVersions\\([^\\]+)$/i - , rePath = /\r\n[ \t]+MSBuildToolsPath[ \t]+REG_SZ[ \t]+([^\r]+)/i - , msbuilds = [] - , r - , msbuildPath - stdout.split('\r\n\r\n').forEach(function(l) { - if (!l) return - l = l.trim() - if (r = reVers.exec(l.substring(0, l.indexOf('\r\n')))) { - var ver = parseFloat(r[1], 10) - if (ver >= 3.5) { - if (r = rePath.exec(l)) { - msbuilds.push({ - version: ver, - path: r[1] - }) - } - } - } - }) - msbuilds.sort(function (x, y) { - return (x.version < y.version ? -1 : 1) - }) - ;(function verifyMsbuild () { - if (!msbuilds.length) return callback(new Error(notfoundErr)) - msbuildPath = path.resolve(msbuilds.pop().path, 'msbuild.exe') - fs.stat(msbuildPath, function (err, stat) { - if (err) { - if (err.code == 'ENOENT') { - if (msbuilds.length) { - return verifyMsbuild() - } else { - callback(new Error(notfoundErr)) - } - } else { - callback(err) - } - return - } - command = msbuildPath - doBuild() - }) - })() + log.verbose('`which` succeeded for `' + command + '`', execPath) + doBuild() }) } - /** * Actually spawn the process and compile the module. */ function doBuild () { - // Enable Verbose build var verbose = log.levels[log.level] <= log.levels.verbose + var j + if (!win && verbose) { argv.push('V=1') } + if (win && !verbose) { argv.push('/clp:Verbosity=minimal') } @@ -210,12 +142,16 @@ function build (gyp, argv, callback) { // Specify the build type, Release by default if (win) { + // Convert .gypi config target_arch to MSBuild /Platform + // Since there are many ways to state '32-bit Intel', default to it. + // N.B. msbuild's Condition string equality tests are case-insensitive. var archLower = arch.toLowerCase() - var p = archLower === 'x64' ? 'x64' : - (archLower === 'arm' ? 'ARM' : 'Win32') + var p = archLower === 'x64' ? 'x64' + : (archLower === 'arm' ? 'ARM' + : (archLower === 'arm64' ? 'ARM64' : 'Win32')) argv.push('/p:Configuration=' + buildType + ';Platform=' + p) if (jobs) { - var j = parseInt(jobs, 10) + j = parseInt(jobs, 10) if (!isNaN(j) && j > 0) { argv.push('/m:' + j) } else if (jobs.toUpperCase() === 'MAX') { @@ -228,7 +164,7 @@ function build (gyp, argv, callback) { argv.push('-C') argv.push('build') if (jobs) { - var j = parseInt(jobs, 10) + j = parseInt(jobs, 10) if (!isNaN(j) && j > 0) { argv.push('--jobs') argv.push(j) @@ -242,7 +178,7 @@ function build (gyp, argv, callback) { if (win) { // did the user specify their own .sln file? var hasSln = argv.some(function (arg) { - return path.extname(arg) == '.sln' + return path.extname(arg) === '.sln' }) if (!hasSln) { argv.unshift(gyp.opts.solution || guessedSolution) @@ -253,10 +189,6 @@ function build (gyp, argv, callback) { proc.on('exit', onExit) } - /** - * Invoked after the make/msbuild command exits. - */ - function onExit (code, signal) { if (code !== 0) { return callback(new Error('`' + command + '` failed with exit code: ' + code)) @@ -266,5 +198,7 @@ function build (gyp, argv, callback) { } callback() } - } + +module.exports = build +module.exports.usage = 'Invokes `' + (win ? 
'msbuild' : 'make') + '` and builds the module' diff --git a/node_modules/node-gyp/lib/clean.js b/node_modules/node-gyp/lib/clean.js index e69164d45afcc..dbfa4dbb99d3c 100644 --- a/node_modules/node-gyp/lib/clean.js +++ b/node_modules/node-gyp/lib/clean.js @@ -1,22 +1,15 @@ +'use strict' -module.exports = exports = clean - -exports.usage = 'Removes any generated build files and the "out" dir' - -/** - * Module dependencies. - */ - -var rm = require('rimraf') -var log = require('npmlog') - +const rm = require('rimraf') +const log = require('npmlog') function clean (gyp, argv, callback) { - // Remove the 'build' dir var buildDir = 'build' log.verbose('clean', 'removing "%s" directory', buildDir) rm(buildDir, callback) - } + +module.exports = clean +module.exports.usage = 'Removes any generated build files and the "out" dir' diff --git a/node_modules/node-gyp/lib/configure.js b/node_modules/node-gyp/lib/configure.js index 35e6787d1f3e3..564564eea4e6f 100644 --- a/node_modules/node-gyp/lib/configure.js +++ b/node_modules/node-gyp/lib/configure.js @@ -1,42 +1,28 @@ -module.exports = exports = configure -module.exports.test = { - PythonFinder: PythonFinder, - findAccessibleSync: findAccessibleSync, - findPython: findPython, +'use strict' + +const fs = require('graceful-fs') +const path = require('path') +const log = require('npmlog') +const os = require('os') +const mkdirp = require('mkdirp') +const processRelease = require('./process-release') +const win = process.platform === 'win32' +const findNodeDirectory = require('./find-node-directory') +const msgFormat = require('util').format +var findPython = require('./find-python') +if (win) { + var findVisualStudio = require('./find-visualstudio') } -/** - * Module dependencies. - */ - -var fs = require('graceful-fs') - , path = require('path') - , log = require('npmlog') - , osenv = require('osenv') - , which = require('which') - , semver = require('semver') - , mkdirp = require('mkdirp') - , cp = require('child_process') - , extend = require('util')._extend - , processRelease = require('./process-release') - , win = process.platform === 'win32' - , findNodeDirectory = require('./find-node-directory') - , msgFormat = require('util').format -if (win) - var findVS2017 = require('./find-vs2017') - -exports.usage = 'Generates ' + (win ? 'MSVC project files' : 'a Makefile') + ' for the current module' - function configure (gyp, argv, callback) { - - var python = gyp.opts.python || process.env.PYTHON || 'python2' - , buildDir = path.resolve('build') - , configNames = [ 'config.gypi', 'common.gypi' ] - , configs = [] - , nodeDir - , release = processRelease(argv, gyp, process.version, process.release) - - findPython(python, function (err, found) { + var python + var buildDir = path.resolve('build') + var configNames = ['config.gypi', 'common.gypi'] + var configs = [] + var nodeDir + var release = processRelease(argv, gyp, process.version, process.release) + + findPython(gyp.opts.python, function (err, found) { if (err) { callback(err) } else { @@ -46,17 +32,15 @@ function configure (gyp, argv, callback) { }) function getNodeDir () { - // 'python' should be set by now process.env.PYTHON = python if (gyp.opts.nodedir) { // --nodedir was specified. 
use that for the dev files - nodeDir = gyp.opts.nodedir.replace(/^~/, osenv.home()) + nodeDir = gyp.opts.nodedir.replace(/^~/, os.homedir()) log.verbose('get node dir', 'compiling against specified --nodedir dev files: %s', nodeDir) createBuildDir() - } else { // if no --nodedir specified, ensure node dependencies are installed if ('v' + release.version !== process.version) { @@ -74,10 +58,12 @@ function configure (gyp, argv, callback) { // If the tarball option is set, always remove and reinstall the headers // into devdir. Otherwise only install if they're not already there. - gyp.opts.ensure = gyp.opts.tarball ? false : true + gyp.opts.ensure = !gyp.opts.tarball - gyp.commands.install([ release.version ], function (err, version) { - if (err) return callback(err) + gyp.commands.install([release.version], function (err) { + if (err) { + return callback(err) + } log.verbose('get node dir', 'target node version installed:', release.versionDir) nodeDir = path.resolve(gyp.devDir, release.versionDir) createBuildDir() @@ -88,25 +74,23 @@ function configure (gyp, argv, callback) { function createBuildDir () { log.verbose('build dir', 'attempting to create "build" dir: %s', buildDir) mkdirp(buildDir, function (err, isNew) { - if (err) return callback(err) + if (err) { + return callback(err) + } log.verbose('build dir', '"build" dir needed to be created?', isNew) - if (win && (!gyp.opts.msvs_version || gyp.opts.msvs_version === '2017')) { - findVS2017(function (err, vsSetup) { - if (err) { - log.verbose('Not using VS2017:', err.message) - createConfigFile() - } else { - createConfigFile(null, vsSetup) - } - }) + if (win) { + findVisualStudio(release.semver, gyp.opts.msvs_version, + createConfigFile) } else { createConfigFile() } }) } - function createConfigFile (err, vsSetup) { - if (err) return callback(err) + function createConfigFile (err, vsInfo) { + if (err) { + return callback(err) + } var configFilename = 'config.gypi' var configPath = path.resolve(buildDir, configFilename) @@ -114,14 +98,18 @@ function configure (gyp, argv, callback) { log.verbose('build/' + configFilename, 'creating config file') var config = process.config || {} - , defaults = config.target_defaults - , variables = config.variables + var defaults = config.target_defaults + var variables = config.variables // default "config.variables" - if (!variables) variables = config.variables = {} + if (!variables) { + variables = config.variables = {} + } // default "config.defaults" - if (!defaults) defaults = config.target_defaults = {} + if (!defaults) { + defaults = config.target_defaults = {} + } // don't inherit the "defaults" from node's `process.config` object. // doing so could cause problems in cases where the `node` executable was @@ -136,12 +124,16 @@ function configure (gyp, argv, callback) { if ('debug' in gyp.opts) { defaults.default_configuration = gyp.opts.debug ? 'Debug' : 'Release' } + if (!defaults.default_configuration) { defaults.default_configuration = 'Release' } // set the target_arch variable variables.target_arch = gyp.opts.arch || process.arch || 'ia32' + if (variables.target_arch === 'arm64') { + defaults.msvs_configuration_platform = 'ARM64' + } // set the node development directory variables.nodedir = nodeDir @@ -149,17 +141,23 @@ function configure (gyp, argv, callback) { // disable -T "thin" static archives by default variables.standalone_static_library = gyp.opts.thin ? 
0 : 1 - if (vsSetup) { - // GYP doesn't (yet) have support for VS2017, so we force it to VS2015 - // to avoid pulling a floating patch that has not landed upstream. - // Ref: https://chromium-review.googlesource.com/#/c/433540/ - gyp.opts.msvs_version = '2015' - process.env['GYP_MSVS_VERSION'] = 2015 - process.env['GYP_MSVS_OVERRIDE_PATH'] = vsSetup.path - defaults['msbuild_toolset'] = 'v141' - defaults['msvs_windows_target_platform_version'] = vsSetup.sdk - variables['msbuild_path'] = path.join(vsSetup.path, 'MSBuild', '15.0', - 'Bin', 'MSBuild.exe') + if (win) { + process.env.GYP_MSVS_VERSION = Math.min(vsInfo.versionYear, 2015) + process.env.GYP_MSVS_OVERRIDE_PATH = vsInfo.path + defaults.msbuild_toolset = vsInfo.toolset + if (vsInfo.sdk) { + defaults.msvs_windows_target_platform_version = vsInfo.sdk + } + if (variables.target_arch === 'arm64') { + if (vsInfo.versionMajor > 15 || + (vsInfo.versionMajor === 15 && vsInfo.versionMinor >= 9)) { + defaults.msvs_enable_marmasm = 1 + } else { + log.warn('Compiling ARM64 assembly is only available in\n' + + 'Visual Studio 2017 version 15.9 and above') + } + } + variables.msbuild_path = vsInfo.msBuild } // loop through the rest of the opts and add the unknown ones as variables. @@ -167,15 +165,20 @@ function configure (gyp, argv, callback) { // // $ node-gyp configure --shared-libxml2 Object.keys(gyp.opts).forEach(function (opt) { - if (opt === 'argv') return - if (opt in gyp.configDefs) return + if (opt === 'argv') { + return + } + if (opt in gyp.configDefs) { + return + } variables[opt.replace(/-/g, '_')] = gyp.opts[opt] }) // ensures that any boolean values from `process.config` get stringified function boolsToString (k, v) { - if (typeof v === 'boolean') + if (typeof v === 'boolean') { return String(v) + } return v } @@ -183,21 +186,28 @@ function configure (gyp, argv, callback) { // now write out the config.gypi file to the build/ dir var prefix = '# Do not edit. 
File was generated by node-gyp\'s "configure" step' - , json = JSON.stringify(config, boolsToString, 2) + + var json = JSON.stringify(config, boolsToString, 2) log.verbose('build/' + configFilename, 'writing out config file: %s', configPath) configs.push(configPath) fs.writeFile(configPath, [prefix, json, ''].join('\n'), findConfigs) } function findConfigs (err) { - if (err) return callback(err) + if (err) { + return callback(err) + } + var name = configNames.shift() - if (!name) return runGyp() + if (!name) { + return runGyp() + } var fullPath = path.resolve(name) + log.verbose(name, 'checking for gypi file: %s', fullPath) - fs.stat(fullPath, function (err, stat) { + fs.stat(fullPath, function (err) { if (err) { - if (err.code == 'ENOENT') { + if (err.code === 'ENOENT') { findConfigs() // check next gypi filename } else { callback(err) @@ -211,7 +221,9 @@ function configure (gyp, argv, callback) { } function runGyp (err) { - if (err) return callback(err) + if (err) { + return callback(err) + } if (!~argv.indexOf('-f') && !~argv.indexOf('--format')) { if (win) { @@ -225,86 +237,84 @@ function configure (gyp, argv, callback) { } } - function hasMsvsVersion () { - return argv.some(function (arg) { - return arg.indexOf('msvs_version') === 0 - }) - } - - if (win && !hasMsvsVersion()) { - if ('msvs_version' in gyp.opts) { - argv.push('-G', 'msvs_version=' + gyp.opts.msvs_version) - } else { - argv.push('-G', 'msvs_version=auto') - } - } - // include all the ".gypi" files that were found configs.forEach(function (config) { argv.push('-I', config) }) // For AIX and z/OS we need to set up the path to the exports file - // which contains the symbols needed for linking. - var node_exp_file = undefined + // which contains the symbols needed for linking. + var nodeExpFile if (process.platform === 'aix' || process.platform === 'os390') { var ext = process.platform === 'aix' ? 'exp' : 'x' - var node_root_dir = findNodeDirectory() - var candidates = undefined + var nodeRootDir = findNodeDirectory() + var candidates + if (process.platform === 'aix') { - candidates = ['include/node/node', - 'out/Release/node', - 'out/Debug/node', - 'node' - ].map(function(file) { - return file + '.' + ext - }) + candidates = [ + 'include/node/node', + 'out/Release/node', + 'out/Debug/node', + 'node' + ].map(function (file) { + return file + '.' + ext + }) } else { - candidates = ['out/Release/obj.target/libnode', - 'out/Debug/obj.target/libnode', - 'lib/libnode' - ].map(function(file) { - return file + '.' + ext - }) + candidates = [ + 'out/Release/obj.target/libnode', + 'out/Debug/obj.target/libnode', + 'lib/libnode' + ].map(function (file) { + return file + '.' 
+ ext + }) } + var logprefix = 'find exports file' - node_exp_file = findAccessibleSync(logprefix, node_root_dir, candidates) - if (node_exp_file !== undefined) { - log.verbose(logprefix, 'Found exports file: %s', node_exp_file) + nodeExpFile = findAccessibleSync(logprefix, nodeRootDir, candidates) + if (nodeExpFile !== undefined) { + log.verbose(logprefix, 'Found exports file: %s', nodeExpFile) } else { - var msg = msgFormat('Could not find node.%s file in %s', ext, node_root_dir) + var msg = msgFormat('Could not find node.%s file in %s', ext, nodeRootDir) log.error(logprefix, 'Could not find exports file') return callback(new Error(msg)) } } // this logic ported from the old `gyp_addon` python file - var gyp_script = path.resolve(__dirname, '..', 'gyp', 'gyp_main.py') - var addon_gypi = path.resolve(__dirname, '..', 'addon.gypi') - var common_gypi = path.resolve(nodeDir, 'include/node/common.gypi') - fs.stat(common_gypi, function (err, stat) { - if (err) - common_gypi = path.resolve(nodeDir, 'common.gypi') - - var output_dir = 'build' + var gypScript = path.resolve(__dirname, '..', 'gyp', 'gyp_main.py') + var addonGypi = path.resolve(__dirname, '..', 'addon.gypi') + var commonGypi = path.resolve(nodeDir, 'include/node/common.gypi') + fs.stat(commonGypi, function (err) { + if (err) { + commonGypi = path.resolve(nodeDir, 'common.gypi') + } + + var outputDir = 'build' if (win) { // Windows expects an absolute path - output_dir = buildDir + outputDir = buildDir } var nodeGypDir = path.resolve(__dirname, '..') + var nodeLibFile = path.join(nodeDir, !gyp.opts.nodedir ? '<(target_arch)' : '$(Configuration)', release.name + '.lib') - argv.push('-I', addon_gypi) - argv.push('-I', common_gypi) + argv.push('-I', addonGypi) + argv.push('-I', commonGypi) argv.push('-Dlibrary=shared_library') argv.push('-Dvisibility=default') argv.push('-Dnode_root_dir=' + nodeDir) if (process.platform === 'aix' || process.platform === 'os390') { - argv.push('-Dnode_exp_file=' + node_exp_file) + argv.push('-Dnode_exp_file=' + nodeExpFile) } argv.push('-Dnode_gyp_dir=' + nodeGypDir) + + // Do this to keep Cygwin environments happy, else the unescaped '\' gets eaten up, + // resulting in bad paths, Ex c:parentFolderfolderanotherFolder instead of c:\parentFolder\folder\anotherFolder + if (win) { + nodeLibFile = nodeLibFile.replace(/\\/g, '\\\\') + } argv.push('-Dnode_lib_file=' + nodeLibFile) argv.push('-Dmodule_root_dir=' + process.cwd()) argv.push('-Dnode_engine=' + @@ -313,7 +323,7 @@ function configure (gyp, argv, callback) { argv.push('--no-parallel') // tell gyp to write the Makefile/Solution files into output_dir - argv.push('--generator-output', output_dir) + argv.push('--generator-output', outputDir) // tell make to write its output into the same dir argv.push('-Goutput_dir=.') @@ -322,7 +332,7 @@ function configure (gyp, argv, callback) { argv.unshift('binding.gyp') // execute `gyp` from the current target nodedir - argv.unshift(gyp_script) + argv.unshift(gypScript) // make sure python uses files that came with this particular node package var pypath = [path.join(__dirname, '..', 'gyp', 'pylib')] @@ -336,11 +346,7 @@ function configure (gyp, argv, callback) { }) } - /** - * Called when the `gyp` child process exits. 
- */ - - function onCpExit (code, signal) { + function onCpExit (code) { if (code !== 0) { callback(new Error('`gyp` failed with exit code: ' + code)) } else { @@ -348,7 +354,6 @@ function configure (gyp, argv, callback) { callback() } } - } /** @@ -358,167 +363,24 @@ function configure (gyp, argv, callback) { */ function findAccessibleSync (logprefix, dir, candidates) { for (var next = 0; next < candidates.length; next++) { - var candidate = path.resolve(dir, candidates[next]) - try { - var fd = fs.openSync(candidate, 'r') - } catch (e) { - // this candidate was not found or not readable, do nothing - log.silly(logprefix, 'Could not open %s: %s', candidate, e.message) - continue - } - fs.closeSync(fd) - log.silly(logprefix, 'Found readable %s', candidate) - return candidate + var candidate = path.resolve(dir, candidates[next]) + try { + var fd = fs.openSync(candidate, 'r') + } catch (e) { + // this candidate was not found or not readable, do nothing + log.silly(logprefix, 'Could not open %s: %s', candidate, e.message) + continue + } + fs.closeSync(fd) + log.silly(logprefix, 'Found readable %s', candidate) + return candidate } return undefined } -function PythonFinder(python, callback) { - this.callback = callback - this.python = python -} - -PythonFinder.prototype = { - checkPythonLauncherDepth: 0, - env: process.env, - execFile: cp.execFile, - log: log, - resolve: path.win32 && path.win32.resolve || path.resolve, - stat: fs.stat, - which: which, - win: win, - - checkPython: function checkPython () { - this.log.verbose('check python', - 'checking for Python executable "%s" in the PATH', - this.python) - this.which(this.python, function (err, execPath) { - if (err) { - this.log.verbose('`which` failed', this.python, err) - if (this.python === 'python2') { - this.python = 'python' - return this.checkPython() - } - if (this.win) { - this.checkPythonLauncher() - } else { - this.failNoPython() - } - } else { - this.log.verbose('`which` succeeded', this.python, execPath) - // Found the `python` executable, and from now on we use it explicitly. - // This solves #667 and #750 (`execFile` won't run batch files - // (*.cmd, and *.bat)) - this.python = execPath - this.checkPythonVersion() - } - }.bind(this)) - }, - - // Distributions of Python on Windows by default install with the "py.exe" - // Python launcher which is more likely to exist than the Python executable - // being in the $PATH. - // Because the Python launcher supports all versions of Python, we have to - // explicitly request a Python 2 version. This is done by supplying "-2" as - // the first command line argument. Since "py.exe -2" would be an invalid - // executable for "execFile", we have to use the launcher to figure out - // where the actual "python.exe" executable is located. - checkPythonLauncher: function checkPythonLauncher () { - this.checkPythonLauncherDepth += 1 - - this.log.verbose( - 'could not find "' + this.python + '". 
checking python launcher') - var env = extend({}, this.env) - env.TERM = 'dumb' - - var launcherArgs = ['-2', '-c', 'import sys; print sys.executable'] - this.execFile('py.exe', launcherArgs, { env: env }, function (err, stdout) { - if (err) { - this.guessPython() - } else { - this.python = stdout.trim() - this.log.verbose('check python launcher', - 'python executable found: %j', - this.python) - this.checkPythonVersion() - } - this.checkPythonLauncherDepth -= 1 - }.bind(this)) - }, - - checkPythonVersion: function checkPythonVersion () { - var args = ['-c', 'import sys; print "%s.%s.%s" % sys.version_info[:3];'] - var env = extend({}, this.env) - env.TERM = 'dumb' - - this.execFile(this.python, args, { env: env }, function (err, stdout) { - if (err) { - return this.callback(err) - } - this.log.verbose('check python version', - '`%s -c "' + args[1] + '"` returned: %j', - this.python, stdout) - var version = stdout.trim() - var range = semver.Range('>=2.5.0 <3.0.0') - var valid = false - try { - valid = range.test(version) - } catch (e) { - this.log.silly('range.test() error', e) - } - if (valid) { - this.callback(null, this.python) - } else if (this.win && this.checkPythonLauncherDepth === 0) { - this.checkPythonLauncher() - } else { - this.failPythonVersion(version) - } - }.bind(this)) - }, - - failNoPython: function failNoPython () { - var errmsg = - 'Can\'t find Python executable "' + this.python + - '", you can set the PYTHON env variable.' - this.callback(new Error(errmsg)) - }, - - failPythonVersion: function failPythonVersion (badVersion) { - var errmsg = - 'Python executable "' + this.python + - '" is v' + badVersion + ', which is not supported by gyp.\n' + - 'You can pass the --python switch to point to ' + - 'Python >= v2.5.0 & < 3.0.0.' - this.callback(new Error(errmsg)) - }, - - // Called on Windows when "python" isn't available in the current $PATH. - // We are going to check if "%SystemDrive%\python27\python.exe" exists. - guessPython: function guessPython () { - this.log.verbose('could not find "' + this.python + '". guessing location') - var rootDir = this.env.SystemDrive || 'C:\\' - if (rootDir[rootDir.length - 1] !== '\\') { - rootDir += '\\' - } - var pythonPath = this.resolve(rootDir, 'Python27', 'python.exe') - this.log.verbose('ensuring that file exists:', pythonPath) - this.stat(pythonPath, function (err, stat) { - if (err) { - if (err.code == 'ENOENT') { - this.failNoPython() - } else { - this.callback(err) - } - return - } - this.python = pythonPath - this.checkPythonVersion() - }.bind(this)) - }, -} - -function findPython (python, callback) { - var finder = new PythonFinder(python, callback) - finder.checkPython() +module.exports = configure +module.exports.test = { + findAccessibleSync: findAccessibleSync } +module.exports.usage = 'Generates ' + (win ? 
'MSVC project files' : 'a Makefile') + ' for the current module' diff --git a/node_modules/node-gyp/lib/find-node-directory.js b/node_modules/node-gyp/lib/find-node-directory.js index 3aee8a109ae28..0dd781a6cfae8 100644 --- a/node_modules/node-gyp/lib/find-node-directory.js +++ b/node_modules/node-gyp/lib/find-node-directory.js @@ -1,7 +1,9 @@ -var path = require('path') - , log = require('npmlog') +'use strict' -function findNodeDirectory(scriptLocation, processObj) { +const path = require('path') +const log = require('npmlog') + +function findNodeDirectory (scriptLocation, processObj) { // set dirname and process if not passed in // this facilitates regression tests if (scriptLocation === undefined) { @@ -12,50 +14,50 @@ function findNodeDirectory(scriptLocation, processObj) { } // Have a look to see what is above us, to try and work out where we are - npm_parent_directory = path.join(scriptLocation, '../../../..') - log.verbose('node-gyp root', 'npm_parent_directory is ' - + path.basename(npm_parent_directory)) - node_root_dir = "" + var npmParentDirectory = path.join(scriptLocation, '../../../..') + log.verbose('node-gyp root', 'npm_parent_directory is ' + + path.basename(npmParentDirectory)) + var nodeRootDir = '' log.verbose('node-gyp root', 'Finding node root directory') - if (path.basename(npm_parent_directory) === 'deps') { + if (path.basename(npmParentDirectory) === 'deps') { // We are in a build directory where this script lives in // deps/npm/node_modules/node-gyp/lib - node_root_dir = path.join(npm_parent_directory, '..') - log.verbose('node-gyp root', 'in build directory, root = ' - + node_root_dir) - } else if (path.basename(npm_parent_directory) === 'node_modules') { + nodeRootDir = path.join(npmParentDirectory, '..') + log.verbose('node-gyp root', 'in build directory, root = ' + + nodeRootDir) + } else if (path.basename(npmParentDirectory) === 'node_modules') { // We are in a node install directory where this script lives in // lib/node_modules/npm/node_modules/node-gyp/lib or // node_modules/npm/node_modules/node-gyp/lib depending on the // platform if (processObj.platform === 'win32') { - node_root_dir = path.join(npm_parent_directory, '..') + nodeRootDir = path.join(npmParentDirectory, '..') } else { - node_root_dir = path.join(npm_parent_directory, '../..') + nodeRootDir = path.join(npmParentDirectory, '../..') } - log.verbose('node-gyp root', 'in install directory, root = ' - + node_root_dir) + log.verbose('node-gyp root', 'in install directory, root = ' + + nodeRootDir) } else { // We don't know where we are, try working it out from the location // of the node binary - var node_dir = path.dirname(processObj.execPath) - var directory_up = path.basename(node_dir) - if (directory_up === 'bin') { - node_root_dir = path.join(node_dir, '..') - } else if (directory_up === 'Release' || directory_up === 'Debug') { + var nodeDir = path.dirname(processObj.execPath) + var directoryUp = path.basename(nodeDir) + if (directoryUp === 'bin') { + nodeRootDir = path.join(nodeDir, '..') + } else if (directoryUp === 'Release' || directoryUp === 'Debug') { // If we are a recently built node, and the directory structure // is that of a repository. If we are on Windows then we only need // to go one level up, everything else, two if (processObj.platform === 'win32') { - node_root_dir = path.join(node_dir, '..') + nodeRootDir = path.join(nodeDir, '..') } else { - node_root_dir = path.join(node_dir, '../..') + nodeRootDir = path.join(nodeDir, '../..') } } // Else return the default blank, "". 
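// A small worked example of the resolution above (illustrative paths only,
// not taken from this patch): for a typical global install on Linux,
// scriptLocation is .../lib/node_modules/npm/node_modules/node-gyp/lib, so
// npmParentDirectory ends in "node_modules" and the node root sits two
// levels above it:
//
//   const path = require('path')
//   const scriptLocation = '/usr/lib/node_modules/npm/node_modules/node-gyp/lib'
//   const npmParentDirectory = path.join(scriptLocation, '../../../..')
//   // => '/usr/lib/node_modules'
//   path.join(npmParentDirectory, '../..')
//   // => '/usr', the node root directory on non-win32 platforms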
} - return node_root_dir + return nodeRootDir } module.exports = findNodeDirectory diff --git a/node_modules/node-gyp/lib/find-python.js b/node_modules/node-gyp/lib/find-python.js new file mode 100644 index 0000000000000..e79bc50d5a1d5 --- /dev/null +++ b/node_modules/node-gyp/lib/find-python.js @@ -0,0 +1,321 @@ +'use strict' + +const path = require('path') +const log = require('npmlog') +const semver = require('semver') +const cp = require('child_process') +const extend = require('util')._extend // eslint-disable-line +const win = process.platform === 'win32' +const logWithPrefix = require('./util').logWithPrefix + +function PythonFinder (configPython, callback) { + this.callback = callback + this.configPython = configPython + this.errorLog = [] +} + +PythonFinder.prototype = { + log: logWithPrefix(log, 'find Python'), + argsExecutable: ['-c', 'import sys; print(sys.executable);'], + argsVersion: ['-c', 'import sys; print("%s.%s.%s" % sys.version_info[:3]);'], + semverRange: '^2.6.0 || >=3.5.0', + + // These can be overridden for testing: + execFile: cp.execFile, + env: process.env, + win: win, + pyLauncher: 'py.exe', + winDefaultLocations: [ + path.join(process.env.SystemDrive || 'C:', 'Python27', 'python.exe'), + path.join(process.env.SystemDrive || 'C:', 'Python37', 'python.exe') + ], + + // Logs a message at verbose level, but also saves it to be displayed later + // at error level if an error occurs. This should help diagnose the problem. + addLog: function addLog (message) { + this.log.verbose(message) + this.errorLog.push(message) + }, + + // Find Python by trying a sequence of possibilities. + // Ignore errors, keep trying until Python is found. + findPython: function findPython () { + const SKIP = 0; const FAIL = 1 + var toCheck = getChecks.apply(this) + + function getChecks () { + if (this.env.NODE_GYP_FORCE_PYTHON) { + return [{ + before: () => { + this.addLog( + 'checking Python explicitly set from NODE_GYP_FORCE_PYTHON') + this.addLog('- process.env.NODE_GYP_FORCE_PYTHON is ' + + `"${this.env.NODE_GYP_FORCE_PYTHON}"`) + }, + check: this.checkCommand, + arg: this.env.NODE_GYP_FORCE_PYTHON + }] + } + + var checks = [ + { + before: () => { + if (!this.configPython) { + this.addLog( + 'Python is not set from command line or npm configuration') + return SKIP + } + this.addLog('checking Python explicitly set from command line or ' + + 'npm configuration') + this.addLog('- "--python=" or "npm config get python" is ' + + `"${this.configPython}"`) + }, + check: this.checkCommand, + arg: this.configPython + }, + { + before: () => { + if (!this.env.PYTHON) { + this.addLog('Python is not set from environment variable ' + + 'PYTHON') + return SKIP + } + this.addLog('checking Python explicitly set from environment ' + + 'variable PYTHON') + this.addLog(`- process.env.PYTHON is "${this.env.PYTHON}"`) + }, + check: this.checkCommand, + arg: this.env.PYTHON + }, + { + before: () => { this.addLog('checking if "python" can be used') }, + check: this.checkCommand, + arg: 'python' + }, + { + before: () => { this.addLog('checking if "python2" can be used') }, + check: this.checkCommand, + arg: 'python2' + }, + { + before: () => { this.addLog('checking if "python3" can be used') }, + check: this.checkCommand, + arg: 'python3' + } + ] + + if (this.win) { + checks.push({ + before: () => { + this.addLog( + 'checking if the py launcher can be used to find Python 2') + }, + check: this.checkPyLauncher + }) + for (var i = 0; i < this.winDefaultLocations.length; ++i) { + const location = 
this.winDefaultLocations[i] + checks.push({ + before: () => { + this.addLog('checking if Python is ' + + `${location}`) + }, + check: this.checkExecPath, + arg: location + }) + } + } + + return checks + } + + function runChecks (err) { + this.log.silly('runChecks: err = %j', (err && err.stack) || err) + + const check = toCheck.shift() + if (!check) { + return this.fail() + } + + const before = check.before.apply(this) + if (before === SKIP) { + return runChecks.apply(this) + } + if (before === FAIL) { + return this.fail() + } + + const args = [runChecks.bind(this)] + if (check.arg) { + args.unshift(check.arg) + } + check.check.apply(this, args) + } + + runChecks.apply(this) + }, + + // Check if command is a valid Python to use. + // Will exit the Python finder on success. + // If on Windows, run in a CMD shell to support BAT/CMD launchers. + checkCommand: function checkCommand (command, errorCallback) { + var exec = command + var args = this.argsExecutable + var shell = false + if (this.win) { + // Arguments have to be manually quoted + exec = `"${exec}"` + args = args.map(a => `"${a}"`) + shell = true + } + + this.log.verbose(`- executing "${command}" to get executable path`) + this.run(exec, args, shell, function (err, execPath) { + // Possible outcomes: + // - Error: not in PATH, not executable or execution fails + // - Gibberish: the next command to check version will fail + // - Absolute path to executable + if (err) { + this.addLog(`- "${command}" is not in PATH or produced an error`) + return errorCallback(err) + } + this.addLog(`- executable path is "${execPath}"`) + this.checkExecPath(execPath, errorCallback) + }.bind(this)) + }, + + // Check if the py launcher can find a valid Python to use. + // Will exit the Python finder on success. + // Distributions of Python on Windows by default install with the "py.exe" + // Python launcher which is more likely to exist than the Python executable + // being in the $PATH. + // Because the Python launcher supports all versions of Python, we have to + // explicitly request a Python 2 version. This is done by supplying "-2" as + // the first command line argument. Since "py.exe -2" would be an invalid + // executable for "execFile", we have to use the launcher to figure out + // where the actual "python.exe" executable is located. + checkPyLauncher: function checkPyLauncher (errorCallback) { + this.log.verbose( + `- executing "${this.pyLauncher}" to get Python 2 executable path`) + this.run(this.pyLauncher, ['-2', ...this.argsExecutable], false, + function (err, execPath) { + // Possible outcomes: same as checkCommand + if (err) { + this.addLog( + `- "${this.pyLauncher}" is not in PATH or produced an error`) + return errorCallback(err) + } + this.addLog(`- executable path is "${execPath}"`) + this.checkExecPath(execPath, errorCallback) + }.bind(this)) + }, + + // Check if a Python executable is the correct version to use. + // Will exit the Python finder on success. 
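// As a concrete illustration (example values, not code from this module),
// the semverRange declared above accepts Python 2.6/2.7 and 3.5 or newer:
//
//   const semver = require('semver')
//   semver.satisfies('2.7.16', '^2.6.0 || >=3.5.0') // => true
//   semver.satisfies('3.7.4', '^2.6.0 || >=3.5.0')  // => true
//   semver.satisfies('3.4.0', '^2.6.0 || >=3.5.0')  // => false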
+ checkExecPath: function checkExecPath (execPath, errorCallback) { + this.log.verbose(`- executing "${execPath}" to get version`) + this.run(execPath, this.argsVersion, false, function (err, version) { + // Possible outcomes: + // - Error: executable can not be run (likely meaning the command wasn't + // a Python executable and the previous command produced gibberish) + // - Gibberish: somehow the last command produced an executable path, + // this will fail when verifying the version + // - Version of the Python executable + if (err) { + this.addLog(`- "${execPath}" could not be run`) + return errorCallback(err) + } + this.addLog(`- version is "${version}"`) + + const range = semver.Range(this.semverRange) + var valid = false + try { + valid = range.test(version) + } catch (err) { + this.log.silly('range.test() threw:\n%s', err.stack) + this.addLog(`- "${execPath}" does not have a valid version`) + this.addLog('- is it a Python executable?') + return errorCallback(err) + } + + if (!valid) { + this.addLog(`- version is ${version} - should be ${this.semverRange}`) + this.addLog('- THIS VERSION OF PYTHON IS NOT SUPPORTED') + return errorCallback(new Error( + `Found unsupported Python version ${version}`)) + } + this.succeed(execPath, version) + }.bind(this)) + }, + + // Run an executable or shell command, trimming the output. + run: function run (exec, args, shell, callback) { + var env = extend({}, this.env) + env.TERM = 'dumb' + const opts = { env: env, shell: shell } + + this.log.silly('execFile: exec = %j', exec) + this.log.silly('execFile: args = %j', args) + this.log.silly('execFile: opts = %j', opts) + try { + this.execFile(exec, args, opts, execFileCallback.bind(this)) + } catch (err) { + this.log.silly('execFile: threw:\n%s', err.stack) + return callback(err) + } + + function execFileCallback (err, stdout, stderr) { + this.log.silly('execFile result: err = %j', (err && err.stack) || err) + this.log.silly('execFile result: stdout = %j', stdout) + this.log.silly('execFile result: stderr = %j', stderr) + if (err) { + return callback(err) + } + const execPath = stdout.trim() + callback(null, execPath) + } + }, + + succeed: function succeed (execPath, version) { + this.log.info(`using Python version ${version} found at "${execPath}"`) + process.nextTick(this.callback.bind(null, null, execPath)) + }, + + fail: function fail () { + const errorLog = this.errorLog.join('\n') + + const pathExample = this.win ? 'C:\\Path\\To\\python.exe' + : '/path/to/pythonexecutable' + // For Windows 80 col console, use up to the column before the one marked + // with X (total 79 chars including logger prefix, 58 chars usable here): + // X + const info = [ + '**********************************************************', + 'You need to install the latest version of Python.', + 'Node-gyp should be able to find and use Python. 
If not,', + 'you can try one of the following options:', + `- Use the switch --python="${pathExample}"`, + ' (accepted by both node-gyp and npm)', + '- Set the environment variable PYTHON', + '- Set the npm configuration variable python:', + ` npm config set python "${pathExample}"`, + 'For more information consult the documentation at:', + 'https://github.com/nodejs/node-gyp#installation', + '**********************************************************' + ].join('\n') + + this.log.error(`\n${errorLog}\n\n${info}\n`) + process.nextTick(this.callback.bind(null, new Error( + 'Could not find any Python installation to use'))) + } +} + +function findPython (configPython, callback) { + var finder = new PythonFinder(configPython, callback) + finder.findPython() +} + +module.exports = findPython +module.exports.test = { + PythonFinder: PythonFinder, + findPython: findPython +} diff --git a/node_modules/node-gyp/lib/find-visualstudio.js b/node_modules/node-gyp/lib/find-visualstudio.js new file mode 100644 index 0000000000000..c5d26f9a202ca --- /dev/null +++ b/node_modules/node-gyp/lib/find-visualstudio.js @@ -0,0 +1,434 @@ +'use strict' + +const log = require('npmlog') +const execFile = require('child_process').execFile +const path = require('path').win32 +const logWithPrefix = require('./util').logWithPrefix +const regSearchKeys = require('./util').regSearchKeys + +function findVisualStudio (nodeSemver, configMsvsVersion, callback) { + const finder = new VisualStudioFinder(nodeSemver, configMsvsVersion, + callback) + finder.findVisualStudio() +} + +function VisualStudioFinder (nodeSemver, configMsvsVersion, callback) { + this.nodeSemver = nodeSemver + this.configMsvsVersion = configMsvsVersion + this.callback = callback + this.errorLog = [] + this.validVersions = [] +} + +VisualStudioFinder.prototype = { + log: logWithPrefix(log, 'find VS'), + + regSearchKeys: regSearchKeys, + + // Logs a message at verbose level, but also saves it to be displayed later + // at error level if an error occurs. This should help diagnose the problem. 
+ addLog: function addLog (message) { + this.log.verbose(message) + this.errorLog.push(message) + }, + + findVisualStudio: function findVisualStudio () { + this.configVersionYear = null + this.configPath = null + if (this.configMsvsVersion) { + this.addLog('msvs_version was set from command line or npm config') + if (this.configMsvsVersion.match(/^\d{4}$/)) { + this.configVersionYear = parseInt(this.configMsvsVersion, 10) + this.addLog( + `- looking for Visual Studio version ${this.configVersionYear}`) + } else { + this.configPath = path.resolve(this.configMsvsVersion) + this.addLog( + `- looking for Visual Studio installed in "${this.configPath}"`) + } + } else { + this.addLog('msvs_version not set from command line or npm config') + } + + if (process.env.VCINSTALLDIR) { + this.envVcInstallDir = + path.resolve(process.env.VCINSTALLDIR, '..') + this.addLog('running in VS Command Prompt, installation path is:\n' + + `"${this.envVcInstallDir}"\n- will only use this version`) + } else { + this.addLog('VCINSTALLDIR not set, not running in VS Command Prompt') + } + + this.findVisualStudio2017OrNewer((info) => { + if (info) { + return this.succeed(info) + } + this.findVisualStudio2015((info) => { + if (info) { + return this.succeed(info) + } + this.findVisualStudio2013((info) => { + if (info) { + return this.succeed(info) + } + this.fail() + }) + }) + }) + }, + + succeed: function succeed (info) { + this.log.info(`using VS${info.versionYear} (${info.version}) found at:` + + `\n"${info.path}"` + + '\nrun with --verbose for detailed information') + process.nextTick(this.callback.bind(null, null, info)) + }, + + fail: function fail () { + if (this.configMsvsVersion && this.envVcInstallDir) { + this.errorLog.push( + 'msvs_version does not match this VS Command Prompt or the', + 'installation cannot be used.') + } else if (this.configMsvsVersion) { + // If msvs_version was specified but finding VS failed, print what would + // have been accepted + this.errorLog.push('') + if (this.validVersions) { + this.errorLog.push('valid versions for msvs_version:') + this.validVersions.forEach((version) => { + this.errorLog.push(`- "${version}"`) + }) + } else { + this.errorLog.push('no valid versions for msvs_version were found') + } + } + + const errorLog = this.errorLog.join('\n') + + // For Windows 80 col console, use up to the column before the one marked + // with X (total 79 chars including logger prefix, 62 chars usable here): + // X + const infoLog = [ + '**************************************************************', + 'You need to install the latest version of Visual Studio', + 'including the "Desktop development with C++" workload.', + 'For more information consult the documentation at:', + 'https://github.com/nodejs/node-gyp#on-windows', + '**************************************************************' + ].join('\n') + + this.log.error(`\n${errorLog}\n\n${infoLog}\n`) + process.nextTick(this.callback.bind(null, new Error( + 'Could not find any Visual Studio installation to use'))) + }, + + // Invoke the PowerShell script to get information about Visual Studio 2017 + // or newer installations + findVisualStudio2017OrNewer: function findVisualStudio2017OrNewer (cb) { + var ps = path.join(process.env.SystemRoot, 'System32', + 'WindowsPowerShell', 'v1.0', 'powershell.exe') + var csFile = path.join(__dirname, 'Find-VisualStudio.cs') + var psArgs = [ + '-ExecutionPolicy', + 'Unrestricted', + '-NoProfile', + '-Command', + '&{Add-Type -Path \'' + csFile + '\';' + 
'[VisualStudioConfiguration.Main]::PrintJson()}' + ] + + this.log.silly('Running', ps, psArgs) + var child = execFile(ps, psArgs, { encoding: 'utf8' }, + (err, stdout, stderr) => { + this.parseData(err, stdout, stderr, cb) + }) + child.stdin.end() + }, + + // Parse the output of the PowerShell script and look for an installation + // of Visual Studio 2017 or newer to use + parseData: function parseData (err, stdout, stderr, cb) { + this.log.silly('PS stderr = %j', stderr) + + const failPowershell = () => { + this.addLog( + 'could not use PowerShell to find Visual Studio 2017 or newer') + cb(null) + } + + if (err) { + this.log.silly('PS err = %j', err && (err.stack || err)) + return failPowershell() + } + + var vsInfo + try { + vsInfo = JSON.parse(stdout) + } catch (e) { + this.log.silly('PS stdout = %j', stdout) + this.log.silly(e) + return failPowershell() + } + + if (!Array.isArray(vsInfo)) { + this.log.silly('PS stdout = %j', stdout) + return failPowershell() + } + + vsInfo = vsInfo.map((info) => { + this.log.silly(`processing installation: "${info.path}"`) + info.path = path.resolve(info.path) + var ret = this.getVersionInfo(info) + ret.path = info.path + ret.msBuild = this.getMSBuild(info, ret.versionYear) + ret.toolset = this.getToolset(info, ret.versionYear) + ret.sdk = this.getSDK(info) + return ret + }) + this.log.silly('vsInfo:', vsInfo) + + // Remove future versions or errors parsing version number + vsInfo = vsInfo.filter((info) => { + if (info.versionYear) { + return true + } + this.addLog(`unknown version "${info.version}" found at "${info.path}"`) + return false + }) + + // Sort to place newer versions first + vsInfo.sort((a, b) => b.versionYear - a.versionYear) + + for (var i = 0; i < vsInfo.length; ++i) { + const info = vsInfo[i] + this.addLog(`checking VS${info.versionYear} (${info.version}) found ` + + `at:\n"${info.path}"`) + + if (info.msBuild) { + this.addLog('- found "Visual Studio C++ core features"') + } else { + this.addLog('- "Visual Studio C++ core features" missing') + continue + } + + if (info.toolset) { + this.addLog(`- found VC++ toolset: ${info.toolset}`) + } else { + this.addLog('- missing any VC++ toolset') + continue + } + + if (info.sdk) { + this.addLog(`- found Windows SDK: ${info.sdk}`) + } else { + this.addLog('- missing any Windows SDK') + continue + } + + if (!this.checkConfigVersion(info.versionYear, info.path)) { + continue + } + + return cb(info) + } + + this.addLog( + 'could not find a version of Visual Studio 2017 or newer to use') + cb(null) + }, + + // Helper - process version information + getVersionInfo: function getVersionInfo (info) { + const match = /^(\d+)\.(\d+)\..*/.exec(info.version) + if (!match) { + this.log.silly('- failed to parse version:', info.version) + return {} + } + this.log.silly('- version match = %j', match) + var ret = { + version: info.version, + versionMajor: parseInt(match[1], 10), + versionMinor: parseInt(match[2], 10) + } + if (ret.versionMajor === 15) { + ret.versionYear = 2017 + return ret + } + if (ret.versionMajor === 16) { + ret.versionYear = 2019 + return ret + } + this.log.silly('- unsupported version:', ret.versionMajor) + return {} + }, + + // Helper - process MSBuild information + getMSBuild: function getMSBuild (info, versionYear) { + const pkg = 'Microsoft.VisualStudio.VC.MSBuild.Base' + if (info.packages.indexOf(pkg) !== -1) { + this.log.silly('- found VC.MSBuild.Base') + if (versionYear === 2017) { + return path.join(info.path, 'MSBuild', '15.0', 'Bin', 'MSBuild.exe') + } + if (versionYear === 2019) 
{ + return path.join(info.path, 'MSBuild', 'Current', 'Bin', 'MSBuild.exe') + } + } + return null + }, + + // Helper - process toolset information + getToolset: function getToolset (info, versionYear) { + const pkg = 'Microsoft.VisualStudio.Component.VC.Tools.x86.x64' + const express = 'Microsoft.VisualStudio.WDExpress' + + if (info.packages.indexOf(pkg) !== -1) { + this.log.silly('- found VC.Tools.x86.x64') + } else if (info.packages.indexOf(express) !== -1) { + this.log.silly('- found Visual Studio Express (looking for toolset)') + } else { + return null + } + + if (versionYear === 2017) { + return 'v141' + } else if (versionYear === 2019) { + return 'v142' + } + this.log.silly('- invalid versionYear:', versionYear) + return null + }, + + // Helper - process Windows SDK information + getSDK: function getSDK (info) { + const win8SDK = 'Microsoft.VisualStudio.Component.Windows81SDK' + const win10SDKPrefix = 'Microsoft.VisualStudio.Component.Windows10SDK.' + + var Win10SDKVer = 0 + info.packages.forEach((pkg) => { + if (!pkg.startsWith(win10SDKPrefix)) { + return + } + const parts = pkg.split('.') + if (parts.length > 5 && parts[5] !== 'Desktop') { + this.log.silly('- ignoring non-Desktop Win10SDK:', pkg) + return + } + const foundSdkVer = parseInt(parts[4], 10) + if (isNaN(foundSdkVer)) { + // Microsoft.VisualStudio.Component.Windows10SDK.IpOverUsb + this.log.silly('- failed to parse Win10SDK number:', pkg) + return + } + this.log.silly('- found Win10SDK:', foundSdkVer) + Win10SDKVer = Math.max(Win10SDKVer, foundSdkVer) + }) + + if (Win10SDKVer !== 0) { + return `10.0.${Win10SDKVer}.0` + } else if (info.packages.indexOf(win8SDK) !== -1) { + this.log.silly('- found Win8SDK') + return '8.1' + } + return null + }, + + // Find an installation of Visual Studio 2015 to use + findVisualStudio2015: function findVisualStudio2015 (cb) { + return this.findOldVS({ + version: '14.0', + versionMajor: 14, + versionMinor: 0, + versionYear: 2015, + toolset: 'v140' + }, cb) + }, + + // Find an installation of Visual Studio 2013 to use + findVisualStudio2013: function findVisualStudio2013 (cb) { + if (this.nodeSemver.major >= 9) { + this.addLog( + 'not looking for VS2013 as it is only supported up to Node.js 8') + return cb(null) + } + return this.findOldVS({ + version: '12.0', + versionMajor: 12, + versionMinor: 0, + versionYear: 2013, + toolset: 'v120' + }, cb) + }, + + // Helper - common code for VS2013 and VS2015 + findOldVS: function findOldVS (info, cb) { + const regVC7 = ['HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7', + 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7'] + const regMSBuild = 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions' + + this.addLog(`looking for Visual Studio ${info.versionYear}`) + this.regSearchKeys(regVC7, info.version, [], (err, res) => { + if (err) { + this.addLog('- not found') + return cb(null) + } + + const vsPath = path.resolve(res, '..') + this.addLog(`- found in "${vsPath}"`) + + const msBuildRegOpts = process.arch === 'ia32' ? 
[] : ['/reg:32'] + this.regSearchKeys([`${regMSBuild}\\${info.version}`], + 'MSBuildToolsPath', msBuildRegOpts, (err, res) => { + if (err) { + this.addLog( + '- could not find MSBuild in registry for this version') + return cb(null) + } + + const msBuild = path.join(res, 'MSBuild.exe') + this.addLog(`- MSBuild in "${msBuild}"`) + + if (!this.checkConfigVersion(info.versionYear, vsPath)) { + return cb(null) + } + + info.path = vsPath + info.msBuild = msBuild + info.sdk = null + cb(info) + }) + }) + }, + + // After finding a usable version of Visual Studio: + // - add it to validVersions to be displayed at the end if a specific + // version was requested and not found; + // - check if this is the version that was requested. + // - check if this matches the Visual Studio Command Prompt + checkConfigVersion: function checkConfigVersion (versionYear, vsPath) { + this.validVersions.push(versionYear) + this.validVersions.push(vsPath) + + if (this.configVersionYear && this.configVersionYear !== versionYear) { + this.addLog('- msvs_version does not match this version') + return false + } + if (this.configPath && + path.relative(this.configPath, vsPath) !== '') { + this.addLog('- msvs_version does not point to this installation') + return false + } + if (this.envVcInstallDir && + path.relative(this.envVcInstallDir, vsPath) !== '') { + this.addLog('- does not match this Visual Studio Command Prompt') + return false + } + + return true + } +} + +module.exports = findVisualStudio +module.exports.test = { + VisualStudioFinder: VisualStudioFinder, + findVisualStudio: findVisualStudio +} diff --git a/node_modules/node-gyp/lib/find-vs2017.js b/node_modules/node-gyp/lib/find-vs2017.js deleted file mode 100644 index ad46ceaf88c98..0000000000000 --- a/node_modules/node-gyp/lib/find-vs2017.js +++ /dev/null @@ -1,46 +0,0 @@ -var log = require('npmlog') - , execFile = require('child_process').execFile - , path = require('path') - -function findVS2017(callback) { - var ps = path.join(process.env.SystemRoot, 'System32', 'WindowsPowerShell', - 'v1.0', 'powershell.exe') - var csFile = path.join(__dirname, 'Find-VS2017.cs') - var psArgs = ['-ExecutionPolicy', 'Unrestricted', '-NoProfile', - '-Command', '&{Add-Type -Path \'' + csFile + '\';' + - '[VisualStudioConfiguration.Main]::Query()}'] - - log.silly('find vs2017', 'Running', ps, psArgs) - var child = execFile(ps, psArgs, { encoding: 'utf8' }, - function (err, stdout, stderr) { - log.silly('find vs2017', 'PS err:', err) - log.silly('find vs2017', 'PS stdout:', stdout) - log.silly('find vs2017', 'PS stderr:', stderr) - - if (err) - return callback(new Error('Could not use PowerShell to find VS2017')) - - var vsSetup - try { - vsSetup = JSON.parse(stdout) - } catch (e) { - log.silly('find vs2017', e) - return callback(new Error('Could not use PowerShell to find VS2017')) - } - log.silly('find vs2017', 'vsSetup:', vsSetup) - - if (vsSetup && vsSetup.log) - log.verbose('find vs2017', vsSetup.log.trimRight()) - - if (!vsSetup || !vsSetup.path || !vsSetup.sdk) { - return callback(new Error('No usable installation of VS2017 found')) - } - - log.verbose('find vs2017', 'using installation:', vsSetup.path) - callback(null, { "path": vsSetup.path, "sdk": vsSetup.sdk }) - }) - - child.stdin.end() -} - -module.exports = findVS2017 diff --git a/node_modules/node-gyp/lib/install.js b/node_modules/node-gyp/lib/install.js index cb84972e18325..f68cd7fd6d2a5 100644 --- a/node_modules/node-gyp/lib/install.js +++ b/node_modules/node-gyp/lib/install.js @@ -1,46 +1,30 @@ -module.exports 
= exports = function (gyp, argv, callback) { - return install(fs, gyp, argv, callback) -} - -module.exports.test = { - download: download, - install: install, - readCAFile: readCAFile, -} - -exports.usage = 'Install node development files for the specified node version.' - -/** - * Module dependencies. - */ - -var fs = require('graceful-fs') - , osenv = require('osenv') - , tar = require('tar') - , rm = require('rimraf') - , path = require('path') - , crypto = require('crypto') - , zlib = require('zlib') - , log = require('npmlog') - , semver = require('semver') - , fstream = require('fstream') - , request = require('request') - , mkdir = require('mkdirp') - , processRelease = require('./process-release') - , win = process.platform == 'win32' +'use strict' + +const fs = require('graceful-fs') +const os = require('os') +const tar = require('tar') +const path = require('path') +const crypto = require('crypto') +const log = require('npmlog') +const semver = require('semver') +const request = require('request') +const mkdir = require('mkdirp') +const processRelease = require('./process-release') +const win = process.platform === 'win32' function install (fs, gyp, argv, callback) { - var release = processRelease(argv, gyp, process.version, process.release) // ensure no double-callbacks happen function cb (err) { - if (cb.done) return + if (cb.done) { + return + } cb.done = true if (err) { log.warn('install', 'got an error, rolling back install') // roll-back the install if anything went wrong - gyp.commands.remove([ release.versionDir ], function (err2) { + gyp.commands.remove([release.versionDir], function () { callback(err) }) } else { @@ -82,12 +66,12 @@ function install (fs, gyp, argv, callback) { // check if it is already installed, and only install when needed if (gyp.opts.ensure) { log.verbose('install', '--ensure was passed, so won\'t reinstall if already installed') - fs.stat(devDir, function (err, stat) { + fs.stat(devDir, function (err) { if (err) { - if (err.code == 'ENOENT') { + if (err.code === 'ENOENT') { log.verbose('install', 'version not already installed, continuing with install', release.version) go() - } else if (err.code == 'EACCES') { + } else if (err.code === 'EACCES') { eaccesFallback(err) } else { cb(err) @@ -97,7 +81,7 @@ function install (fs, gyp, argv, callback) { log.verbose('install', 'version is already installed, need to check "installVersion"') var installVersionFile = path.resolve(devDir, 'installVersion') fs.readFile(installVersionFile, 'ascii', function (err, ver) { - if (err && err.code != 'ENOENT') { + if (err && err.code !== 'ENOENT') { return cb(err) } var installVersion = parseInt(ver, 10) || 0 @@ -116,7 +100,7 @@ function install (fs, gyp, argv, callback) { go() } - function getContentSha(res, callback) { + function getContentSha (res, callback) { var shasum = crypto.createHash('sha256') res.on('data', function (chunk) { shasum.update(chunk) @@ -126,13 +110,12 @@ function install (fs, gyp, argv, callback) { } function go () { - log.verbose('ensuring nodedir is created', devDir) // first create the dir for the node dev files mkdir(devDir, function (err, created) { if (err) { - if (err.code == 'EACCES') { + if (err.code === 'EACCES') { eaccesFallback(err) } else { cb(err) @@ -147,42 +130,32 @@ function install (fs, gyp, argv, callback) { // now download the node tarball var tarPath = gyp.opts.tarball var badDownload = false - , extractCount = 0 - , gunzip = zlib.createGunzip() - , extracter = tar.Extract({ path: devDir, strip: 1, filter: isValid }) - + 
var extractCount = 0 var contentShasums = {} var expectShasums = {} // checks if a file to be extracted from the tarball is valid. // only .h header files and the gyp files get extracted - function isValid () { - var name = this.path.substring(devDir.length + 1) - var isValid = valid(name) - if (name === '' && this.type === 'Directory') { - // the first directory entry is ok - return true - } + function isValid (path) { + var isValid = valid(path) if (isValid) { - log.verbose('extracted file from tarball', name) + log.verbose('extracted file from tarball', path) extractCount++ } else { // invalid - log.silly('ignoring from tarball', name) + log.silly('ignoring from tarball', path) } return isValid } - gunzip.on('error', cb) - extracter.on('error', cb) - extracter.on('end', afterTarball) - - // download the tarball, gunzip and extract! - + // download the tarball and extract! if (tarPath) { - var input = fs.createReadStream(tarPath) - input.pipe(gunzip).pipe(extracter) - return + return tar.extract({ + file: tarPath, + strip: 1, + filter: isValid, + cwd: devDir + }).then(afterTarball, cb) } try { @@ -222,12 +195,18 @@ function install (fs, gyp, argv, callback) { }) // start unzipping and untaring - req.pipe(gunzip).pipe(extracter) + res.pipe(tar.extract({ + strip: 1, + cwd: devDir, + filter: isValid + }).on('close', afterTarball).on('error', cb)) }) // invoked after the tarball has finished being extracted function afterTarball () { - if (badDownload) return + if (badDownload) { + return + } if (extractCount === 0) { return cb(new Error('There was a fatal problem while downloading/extracting the tarball')) } @@ -245,8 +224,8 @@ function install (fs, gyp, argv, callback) { var installVersionPath = path.resolve(devDir, 'installVersion') fs.writeFile(installVersionPath, gyp.package.installVersion + '\n', deref) - // Only download SHASUMS.txt if not using tarPath override - if (!tarPath) { + // Only download SHASUMS.txt if we downloaded something in need of SHA verification + if (!tarPath || win) { // download SHASUMS.txt async++ downloadShasums(deref) @@ -258,7 +237,9 @@ function install (fs, gyp, argv, callback) { } function deref (err) { - if (err) return cb(err) + if (err) { + return cb(err) + } async-- if (!async) { @@ -276,10 +257,8 @@ function install (fs, gyp, argv, callback) { } } - function downloadShasums(done) { + function downloadShasums (done) { log.verbose('check download content checksum, need to download `SHASUMS256.txt`...') - var shasumsPath = path.resolve(devDir, 'SHASUMS256.txt') - log.verbose('checksum url', release.shasumsUrl) try { var req = download(gyp, process.env, release.shasumsUrl) @@ -302,7 +281,9 @@ function install (fs, gyp, argv, callback) { var lines = Buffer.concat(chunks).toString().trim().split('\n') lines.forEach(function (line) { var items = line.trim().split(/\s+/) - if (items.length !== 2) return + if (items.length !== 2) { + return + } // 0035d18e2dcf9aad669b1c7c07319e17abfe3762 ./node-v0.11.4.tar.gz var name = items[1].replace(/^\.\//, '') @@ -317,81 +298,58 @@ function install (fs, gyp, argv, callback) { function downloadNodeLib (done) { log.verbose('on Windows; need to download `' + release.name + '.lib`...') - var dir32 = path.resolve(devDir, 'ia32') - , dir64 = path.resolve(devDir, 'x64') - , libPath32 = path.resolve(dir32, release.name + '.lib') - , libPath64 = path.resolve(dir64, release.name + '.lib') - - log.verbose('32-bit ' + release.name + '.lib dir', dir32) - log.verbose('64-bit ' + release.name + '.lib dir', dir64) - log.verbose('`' + 
release.name + '.lib` 32-bit url', release.libUrl32) - log.verbose('`' + release.name + '.lib` 64-bit url', release.libUrl64) - - var async = 2 - mkdir(dir32, function (err) { - if (err) return done(err) - log.verbose('streaming 32-bit ' + release.name + '.lib to:', libPath32) - - try { - var req = download(gyp, process.env, release.libUrl32, cb) - } catch (e) { - return cb(e) - } - - req.on('error', done) - req.on('response', function (res) { - if (res.statusCode !== 200) { - done(new Error(res.statusCode + ' status code downloading 32-bit ' + release.name + '.lib')) - return + var archs = ['ia32', 'x64', 'arm64'] + var async = archs.length + archs.forEach(function (arch) { + var dir = path.resolve(devDir, arch) + var targetLibPath = path.resolve(dir, release.name + '.lib') + var libUrl = release[arch].libUrl + var libPath = release[arch].libPath + var name = arch + ' ' + release.name + '.lib' + log.verbose(name, 'dir', dir) + log.verbose(name, 'url', libUrl) + + mkdir(dir, function (err) { + if (err) { + return done(err) } + log.verbose('streaming', name, 'to:', targetLibPath) - getContentSha(res, function (_, checksum) { - contentShasums[release.libPath32] = checksum - log.verbose('content checksum', release.libPath32, checksum) - }) + try { + var req = download(gyp, process.env, libUrl, cb) + } catch (e) { + return cb(e) + } - var ws = fs.createWriteStream(libPath32) - ws.on('error', cb) - req.pipe(ws) - }) - req.on('end', function () { - --async || done() - }) - }) - mkdir(dir64, function (err) { - if (err) return done(err) - log.verbose('streaming 64-bit ' + release.name + '.lib to:', libPath64) - - try { - var req = download(gyp, process.env, release.libUrl64, cb) - } catch (e) { - return cb(e) - } + req.on('error', done) + req.on('response', function (res) { + if (res.statusCode === 403 || res.statusCode === 404) { + if (arch === 'arm64') { + // Arm64 is a newer platform on Windows and not all node distributions provide it. 
+ log.verbose(`${name} was not found in ${libUrl}`) + } else { + log.warn(`${name} was not found in ${libUrl}`) + } + return + } else if (res.statusCode !== 200) { + done(new Error(res.statusCode + ' status code downloading ' + name)) + return + } - req.on('error', done) - req.on('response', function (res) { - if (res.statusCode !== 200) { - done(new Error(res.statusCode + ' status code downloading 64-bit ' + release.name + '.lib')) - return - } + getContentSha(res, function (_, checksum) { + contentShasums[libPath] = checksum + log.verbose('content checksum', libPath, checksum) + }) - getContentSha(res, function (_, checksum) { - contentShasums[release.libPath64] = checksum - log.verbose('content checksum', release.libPath64, checksum) + var ws = fs.createWriteStream(targetLibPath) + ws.on('error', cb) + req.pipe(ws) }) - - var ws = fs.createWriteStream(libPath64) - ws.on('error', cb) - req.pipe(ws) - }) - req.on('end', function () { - --async || done() + req.on('end', function () { --async || done() }) }) }) } // downloadNodeLib() - }) // mkdir() - } // go() /** @@ -400,8 +358,8 @@ function install (fs, gyp, argv, callback) { function valid (file) { // header files - var extname = path.extname(file); - return extname === '.h' || extname === '.gypi'; + var extname = path.extname(file) + return extname === '.h' || extname === '.gypi' } /** @@ -415,28 +373,35 @@ function install (fs, gyp, argv, callback) { function eaccesFallback (err) { var noretry = '--node_gyp_internal_noretry' - if (-1 !== argv.indexOf(noretry)) return cb(err) - var tmpdir = osenv.tmpdir() + if (argv.indexOf(noretry) !== -1) { + return cb(err) + } + var tmpdir = os.tmpdir() gyp.devDir = path.resolve(tmpdir, '.node-gyp') - log.warn('EACCES', 'user "%s" does not have permission to access the dev dir "%s"', osenv.user(), devDir) + var userString = '' + try { + // os.userInfo can fail on some systems, it's not critical here + userString = ` ("${os.userInfo().username}")` + } catch (e) {} + log.warn('EACCES', 'current user%s does not have permission to access the dev dir "%s"', userString, devDir) log.warn('EACCES', 'attempting to reinstall using temporary dev dir "%s"', gyp.devDir) - if (process.cwd() == tmpdir) { + if (process.cwd() === tmpdir) { log.verbose('tmpdir == cwd', 'automatically will remove dev files after to save disk space') gyp.todo.push({ name: 'remove', args: argv }) } gyp.commands.install([noretry].concat(argv), cb) } - } function download (gyp, env, url) { log.http('GET', url) var requestOpts = { - uri: url - , headers: { - 'User-Agent': 'node-gyp v' + gyp.version + ' (node ' + process.version + ')' - } + uri: url, + headers: { + 'User-Agent': 'node-gyp v' + gyp.version + ' (node ' + process.version + ')', + Connection: 'keep-alive' + } } var cafile = gyp.opts.cafile @@ -445,10 +410,10 @@ function download (gyp, env, url) { } // basic support for a proxy server - var proxyUrl = gyp.opts.proxy - || env.http_proxy - || env.HTTP_PROXY - || env.npm_config_proxy + var proxyUrl = gyp.opts.proxy || + env.http_proxy || + env.HTTP_PROXY || + env.npm_config_proxy if (proxyUrl) { if (/^https?:\/\//i.test(proxyUrl)) { log.verbose('download', 'using proxy url: "%s"', proxyUrl) @@ -473,3 +438,13 @@ function readCAFile (filename) { var re = /(-----BEGIN CERTIFICATE-----[\S\s]*?-----END CERTIFICATE-----)/g return ca.match(re) } + +module.exports = function (gyp, argv, callback) { + return install(fs, gyp, argv, callback) +} +module.exports.test = { + download: download, + install: install, + readCAFile: readCAFile +} 
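// A minimal standalone sketch (illustrative only, not part of this patch) of
// the streaming SHA-256 hashing that getContentSha() above performs; `stream`
// can be any readable stream, such as the HTTP response being downloaded:
function sha256OfStream (stream, callback) {
  const shasum = require('crypto').createHash('sha256')
  stream.on('data', function (chunk) { shasum.update(chunk) })
  stream.on('end', function () { callback(null, shasum.digest('hex')) })
}
// The resulting hex digest is what install.js records in contentShasums and
// later compares against the entries parsed out of SHASUMS256.txt.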
+module.exports.usage = 'Install node development files for the specified node version.' diff --git a/node_modules/node-gyp/lib/list.js b/node_modules/node-gyp/lib/list.js index 9d680a56a434a..405ebc0d88959 100644 --- a/node_modules/node-gyp/lib/list.js +++ b/node_modules/node-gyp/lib/list.js @@ -1,33 +1,27 @@ +'use strict' -module.exports = exports = list - -exports.usage = 'Prints a listing of the currently installed node development files' - -/** - * Module dependencies. - */ - -var fs = require('graceful-fs') - , path = require('path') - , log = require('npmlog') +const fs = require('graceful-fs') +const log = require('npmlog') function list (gyp, args, callback) { - var devDir = gyp.devDir log.verbose('list', 'using node-gyp dir:', devDir) - // readdir() the node-gyp dir fs.readdir(devDir, onreaddir) function onreaddir (err, versions) { - if (err && err.code != 'ENOENT') { + if (err && err.code !== 'ENOENT') { return callback(err) } + if (Array.isArray(versions)) { - versions = versions.filter(function (v) { return v != 'current' }) + versions = versions.filter(function (v) { return v !== 'current' }) } else { versions = [] } callback(null, versions) } } + +module.exports = list +module.exports.usage = 'Prints a listing of the currently installed node development files' diff --git a/node_modules/node-gyp/lib/node-gyp.js b/node_modules/node-gyp/lib/node-gyp.js index a841161e32eec..9d24103900f70 100644 --- a/node_modules/node-gyp/lib/node-gyp.js +++ b/node_modules/node-gyp/lib/node-gyp.js @@ -1,44 +1,30 @@ - -/** - * Module exports. - */ - -module.exports = exports = gyp - -/** - * Module dependencies. - */ - -var fs = require('graceful-fs') - , path = require('path') - , nopt = require('nopt') - , log = require('npmlog') - , child_process = require('child_process') - , EE = require('events').EventEmitter - , inherits = require('util').inherits - , commands = [ - // Module build commands - 'build' - , 'clean' - , 'configure' - , 'rebuild' - // Development Header File management commands - , 'install' - , 'list' - , 'remove' - ] - , aliases = { - 'ls': 'list' - , 'rm': 'remove' - } +'use strict' + +const path = require('path') +const nopt = require('nopt') +const log = require('npmlog') +const childProcess = require('child_process') +const EE = require('events').EventEmitter +const inherits = require('util').inherits +const commands = [ + // Module build commands + 'build', + 'clean', + 'configure', + 'rebuild', + // Development Header File management commands + 'install', + 'list', + 'remove' +] +const aliases = { + ls: 'list', + rm: 'remove' +} // differentiate node-gyp's logs from npm's log.heading = 'gyp' -/** - * The `gyp` function. - */ - function gyp () { return new Gyp() } @@ -64,31 +50,31 @@ var proto = Gyp.prototype * Export the contents of the package.json. 
*/ -proto.package = require('../package') +proto.package = require('../package.json') /** * nopt configuration definitions */ proto.configDefs = { - help: Boolean // everywhere - , arch: String // 'configure' - , cafile: String // 'install' - , debug: Boolean // 'build' - , directory: String // bin - , make: String // 'build' - , msvs_version: String // 'configure' - , ensure: Boolean // 'install' - , solution: String // 'build' (windows only) - , proxy: String // 'install' - , devdir: String // everywhere - , nodedir: String // 'configure' - , loglevel: String // everywhere - , python: String // 'configure' - , 'dist-url': String // 'install' - , 'tarball': String // 'install' - , jobs: String // 'build' - , thin: String // 'configure' + help: Boolean, // everywhere + arch: String, // 'configure' + cafile: String, // 'install' + debug: Boolean, // 'build' + directory: String, // bin + make: String, // 'build' + msvs_version: String, // 'configure' + ensure: Boolean, // 'install' + solution: String, // 'build' (windows only) + proxy: String, // 'install' + devdir: String, // everywhere + nodedir: String, // 'configure' + loglevel: String, // everywhere + python: String, // 'configure' + 'dist-url': String, // 'install' + tarball: String, // 'install' + jobs: String, // 'build' + thin: String // 'configure' } /** @@ -96,13 +82,13 @@ proto.configDefs = { */ proto.shorthands = { - release: '--no-debug' - , C: '--directory' - , debug: '--debug' - , j: '--jobs' - , silly: '--loglevel=silly' - , verbose: '--loglevel=verbose' - , silent: '--loglevel=silent' + release: '--no-debug', + C: '--directory', + debug: '--debug', + j: '--jobs', + silly: '--loglevel=silly', + verbose: '--loglevel=verbose', + silent: '--loglevel=silent' } /** @@ -147,18 +133,22 @@ proto.parseArgv = function parseOpts (argv) { } // support for inheriting config env variables from npm - var npm_config_prefix = 'npm_config_' + var npmConfigPrefix = 'npm_config_' Object.keys(process.env).forEach(function (name) { - if (name.indexOf(npm_config_prefix) !== 0) return + if (name.indexOf(npmConfigPrefix) !== 0) { + return + } var val = process.env[name] - if (name === npm_config_prefix + 'loglevel') { + if (name === npmConfigPrefix + 'loglevel') { log.level = val } else { // add the user-defined options to the config - name = name.substring(npm_config_prefix.length) + name = name.substring(npmConfigPrefix.length) // gyp@741b7f1 enters an infinite loop when it encounters // zero-length options so ensure those don't get through. 
- if (name) this.opts[name] = val + if (name) { + this.opts[name] = val + } } }, this) @@ -173,11 +163,13 @@ proto.parseArgv = function parseOpts (argv) { */ proto.spawn = function spawn (command, args, opts) { - if (!opts) opts = {} + if (!opts) { + opts = {} + } if (!opts.silent && !opts.stdio) { - opts.stdio = [ 0, 1, 2 ] + opts.stdio = [0, 1, 2] } - var cp = child_process.spawn(command, args, opts) + var cp = childProcess.spawn(command, args, opts) log.info('spawn', command) log.info('spawn args', args) return cp @@ -189,16 +181,16 @@ proto.spawn = function spawn (command, args, opts) { proto.usage = function usage () { var str = [ - '' - , ' Usage: node-gyp [options]' - , '' - , ' where is one of:' - , commands.map(function (c) { - return ' - ' + c + ' - ' + require('./' + c).usage - }).join('\n') - , '' - , 'node-gyp@' + this.version + ' ' + path.resolve(__dirname, '..') - , 'node@' + process.versions.node + '', + ' Usage: node-gyp [options]', + '', + ' where is one of:', + commands.map(function (c) { + return ' - ' + c + ' - ' + require('./' + c).usage + }).join('\n'), + '', + 'node-gyp@' + this.version + ' ' + path.resolve(__dirname, '..'), + 'node@' + process.versions.node ].join('\n') return str } @@ -208,9 +200,10 @@ proto.usage = function usage () { */ Object.defineProperty(proto, 'version', { - get: function () { - return this.package.version - } - , enumerable: true + get: function () { + return this.package.version + }, + enumerable: true }) +module.exports = exports = gyp diff --git a/node_modules/node-gyp/lib/process-release.js b/node_modules/node-gyp/lib/process-release.js index 0d177f1c93e4d..95b55e4426dee 100644 --- a/node_modules/node-gyp/lib/process-release.js +++ b/node_modules/node-gyp/lib/process-release.js @@ -1,30 +1,35 @@ -var semver = require('semver') - , url = require('url') - , path = require('path') - , log = require('npmlog') - - // versions where -headers.tar.gz started shipping - , headersTarballRange = '>= 3.0.0 || ~0.12.10 || ~0.10.42' - , bitsre = /\/win-(x86|x64)\// - , bitsreV3 = /\/win-(x86|ia32|x64)\// // io.js v3.x.x shipped with "ia32" but should - // have been "x86" - -// Captures all the logic required to determine download URLs, local directory and +/* eslint-disable node/no-deprecated-api */ + +'use strict' + +const semver = require('semver') +const url = require('url') +const path = require('path') +const log = require('npmlog') + +// versions where -headers.tar.gz started shipping +const headersTarballRange = '>= 3.0.0 || ~0.12.10 || ~0.10.42' +const bitsre = /\/win-(x86|x64|arm64)\// +const bitsreV3 = /\/win-(x86|ia32|x64)\// // io.js v3.x.x shipped with "ia32" but should +// have been "x86" + +// Captures all the logic required to determine download URLs, local directory and // file names. Inputs come from command-line switches (--target, --dist-url), // `process.version` and `process.release` where it exists. 
function processRelease (argv, gyp, defaultVersion, defaultRelease) { var version = (semver.valid(argv[0]) && argv[0]) || gyp.opts.target || defaultVersion - , versionSemver = semver.parse(version) - , overrideDistUrl = gyp.opts['dist-url'] || gyp.opts.disturl - , isDefaultVersion - , isIojs - , name - , distBaseUrl - , baseUrl - , libUrl32 - , libUrl64 - , tarballUrl - , canGetHeaders + var versionSemver = semver.parse(version) + var overrideDistUrl = gyp.opts['dist-url'] || gyp.opts.disturl + var isDefaultVersion + var isNamedForLegacyIojs + var name + var distBaseUrl + var baseUrl + var libUrl32 + var libUrl64 + var libUrlArm64 + var tarballUrl + var canGetHeaders if (!versionSemver) { // not a valid semver string, nothing we can do @@ -37,50 +42,39 @@ function processRelease (argv, gyp, defaultVersion, defaultRelease) { isDefaultVersion = version === semver.parse(defaultVersion).version // can't use process.release if we're using --target=x.y.z - if (!isDefaultVersion) + if (!isDefaultVersion) { defaultRelease = null + } if (defaultRelease) { // v3 onward, has process.release name = defaultRelease.name.replace(/io\.js/, 'iojs') // remove the '.' for directory naming purposes - isIojs = name === 'iojs' } else { // old node or alternative --target= // semver.satisfies() doesn't like prerelease tags so test major directly - isIojs = versionSemver.major >= 1 && versionSemver.major < 4 - name = isIojs ? 'iojs' : 'node' + isNamedForLegacyIojs = versionSemver.major >= 1 && versionSemver.major < 4 + // isNamedForLegacyIojs is required to support Electron < 4 (in particular Electron 3) + // as previously this logic was used to ensure "iojs" was used to download iojs releases + // and "node" for node releases. Unfortunately the logic was broad enough that electron@3 + // published release assets as "iojs" so that the node-gyp logic worked. Once Electron@3 has + // been EOL for a while (late 2019) we should remove this hack. + name = isNamedForLegacyIojs ? 'iojs' : 'node' } // check for the nvm.sh standard mirror env variables - if (!overrideDistUrl) { - if (isIojs) { - if (process.env.IOJS_ORG_MIRROR) { - overrideDistUrl = process.env.IOJS_ORG_MIRROR - } else if (process.env.NVM_IOJS_ORG_MIRROR) {// remove on next semver-major - overrideDistUrl = process.env.NVM_IOJS_ORG_MIRROR - log.warn('download', - 'NVM_IOJS_ORG_MIRROR is deprecated and will be removed in node-gyp v4, ' + - 'please use IOJS_ORG_MIRROR') - } - } else { - if (process.env.NODEJS_ORG_MIRROR) { - overrideDistUrl = process.env.NODEJS_ORG_MIRROR - } else if (process.env.NVM_NODEJS_ORG_MIRROR) {// remove on next semver-major - overrideDistUrl = process.env.NVM_NODEJS_ORG_MIRROR - log.warn('download', - 'NVM_NODEJS_ORG_MIRROR is deprecated and will be removed in node-gyp v4, ' + - 'please use NODEJS_ORG_MIRROR') - } - } + if (!overrideDistUrl && process.env.NODEJS_ORG_MIRROR) { + overrideDistUrl = process.env.NODEJS_ORG_MIRROR } - if (overrideDistUrl) + if (overrideDistUrl) { log.verbose('download', 'using dist-url', overrideDistUrl) + } - if (overrideDistUrl) + if (overrideDistUrl) { distBaseUrl = overrideDistUrl.replace(/\/+$/, '') - else - distBaseUrl = isIojs ? 
'https://iojs.org/download/release' : 'https://nodejs.org/dist' + } else { + distBaseUrl = 'https://nodejs.org/dist' + } distBaseUrl += '/v' + version + '/' // new style, based on process.release so we have a lot of the data we need @@ -88,35 +82,24 @@ function processRelease (argv, gyp, defaultVersion, defaultRelease) { baseUrl = url.resolve(defaultRelease.headersUrl, './') libUrl32 = resolveLibUrl(name, defaultRelease.libUrl || baseUrl || distBaseUrl, 'x86', versionSemver.major) libUrl64 = resolveLibUrl(name, defaultRelease.libUrl || baseUrl || distBaseUrl, 'x64', versionSemver.major) - - return { - version: version, - semver: versionSemver, - name: name, - baseUrl: baseUrl, - tarballUrl: defaultRelease.headersUrl, - shasumsUrl: url.resolve(baseUrl, 'SHASUMS256.txt'), - versionDir: (name !== 'node' ? name + '-' : '') + version, - libUrl32: libUrl32, - libUrl64: libUrl64, - libPath32: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl32).path)), - libPath64: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl64).path)) - } + libUrlArm64 = resolveLibUrl(name, defaultRelease.libUrl || baseUrl || distBaseUrl, 'arm64', versionSemver.major) + tarballUrl = defaultRelease.headersUrl + } else { + // older versions without process.release are captured here and we have to make + // a lot of assumptions, additionally if you --target=x.y.z then we can't use the + // current process.release + baseUrl = distBaseUrl + libUrl32 = resolveLibUrl(name, baseUrl, 'x86', versionSemver.major) + libUrl64 = resolveLibUrl(name, baseUrl, 'x64', versionSemver.major) + libUrlArm64 = resolveLibUrl(name, baseUrl, 'arm64', versionSemver.major) + + // making the bold assumption that anything with a version number >3.0.0 will + // have a *-headers.tar.gz file in its dist location, even some frankenstein + // custom version + canGetHeaders = semver.satisfies(versionSemver, headersTarballRange) + tarballUrl = url.resolve(baseUrl, name + '-v' + version + (canGetHeaders ? '-headers' : '') + '.tar.gz') } - // older versions without process.release are captured here and we have to make - // a lot of assumptions, additionally if you --target=x.y.z then we can't use the - // current process.release - - baseUrl = distBaseUrl - libUrl32 = resolveLibUrl(name, baseUrl, 'x86', versionSemver.major) - libUrl64 = resolveLibUrl(name, baseUrl, 'x64', versionSemver.major) - // making the bold assumption that anything with a version number >3.0.0 will - // have a *-headers.tar.gz file in its dist location, even some frankenstein - // custom version - canGetHeaders = semver.satisfies(versionSemver, headersTarballRange) - tarballUrl = url.resolve(baseUrl, name + '-v' + version + (canGetHeaders ? '-headers' : '') + '.tar.gz') - return { version: version, semver: versionSemver, @@ -125,10 +108,18 @@ function processRelease (argv, gyp, defaultVersion, defaultRelease) { tarballUrl: tarballUrl, shasumsUrl: url.resolve(baseUrl, 'SHASUMS256.txt'), versionDir: (name !== 'node' ? 
name + '-' : '') + version, - libUrl32: libUrl32, - libUrl64: libUrl64, - libPath32: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl32).path)), - libPath64: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl64).path)) + ia32: { + libUrl: libUrl32, + libPath: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl32).path)) + }, + x64: { + libUrl: libUrl64, + libPath: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl64).path)) + }, + arm64: { + libUrl: libUrlArm64, + libPath: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrlArm64).path)) + } } } @@ -138,14 +129,15 @@ function normalizePath (p) { function resolveLibUrl (name, defaultUrl, arch, versionMajor) { var base = url.resolve(defaultUrl, './') - , hasLibUrl = bitsre.test(defaultUrl) || (versionMajor === 3 && bitsreV3.test(defaultUrl)) + var hasLibUrl = bitsre.test(defaultUrl) || (versionMajor === 3 && bitsreV3.test(defaultUrl)) if (!hasLibUrl) { // let's assume it's a baseUrl then - if (versionMajor >= 1) - return url.resolve(base, 'win-' + arch +'/' + name + '.lib') + if (versionMajor >= 1) { + return url.resolve(base, 'win-' + arch + '/' + name + '.lib') + } // prior to io.js@1.0.0 32-bit node.lib lives in /, 64-bit lives in /x64/ - return url.resolve(base, (arch === 'x64' ? 'x64/' : '') + name + '.lib') + return url.resolve(base, (arch === 'x86' ? '' : arch + '/') + name + '.lib') } // else we have a proper url to a .lib, just make sure it's the right arch diff --git a/node_modules/node-gyp/lib/rebuild.js b/node_modules/node-gyp/lib/rebuild.js index 4c6f472aa7c07..a1c5b27cbe56a 100644 --- a/node_modules/node-gyp/lib/rebuild.js +++ b/node_modules/node-gyp/lib/rebuild.js @@ -1,14 +1,13 @@ - -module.exports = exports = rebuild - -exports.usage = 'Runs "clean", "configure" and "build" all at once' +'use strict' function rebuild (gyp, argv, callback) { - gyp.todo.push( - { name: 'clean', args: [] } + { name: 'clean', args: [] } , { name: 'configure', args: argv } , { name: 'build', args: [] } ) process.nextTick(callback) } + +module.exports = rebuild +module.exports.usage = 'Runs "clean", "configure" and "build" all at once' diff --git a/node_modules/node-gyp/lib/remove.js b/node_modules/node-gyp/lib/remove.js index eb80981b88feb..8c945e5659001 100644 --- a/node_modules/node-gyp/lib/remove.js +++ b/node_modules/node-gyp/lib/remove.js @@ -1,20 +1,12 @@ +'use strict' -module.exports = exports = remove - -exports.usage = 'Removes the node development files for the specified version' - -/** - * Module dependencies. 
- */ - -var fs = require('fs') - , rm = require('rimraf') - , path = require('path') - , log = require('npmlog') - , semver = require('semver') +const fs = require('fs') +const rm = require('rimraf') +const path = require('path') +const log = require('npmlog') +const semver = require('semver') function remove (gyp, argv, callback) { - var devDir = gyp.devDir log.verbose('remove', 'using node-gyp dir:', devDir) @@ -36,9 +28,9 @@ function remove (gyp, argv, callback) { log.verbose('remove', 'removing development files for version:', version) // first check if its even installed - fs.stat(versionPath, function (err, stat) { + fs.stat(versionPath, function (err) { if (err) { - if (err.code == 'ENOENT') { + if (err.code === 'ENOENT') { callback(null, 'version was already uninstalled: ' + version) } else { callback(err) @@ -48,5 +40,7 @@ function remove (gyp, argv, callback) { // Go ahead and delete the dir rm(versionPath, callback) }) - } + +module.exports = exports = remove +module.exports.usage = 'Removes the node development files for the specified version' diff --git a/node_modules/node-gyp/lib/util.js b/node_modules/node-gyp/lib/util.js new file mode 100644 index 0000000000000..3e23c628e6ad7 --- /dev/null +++ b/node_modules/node-gyp/lib/util.js @@ -0,0 +1,64 @@ +'use strict' + +const log = require('npmlog') +const execFile = require('child_process').execFile +const path = require('path') + +function logWithPrefix (log, prefix) { + function setPrefix (logFunction) { + return (...args) => logFunction.apply(null, [ prefix, ...args ]) // eslint-disable-line + } + return { + silly: setPrefix(log.silly), + verbose: setPrefix(log.verbose), + info: setPrefix(log.info), + warn: setPrefix(log.warn), + error: setPrefix(log.error) + } +} + +function regGetValue (key, value, addOpts, cb) { + const outReValue = value.replace(/\W/g, '.') + const outRe = new RegExp(`^\\s+${outReValue}\\s+REG_\\w+\\s+(\\S.*)$`, 'im') + const reg = path.join(process.env.SystemRoot, 'System32', 'reg.exe') + const regArgs = ['query', key, '/v', value].concat(addOpts) + + log.silly('reg', 'running', reg, regArgs) + const child = execFile(reg, regArgs, { encoding: 'utf8' }, + function (err, stdout, stderr) { + log.silly('reg', 'reg.exe stdout = %j', stdout) + if (err || stderr.trim() !== '') { + log.silly('reg', 'reg.exe err = %j', err && (err.stack || err)) + log.silly('reg', 'reg.exe stderr = %j', stderr) + return cb(err, stderr) + } + + const result = outRe.exec(stdout) + if (!result) { + log.silly('reg', 'error parsing stdout') + return cb(new Error('Could not parse output of reg.exe')) + } + log.silly('reg', 'found: %j', result[1]) + cb(null, result[1]) + }) + child.stdin.end() +} + +function regSearchKeys (keys, value, addOpts, cb) { + var i = 0 + const search = () => { + log.silly('reg-search', 'looking for %j in %j', value, keys[i]) + regGetValue(keys[i], value, addOpts, (err, res) => { + ++i + if (err && i < keys.length) { return search() } + cb(err, res) + }) + } + search() +} + +module.exports = { + logWithPrefix: logWithPrefix, + regGetValue: regGetValue, + regSearchKeys: regSearchKeys +} diff --git a/node_modules/node-gyp/macOS_Catalina.md b/node_modules/node-gyp/macOS_Catalina.md new file mode 100644 index 0000000000000..5f02face57d96 --- /dev/null +++ b/node_modules/node-gyp/macOS_Catalina.md @@ -0,0 +1,74 @@ +# Installation notes for macOS Catalina (v10.15) + +_This document specifically refers to upgrades from previous versions of macOS to Catalina (10.15). 
It should be removed from the source repository when Catalina ceases to be the latest macOS version or updated to deal with challenges involved in upgrades to the next version of macOS._ + +Lessons learned from: +* https://github.com/nodejs/node-gyp/issues/1779 +* https://github.com/nodejs/node-gyp/issues/1861 +* https://github.com/nodejs/node-gyp/issues/1927 and elsewhere + +Instructions for installing `node-gyp` on macOS can be found at https://github.com/nodejs/node-gyp#on-macos + +However, upgrading to macOS Catalina changes some settings that may cause normal `node-gyp` installations to fail. + +### Is my Mac running macOS Catalina? +Let's first make sure that your Mac is currently running Catalina: +% `sw_vers` + ProductName: Mac OS X + ProductVersion: 10.15 + BuildVersion: 19A602 + +If `ProductVersion` is less than `10.15` then this document is not really for you. + +### The acid test +Next, let's see if `Xcode Command Line Tools` are installed: +1. `/usr/sbin/pkgutil --packages | grep CL` + * If nothing is listed, then [skip to the next section](#Two-roads). + * If `com.apple.pkg.CLTools_Executables` is listed then try: +2. `/usr/sbin/pkgutil --pkg-info com.apple.pkg.CLTools_Executables` + * If `version: 11.0.0` or later is listed then _you are done_! Your Mac should be ready to install `node-gyp`. Doing `clang -v` should show `Apple clang version 11.0.0` or later. + +As you go through the remainder of this document, at any time you can try these `acid test` commands. If they pass then your Mac should be ready to install `node-gyp`. + +### Two roads +There are two main ways to install `node-gyp` on macOS: +1. With the full Xcode (~7.6 GB download) from the `App Store` app. +2. With the _much_ smaller Xcode Command Line Tools via `xcode-select --install` + +### Installing `node-gyp` using the full Xcode +1. `xcodebuild -version` should show `Xcode 11.1` or later. + * If not, then install/upgrade Xcode from the App Store app. +2. Open the Xcode app and... + * Under __Preferences > Locations__ select the tools if their location is empty. + * Allow Xcode app to do an essential install of the most recent compiler tools. +3. Once all installations are _complete_, quit out of Xcode. +4. `sudo xcodebuild -license accept` # If you agree with the licensing terms. +5. `softwareupdate -l` # No listing is a good sign. + * If Xcode or Tools upgrades are listed, use "Software Update" to install them. +6. `xcode-select -version` # Should return `xcode-select version 2370` or later. +7. `xcode-select -print-path` # Should return `/Applications/Xcode.app/Contents/Developer` +8. Try the [_acid test_ steps above](#The-acid-test) to see if your Mac is ready. +9. If the _acid test_ does _not_ pass then... +10. `sudo xcode-select --reset` # Enter root password. No output is normal. +11. Repeat step 7 above. Is the path different this time? Repeat the _acid test_. + +### Installing `node-gyp` using the Xcode Command Line Tools +1. If the _acid test_ has not succeeded, then try `xcode-select --install` +2. Wait until the install process is _complete_. +3. `softwareupdate -l` # No listing is a good sign. + * If Xcode or Tools upgrades are listed, use "Software Update" to install them. +4. `xcode-select -version` # Should return `xcode-select version 2370` or later. +5. `xcode-select -print-path` # Should return `/Library/Developer/CommandLineTools` +6. Try the [_acid test_ steps above](#The-acid-test) to see if your Mac is ready. +7. If the _acid test_ does _not_ pass then... +8.
`sudo xcode-select --reset` # Enter root password. No output is normal. +9. Repeat step 5 above. Is the path different this time? Repeat the _acid test_. + +### I did all that and the acid test still does not pass :-( +1. `sudo rm -rf $(xcode-select -print-path)` # Enter root password. No output is normal. +2. `xcode-select --install` +3. If the [_acid test_](#The-acid-test) still does _not_ pass then... +4. `npm explore npm -g -- npm install node-gyp@latest` +5. `npm explore npm -g -- npm explore npm-lifecycle -- npm install node-gyp@latest` +6. If the _acid test_ still does _not_ pass then... +7. Add a comment to https://github.com/nodejs/node-gyp/issues/1927 so we can improve. diff --git a/node_modules/node-gyp/node_modules/.bin/nopt b/node_modules/node-gyp/node_modules/.bin/nopt deleted file mode 120000 index 6b6566ea7febb..0000000000000 --- a/node_modules/node-gyp/node_modules/.bin/nopt +++ /dev/null @@ -1 +0,0 @@ -../nopt/bin/nopt.js \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/.bin/semver b/node_modules/node-gyp/node_modules/.bin/semver deleted file mode 120000 index 317eb293d8e12..0000000000000 --- a/node_modules/node-gyp/node_modules/.bin/semver +++ /dev/null @@ -1 +0,0 @@ -../semver/bin/semver \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/nopt/.travis.yml b/node_modules/node-gyp/node_modules/nopt/.travis.yml deleted file mode 100644 index 99f2bbf5068a8..0000000000000 --- a/node_modules/node-gyp/node_modules/nopt/.travis.yml +++ /dev/null @@ -1,9 +0,0 @@ -language: node_js -language: node_js -node_js: - - '0.8' - - '0.10' - - '0.12' - - 'iojs' -before_install: - - npm install -g npm@latest diff --git a/node_modules/node-gyp/node_modules/nopt/LICENSE b/node_modules/node-gyp/node_modules/nopt/LICENSE deleted file mode 100644 index 19129e315fe59..0000000000000 --- a/node_modules/node-gyp/node_modules/nopt/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/nopt/README.md b/node_modules/node-gyp/node_modules/nopt/README.md deleted file mode 100644 index f21a4b31c5d48..0000000000000 --- a/node_modules/node-gyp/node_modules/nopt/README.md +++ /dev/null @@ -1,211 +0,0 @@ -If you want to write an option parser, and have it be good, there are -two ways to do it. The Right Way, and the Wrong Way. - -The Wrong Way is to sit down and write an option parser. We've all done -that. - -The Right Way is to write some complex configurable program with so many -options that you hit the limit of your frustration just trying to -manage them all, and defer it with duct-tape solutions until you see -exactly to the core of the problem, and finally snap and write an -awesome option parser. 
- -If you want to write an option parser, don't write an option parser. -Write a package manager, or a source control system, or a service -restarter, or an operating system. You probably won't end up with a -good one of those, but if you don't give up, and you are relentless and -diligent enough in your procrastination, you may just end up with a very -nice option parser. - -## USAGE - - // my-program.js - var nopt = require("nopt") - , Stream = require("stream").Stream - , path = require("path") - , knownOpts = { "foo" : [String, null] - , "bar" : [Stream, Number] - , "baz" : path - , "bloo" : [ "big", "medium", "small" ] - , "flag" : Boolean - , "pick" : Boolean - , "many1" : [String, Array] - , "many2" : [path] - } - , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] - , "b7" : ["--bar", "7"] - , "m" : ["--bloo", "medium"] - , "p" : ["--pick"] - , "f" : ["--flag"] - } - // everything is optional. - // knownOpts and shorthands default to {} - // arg list defaults to process.argv - // slice defaults to 2 - , parsed = nopt(knownOpts, shortHands, process.argv, 2) - console.log(parsed) - -This would give you support for any of the following: - -```bash -$ node my-program.js --foo "blerp" --no-flag -{ "foo" : "blerp", "flag" : false } - -$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag -{ bar: 7, foo: "Mr. Hand", flag: true } - -$ node my-program.js --foo "blerp" -f -----p -{ foo: "blerp", flag: true, pick: true } - -$ node my-program.js -fp --foofoo -{ foo: "Mr. Foo", flag: true, pick: true } - -$ node my-program.js --foofoo -- -fp # -- stops the flag parsing. -{ foo: "Mr. Foo", argv: { remain: ["-fp"] } } - -$ node my-program.js --blatzk -fp # unknown opts are ok. -{ blatzk: true, flag: true, pick: true } - -$ node my-program.js --blatzk=1000 -fp # but you need to use = if they have a value -{ blatzk: 1000, flag: true, pick: true } - -$ node my-program.js --no-blatzk -fp # unless they start with "no-" -{ blatzk: false, flag: true, pick: true } - -$ node my-program.js --baz b/a/z # known paths are resolved. -{ baz: "/Users/isaacs/b/a/z" } - -# if Array is one of the types, then it can take many -# values, and will always be an array. The other types provided -# specify what types are allowed in the list. - -$ node my-program.js --many1 5 --many1 null --many1 foo -{ many1: ["5", "null", "foo"] } - -$ node my-program.js --many2 foo --many2 bar -{ many2: ["/path/to/foo", "path/to/bar"] } -``` - -Read the tests at the bottom of `lib/nopt.js` for more examples of -what this puppy can do. - -## Types - -The following types are supported, and defined on `nopt.typeDefs` - -* String: A normal string. No parsing is done. -* path: A file system path. Gets resolved against cwd if not absolute. -* url: A url. If it doesn't parse, it isn't accepted. -* Number: Must be numeric. -* Date: Must parse as a date. If it does, and `Date` is one of the options, - then it will return a Date object, not a string. -* Boolean: Must be either `true` or `false`. If an option is a boolean, - then it does not need a value, and its presence will imply `true` as - the value. To negate boolean flags, do `--no-whatever` or `--whatever - false` -* NaN: Means that the option is strictly not allowed. Any value will - fail. -* Stream: An object matching the "Stream" class in node. Valuable - for use when validating programmatically. (npm uses this to let you - supply any WriteStream on the `outfd` and `logfd` config options.) 
-* Array: If `Array` is specified as one of the types, then the value - will be parsed as a list of options. This means that multiple values - can be specified, and that the value will always be an array. - -If a type is an array of values not on this list, then those are -considered valid values. For instance, in the example above, the -`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`, -and any other value will be rejected. - -When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be -interpreted as their JavaScript equivalents. - -You can also mix types and values, or multiple types, in a list. For -instance `{ blah: [Number, null] }` would allow a value to be set to -either a Number or null. When types are ordered, this implies a -preference, and the first type that can be used to properly interpret -the value will be used. - -To define a new type, add it to `nopt.typeDefs`. Each item in that -hash is an object with a `type` member and a `validate` method. The -`type` member is an object that matches what goes in the type list. The -`validate` method is a function that gets called with `validate(data, -key, val)`. Validate methods should assign `data[key]` to the valid -value of `val` if it can be handled properly, or return boolean -`false` if it cannot. - -You can also call `nopt.clean(data, types, typeDefs)` to clean up a -config object and remove its invalid properties. - -## Error Handling - -By default, nopt outputs a warning to standard error when invalid values for -known options are found. You can change this behavior by assigning a method -to `nopt.invalidHandler`. This method will be called with -the offending `nopt.invalidHandler(key, val, types)`. - -If no `nopt.invalidHandler` is assigned, then it will console.error -its whining. If it is assigned to boolean `false` then the warning is -suppressed. - -## Abbreviations - -Yes, they are supported. If you define options like this: - -```javascript -{ "foolhardyelephants" : Boolean -, "pileofmonkeys" : Boolean } -``` - -Then this will work: - -```bash -node program.js --foolhar --pil -node program.js --no-f --pileofmon -# etc. -``` - -## Shorthands - -Shorthands are a hash of shorter option names to a snippet of args that -they expand to. - -If multiple one-character shorthands are all combined, and the -combination does not unambiguously match any other option or shorthand, -then they will be broken up into their constituent parts. For example: - -```json -{ "s" : ["--loglevel", "silent"] -, "g" : "--global" -, "f" : "--force" -, "p" : "--parseable" -, "l" : "--long" -} -``` - -```bash -npm ls -sgflp -# just like doing this: -npm ls --loglevel silent --global --force --long --parseable -``` - -## The Rest of the args - -The config object returned by nopt is given a special member called -`argv`, which is an object with the following fields: - -* `remain`: The remaining args after all the parsing has occurred. -* `original`: The args as they originally appeared. -* `cooked`: The args after flags and shorthands are expanded. - -## Slicing - -Node programs are called with more or less the exact argv as it appears -in C land, after the v8 and node-specific options have been plucked off. -As such, `argv[0]` is always `node` and `argv[1]` is always the -JavaScript program being run. - -That's usually not very useful to you. So they're sliced off by -default. If you want them, then you can pass in `0` as the last -argument, or any other number that you'd like to slice off the start of -the list. 
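For reference, a minimal sketch of the shorthand expansion and argv slicing that the README above describes (an illustrative example only, assuming `nopt@3`, the version being removed here):

```javascript
// Combined single-character shorthands expand before parsing, and the final
// argument controls how many leading argv entries are sliced off
// (0 = parse the array exactly as given).
var nopt = require('nopt')

var knownOpts = {
  loglevel: ['silent', 'verbose'],
  global: Boolean,
  force: Boolean
}
var shortHands = {
  s: ['--loglevel', 'silent'],
  g: '--global',
  f: '--force'
}

var parsed = nopt(knownOpts, shortHands, ['-sgf', 'some-package'], 0)

console.log(parsed.loglevel)    // 'silent'
console.log(parsed.global)      // true
console.log(parsed.force)       // true
console.log(parsed.argv.remain) // [ 'some-package' ]
```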
diff --git a/node_modules/node-gyp/node_modules/nopt/bin/nopt.js b/node_modules/node-gyp/node_modules/nopt/bin/nopt.js deleted file mode 100755 index 3232d4c570fdc..0000000000000 --- a/node_modules/node-gyp/node_modules/nopt/bin/nopt.js +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env node -var nopt = require("../lib/nopt") - , path = require("path") - , types = { num: Number - , bool: Boolean - , help: Boolean - , list: Array - , "num-list": [Number, Array] - , "str-list": [String, Array] - , "bool-list": [Boolean, Array] - , str: String - , clear: Boolean - , config: Boolean - , length: Number - , file: path - } - , shorthands = { s: [ "--str", "astring" ] - , b: [ "--bool" ] - , nb: [ "--no-bool" ] - , tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ] - , "?": ["--help"] - , h: ["--help"] - , H: ["--help"] - , n: [ "--num", "125" ] - , c: ["--config"] - , l: ["--length"] - , f: ["--file"] - } - , parsed = nopt( types - , shorthands - , process.argv - , 2 ) - -console.log("parsed", parsed) - -if (parsed.help) { - console.log("") - console.log("nopt cli tester") - console.log("") - console.log("types") - console.log(Object.keys(types).map(function M (t) { - var type = types[t] - if (Array.isArray(type)) { - return [t, type.map(function (type) { return type.name })] - } - return [t, type && type.name] - }).reduce(function (s, i) { - s[i[0]] = i[1] - return s - }, {})) - console.log("") - console.log("shorthands") - console.log(shorthands) -} diff --git a/node_modules/node-gyp/node_modules/nopt/examples/my-program.js b/node_modules/node-gyp/node_modules/nopt/examples/my-program.js deleted file mode 100755 index 142447e18e756..0000000000000 --- a/node_modules/node-gyp/node_modules/nopt/examples/my-program.js +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env node - -//process.env.DEBUG_NOPT = 1 - -// my-program.js -var nopt = require("../lib/nopt") - , Stream = require("stream").Stream - , path = require("path") - , knownOpts = { "foo" : [String, null] - , "bar" : [Stream, Number] - , "baz" : path - , "bloo" : [ "big", "medium", "small" ] - , "flag" : Boolean - , "pick" : Boolean - } - , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] - , "b7" : ["--bar", "7"] - , "m" : ["--bloo", "medium"] - , "p" : ["--pick"] - , "f" : ["--flag", "true"] - , "g" : ["--flag"] - , "s" : "--flag" - } - // everything is optional. - // knownOpts and shorthands default to {} - // arg list defaults to process.argv - // slice defaults to 2 - , parsed = nopt(knownOpts, shortHands, process.argv, 2) - -console.log("parsed =\n"+ require("util").inspect(parsed)) diff --git a/node_modules/node-gyp/node_modules/nopt/lib/nopt.js b/node_modules/node-gyp/node_modules/nopt/lib/nopt.js deleted file mode 100644 index 97707e784243b..0000000000000 --- a/node_modules/node-gyp/node_modules/nopt/lib/nopt.js +++ /dev/null @@ -1,415 +0,0 @@ -// info about each config option. - -var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG - ? 
function () { console.error.apply(console, arguments) } - : function () {} - -var url = require("url") - , path = require("path") - , Stream = require("stream").Stream - , abbrev = require("abbrev") - -module.exports = exports = nopt -exports.clean = clean - -exports.typeDefs = - { String : { type: String, validate: validateString } - , Boolean : { type: Boolean, validate: validateBoolean } - , url : { type: url, validate: validateUrl } - , Number : { type: Number, validate: validateNumber } - , path : { type: path, validate: validatePath } - , Stream : { type: Stream, validate: validateStream } - , Date : { type: Date, validate: validateDate } - } - -function nopt (types, shorthands, args, slice) { - args = args || process.argv - types = types || {} - shorthands = shorthands || {} - if (typeof slice !== "number") slice = 2 - - debug(types, shorthands, args, slice) - - args = args.slice(slice) - var data = {} - , key - , remain = [] - , cooked = args - , original = args.slice(0) - - parse(args, data, remain, types, shorthands) - // now data is full - clean(data, types, exports.typeDefs) - data.argv = {remain:remain,cooked:cooked,original:original} - Object.defineProperty(data.argv, 'toString', { value: function () { - return this.original.map(JSON.stringify).join(" ") - }, enumerable: false }) - return data -} - -function clean (data, types, typeDefs) { - typeDefs = typeDefs || exports.typeDefs - var remove = {} - , typeDefault = [false, true, null, String, Array] - - Object.keys(data).forEach(function (k) { - if (k === "argv") return - var val = data[k] - , isArray = Array.isArray(val) - , type = types[k] - if (!isArray) val = [val] - if (!type) type = typeDefault - if (type === Array) type = typeDefault.concat(Array) - if (!Array.isArray(type)) type = [type] - - debug("val=%j", val) - debug("types=", type) - val = val.map(function (val) { - // if it's an unknown value, then parse false/true/null/numbers/dates - if (typeof val === "string") { - debug("string %j", val) - val = val.trim() - if ((val === "null" && ~type.indexOf(null)) - || (val === "true" && - (~type.indexOf(true) || ~type.indexOf(Boolean))) - || (val === "false" && - (~type.indexOf(false) || ~type.indexOf(Boolean)))) { - val = JSON.parse(val) - debug("jsonable %j", val) - } else if (~type.indexOf(Number) && !isNaN(val)) { - debug("convert to number", val) - val = +val - } else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) { - debug("convert to date", val) - val = new Date(val) - } - } - - if (!types.hasOwnProperty(k)) { - return val - } - - // allow `--no-blah` to set 'blah' to null if null is allowed - if (val === false && ~type.indexOf(null) && - !(~type.indexOf(false) || ~type.indexOf(Boolean))) { - val = null - } - - var d = {} - d[k] = val - debug("prevalidated val", d, val, types[k]) - if (!validate(d, k, val, types[k], typeDefs)) { - if (exports.invalidHandler) { - exports.invalidHandler(k, val, types[k], data) - } else if (exports.invalidHandler !== false) { - debug("invalid: "+k+"="+val, types[k]) - } - return remove - } - debug("validated val", d, val, types[k]) - return d[k] - }).filter(function (val) { return val !== remove }) - - if (!val.length) delete data[k] - else if (isArray) { - debug(isArray, data[k], val) - data[k] = val - } else data[k] = val[0] - - debug("k=%s val=%j", k, val, data[k]) - }) -} - -function validateString (data, k, val) { - data[k] = String(val) -} - -function validatePath (data, k, val) { - if (val === true) return false - if (val === null) return true - - val = String(val) - var 
homePattern = process.platform === 'win32' ? /^~(\/|\\)/ : /^~\// - if (val.match(homePattern) && process.env.HOME) { - val = path.resolve(process.env.HOME, val.substr(2)) - } - data[k] = path.resolve(String(val)) - return true -} - -function validateNumber (data, k, val) { - debug("validate Number %j %j %j", k, val, isNaN(val)) - if (isNaN(val)) return false - data[k] = +val -} - -function validateDate (data, k, val) { - debug("validate Date %j %j %j", k, val, Date.parse(val)) - var s = Date.parse(val) - if (isNaN(s)) return false - data[k] = new Date(val) -} - -function validateBoolean (data, k, val) { - if (val instanceof Boolean) val = val.valueOf() - else if (typeof val === "string") { - if (!isNaN(val)) val = !!(+val) - else if (val === "null" || val === "false") val = false - else val = true - } else val = !!val - data[k] = val -} - -function validateUrl (data, k, val) { - val = url.parse(String(val)) - if (!val.host) return false - data[k] = val.href -} - -function validateStream (data, k, val) { - if (!(val instanceof Stream)) return false - data[k] = val -} - -function validate (data, k, val, type, typeDefs) { - // arrays are lists of types. - if (Array.isArray(type)) { - for (var i = 0, l = type.length; i < l; i ++) { - if (type[i] === Array) continue - if (validate(data, k, val, type[i], typeDefs)) return true - } - delete data[k] - return false - } - - // an array of anything? - if (type === Array) return true - - // NaN is poisonous. Means that something is not allowed. - if (type !== type) { - debug("Poison NaN", k, val, type) - delete data[k] - return false - } - - // explicit list of values - if (val === type) { - debug("Explicitly allowed %j", val) - // if (isArray) (data[k] = data[k] || []).push(val) - // else data[k] = val - data[k] = val - return true - } - - // now go through the list of typeDefs, validate against each one. - var ok = false - , types = Object.keys(typeDefs) - for (var i = 0, l = types.length; i < l; i ++) { - debug("test type %j %j %j", k, val, types[i]) - var t = typeDefs[types[i]] - if (t && - ((type && type.name && t.type && t.type.name) ? (type.name === t.type.name) : (type === t.type))) { - var d = {} - ok = false !== t.validate(d, k, val) - val = d[k] - if (ok) { - // if (isArray) (data[k] = data[k] || []).push(val) - // else data[k] = val - data[k] = val - break - } - } - } - debug("OK? %j (%j %j %j)", ok, k, val, types[i]) - - if (!ok) delete data[k] - return ok -} - -function parse (args, data, remain, types, shorthands) { - debug("parse", args, data, remain) - - var key = null - , abbrevs = abbrev(Object.keys(types)) - , shortAbbr = abbrev(Object.keys(shorthands)) - - for (var i = 0; i < args.length; i ++) { - var arg = args[i] - debug("arg", arg) - - if (arg.match(/^-{2,}$/)) { - // done with keys. - // the rest are args. - remain.push.apply(remain, args.slice(i + 1)) - args[i] = "--" - break - } - var hadEq = false - if (arg.charAt(0) === "-" && arg.length > 1) { - if (arg.indexOf("=") !== -1) { - hadEq = true - var v = arg.split("=") - arg = v.shift() - v = v.join("=") - args.splice.apply(args, [i, 1].concat([arg, v])) - } - - // see if it's a shorthand - // if so, splice and back up to re-parse it. 
- var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs) - debug("arg=%j shRes=%j", arg, shRes) - if (shRes) { - debug(arg, shRes) - args.splice.apply(args, [i, 1].concat(shRes)) - if (arg !== shRes[0]) { - i -- - continue - } - } - arg = arg.replace(/^-+/, "") - var no = null - while (arg.toLowerCase().indexOf("no-") === 0) { - no = !no - arg = arg.substr(3) - } - - if (abbrevs[arg]) arg = abbrevs[arg] - - var isArray = types[arg] === Array || - Array.isArray(types[arg]) && types[arg].indexOf(Array) !== -1 - - // allow unknown things to be arrays if specified multiple times. - if (!types.hasOwnProperty(arg) && data.hasOwnProperty(arg)) { - if (!Array.isArray(data[arg])) - data[arg] = [data[arg]] - isArray = true - } - - var val - , la = args[i + 1] - - var isBool = typeof no === 'boolean' || - types[arg] === Boolean || - Array.isArray(types[arg]) && types[arg].indexOf(Boolean) !== -1 || - (typeof types[arg] === 'undefined' && !hadEq) || - (la === "false" && - (types[arg] === null || - Array.isArray(types[arg]) && ~types[arg].indexOf(null))) - - if (isBool) { - // just set and move along - val = !no - // however, also support --bool true or --bool false - if (la === "true" || la === "false") { - val = JSON.parse(la) - la = null - if (no) val = !val - i ++ - } - - // also support "foo":[Boolean, "bar"] and "--foo bar" - if (Array.isArray(types[arg]) && la) { - if (~types[arg].indexOf(la)) { - // an explicit type - val = la - i ++ - } else if ( la === "null" && ~types[arg].indexOf(null) ) { - // null allowed - val = null - i ++ - } else if ( !la.match(/^-{2,}[^-]/) && - !isNaN(la) && - ~types[arg].indexOf(Number) ) { - // number - val = +la - i ++ - } else if ( !la.match(/^-[^-]/) && ~types[arg].indexOf(String) ) { - // string - val = la - i ++ - } - } - - if (isArray) (data[arg] = data[arg] || []).push(val) - else data[arg] = val - - continue - } - - if (types[arg] === String && la === undefined) - la = "" - - if (la && la.match(/^-{2,}$/)) { - la = undefined - i -- - } - - val = la === undefined ? true : la - if (isArray) (data[arg] = data[arg] || []).push(val) - else data[arg] = val - - i ++ - continue - } - remain.push(arg) - } -} - -function resolveShort (arg, shorthands, shortAbbr, abbrevs) { - // handle single-char shorthands glommed together, like - // npm ls -glp, but only if there is one dash, and only if - // all of the chars are single-char shorthands, and it's - // not a match to some other abbrev. 
- arg = arg.replace(/^-+/, '') - - // if it's an exact known option, then don't go any further - if (abbrevs[arg] === arg) - return null - - // if it's an exact known shortopt, same deal - if (shorthands[arg]) { - // make it an array, if it's a list of words - if (shorthands[arg] && !Array.isArray(shorthands[arg])) - shorthands[arg] = shorthands[arg].split(/\s+/) - - return shorthands[arg] - } - - // first check to see if this arg is a set of single-char shorthands - var singles = shorthands.___singles - if (!singles) { - singles = Object.keys(shorthands).filter(function (s) { - return s.length === 1 - }).reduce(function (l,r) { - l[r] = true - return l - }, {}) - shorthands.___singles = singles - debug('shorthand singles', singles) - } - - var chrs = arg.split("").filter(function (c) { - return singles[c] - }) - - if (chrs.join("") === arg) return chrs.map(function (c) { - return shorthands[c] - }).reduce(function (l, r) { - return l.concat(r) - }, []) - - - // if it's an arg abbrev, and not a literal shorthand, then prefer the arg - if (abbrevs[arg] && !shorthands[arg]) - return null - - // if it's an abbr for a shorthand, then use that - if (shortAbbr[arg]) - arg = shortAbbr[arg] - - // make it an array, if it's a list of words - if (shorthands[arg] && !Array.isArray(shorthands[arg])) - shorthands[arg] = shorthands[arg].split(/\s+/) - - return shorthands[arg] -} diff --git a/node_modules/node-gyp/node_modules/nopt/package.json b/node_modules/node-gyp/node_modules/nopt/package.json deleted file mode 100644 index 5179f473b1bbd..0000000000000 --- a/node_modules/node-gyp/node_modules/nopt/package.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "_from": "nopt@2 || 3", - "_id": "nopt@3.0.6", - "_inBundle": false, - "_integrity": "sha1-xkZdvwirzU2zWTF/eaxopkayj/k=", - "_location": "/node-gyp/nopt", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "nopt@2 || 3", - "name": "nopt", - "escapedName": "nopt", - "rawSpec": "2 || 3", - "saveSpec": null, - "fetchSpec": "2 || 3" - }, - "_requiredBy": [ - "/node-gyp" - ], - "_resolved": "https://registry.npmjs.org/nopt/-/nopt-3.0.6.tgz", - "_shasum": "c6465dbf08abcd4db359317f79ac68a646b28ff9", - "_spec": "nopt@2 || 3", - "_where": "/Users/rebecca/code/npm/node_modules/node-gyp", - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "bin": { - "nopt": "./bin/nopt.js" - }, - "bugs": { - "url": "https://github.com/npm/nopt/issues" - }, - "bundleDependencies": false, - "dependencies": { - "abbrev": "1" - }, - "deprecated": false, - "description": "Option parsing for Node, supporting types, shorthands, etc. 
Used by npm.", - "devDependencies": { - "tap": "^1.2.0" - }, - "homepage": "https://github.com/npm/nopt#readme", - "license": "ISC", - "main": "lib/nopt.js", - "name": "nopt", - "repository": { - "type": "git", - "url": "git+https://github.com/npm/nopt.git" - }, - "scripts": { - "test": "tap test/*.js" - }, - "version": "3.0.6" -} diff --git a/node_modules/node-gyp/node_modules/nopt/test/basic.js b/node_modules/node-gyp/node_modules/nopt/test/basic.js deleted file mode 100644 index d399de9209932..0000000000000 --- a/node_modules/node-gyp/node_modules/nopt/test/basic.js +++ /dev/null @@ -1,273 +0,0 @@ -var nopt = require("../") - , test = require('tap').test - - -test("passing a string results in a string", function (t) { - var parsed = nopt({ key: String }, {}, ["--key", "myvalue"], 0) - t.same(parsed.key, "myvalue") - t.end() -}) - -// https://github.com/npm/nopt/issues/31 -test("Empty String results in empty string, not true", function (t) { - var parsed = nopt({ empty: String }, {}, ["--empty"], 0) - t.same(parsed.empty, "") - t.end() -}) - -test("~ path is resolved to $HOME", function (t) { - var path = require("path") - if (!process.env.HOME) process.env.HOME = "/tmp" - var parsed = nopt({key: path}, {}, ["--key=~/val"], 0) - t.same(parsed.key, path.resolve(process.env.HOME, "val")) - t.end() -}) - -// https://github.com/npm/nopt/issues/24 -test("Unknown options are not parsed as numbers", function (t) { - var parsed = nopt({"parse-me": Number}, null, ['--leave-as-is=1.20', '--parse-me=1.20'], 0) - t.equal(parsed['leave-as-is'], '1.20') - t.equal(parsed['parse-me'], 1.2) - t.end() -}); - -// https://github.com/npm/nopt/issues/48 -test("Check types based on name of type", function (t) { - var parsed = nopt({"parse-me": {name: "Number"}}, null, ['--parse-me=1.20'], 0) - t.equal(parsed['parse-me'], 1.2) - t.end() -}) - - -test("Missing types are not parsed", function (t) { - var parsed = nopt({"parse-me": {}}, null, ['--parse-me=1.20'], 0) - //should only contain argv - t.equal(Object.keys(parsed).length, 1) - t.end() -}) - -test("Types passed without a name are not parsed", function (t) { - var parsed = nopt({"parse-me": {}}, {}, ['--parse-me=1.20'], 0) - //should only contain argv - t.equal(Object.keys(parsed).length, 1) - t.end() -}) - -test("other tests", function (t) { - - var util = require("util") - , Stream = require("stream") - , path = require("path") - , url = require("url") - - , shorthands = - { s : ["--loglevel", "silent"] - , d : ["--loglevel", "info"] - , dd : ["--loglevel", "verbose"] - , ddd : ["--loglevel", "silly"] - , noreg : ["--no-registry"] - , reg : ["--registry"] - , "no-reg" : ["--no-registry"] - , silent : ["--loglevel", "silent"] - , verbose : ["--loglevel", "verbose"] - , h : ["--usage"] - , H : ["--usage"] - , "?" 
: ["--usage"] - , help : ["--usage"] - , v : ["--version"] - , f : ["--force"] - , desc : ["--description"] - , "no-desc" : ["--no-description"] - , "local" : ["--no-global"] - , l : ["--long"] - , p : ["--parseable"] - , porcelain : ["--parseable"] - , g : ["--global"] - } - - , types = - { aoa: Array - , nullstream: [null, Stream] - , date: Date - , str: String - , browser : String - , cache : path - , color : ["always", Boolean] - , depth : Number - , description : Boolean - , dev : Boolean - , editor : path - , force : Boolean - , global : Boolean - , globalconfig : path - , group : [String, Number] - , gzipbin : String - , logfd : [Number, Stream] - , loglevel : ["silent","win","error","warn","info","verbose","silly"] - , long : Boolean - , "node-version" : [false, String] - , npaturl : url - , npat : Boolean - , "onload-script" : [false, String] - , outfd : [Number, Stream] - , parseable : Boolean - , pre: Boolean - , prefix: path - , proxy : url - , "rebuild-bundle" : Boolean - , registry : url - , searchopts : String - , searchexclude: [null, String] - , shell : path - , t: [Array, String] - , tag : String - , tar : String - , tmp : path - , "unsafe-perm" : Boolean - , usage : Boolean - , user : String - , username : String - , userconfig : path - , version : Boolean - , viewer: path - , _exit : Boolean - , path: path - } - - ; [["-v", {version:true}, []] - ,["---v", {version:true}, []] - ,["ls -s --no-reg connect -d", - {loglevel:"info",registry:null},["ls","connect"]] - ,["ls ---s foo",{loglevel:"silent"},["ls","foo"]] - ,["ls --registry blargle", {}, ["ls"]] - ,["--no-registry", {registry:null}, []] - ,["--no-color true", {color:false}, []] - ,["--no-color false", {color:true}, []] - ,["--no-color", {color:false}, []] - ,["--color false", {color:false}, []] - ,["--color --logfd 7", {logfd:7,color:true}, []] - ,["--color=true", {color:true}, []] - ,["--logfd=10", {logfd:10}, []] - ,["--tmp=/tmp -tar=gtar",{tmp:"/tmp",tar:"gtar"},[]] - ,["--tmp=tmp -tar=gtar", - {tmp:path.resolve(process.cwd(), "tmp"),tar:"gtar"},[]] - ,["--logfd x", {}, []] - ,["a -true -- -no-false", {true:true},["a","-no-false"]] - ,["a -no-false", {false:false},["a"]] - ,["a -no-no-true", {true:true}, ["a"]] - ,["a -no-no-no-false", {false:false}, ["a"]] - ,["---NO-no-No-no-no-no-nO-no-no"+ - "-No-no-no-no-no-no-no-no-no"+ - "-no-no-no-no-NO-NO-no-no-no-no-no-no"+ - "-no-body-can-do-the-boogaloo-like-I-do" - ,{"body-can-do-the-boogaloo-like-I-do":false}, []] - ,["we are -no-strangers-to-love "+ - "--you-know=the-rules --and=so-do-i "+ - "---im-thinking-of=a-full-commitment "+ - "--no-you-would-get-this-from-any-other-guy "+ - "--no-gonna-give-you-up "+ - "-no-gonna-let-you-down=true "+ - "--no-no-gonna-run-around false "+ - "--desert-you=false "+ - "--make-you-cry false "+ - "--no-tell-a-lie "+ - "--no-no-and-hurt-you false" - ,{"strangers-to-love":false - ,"you-know":"the-rules" - ,"and":"so-do-i" - ,"you-would-get-this-from-any-other-guy":false - ,"gonna-give-you-up":false - ,"gonna-let-you-down":false - ,"gonna-run-around":false - ,"desert-you":false - ,"make-you-cry":false - ,"tell-a-lie":false - ,"and-hurt-you":false - },["we", "are"]] - ,["-t one -t two -t three" - ,{t: ["one", "two", "three"]} - ,[]] - ,["-t one -t null -t three four five null" - ,{t: ["one", "null", "three"]} - ,["four", "five", "null"]] - ,["-t foo" - ,{t:["foo"]} - ,[]] - ,["--no-t" - ,{t:["false"]} - ,[]] - ,["-no-no-t" - ,{t:["true"]} - ,[]] - ,["-aoa one -aoa null -aoa 100" - ,{aoa:["one", null, '100']} - ,[]] - ,["-str 100" - 
,{str:"100"} - ,[]] - ,["--color always" - ,{color:"always"} - ,[]] - ,["--no-nullstream" - ,{nullstream:null} - ,[]] - ,["--nullstream false" - ,{nullstream:null} - ,[]] - ,["--notadate=2011-01-25" - ,{notadate: "2011-01-25"} - ,[]] - ,["--date 2011-01-25" - ,{date: new Date("2011-01-25")} - ,[]] - ,["-cl 1" - ,{config: true, length: 1} - ,[] - ,{config: Boolean, length: Number, clear: Boolean} - ,{c: "--config", l: "--length"}] - ,["--acount bla" - ,{"acount":true} - ,["bla"] - ,{account: Boolean, credentials: Boolean, options: String} - ,{a:"--account", c:"--credentials",o:"--options"}] - ,["--clear" - ,{clear:true} - ,[] - ,{clear:Boolean,con:Boolean,len:Boolean,exp:Boolean,add:Boolean,rep:Boolean} - ,{c:"--con",l:"--len",e:"--exp",a:"--add",r:"--rep"}] - ,["--file -" - ,{"file":"-"} - ,[] - ,{file:String} - ,{}] - ,["--file -" - ,{"file":true} - ,["-"] - ,{file:Boolean} - ,{}] - ,["--path" - ,{"path":null} - ,[]] - ,["--path ." - ,{"path":process.cwd()} - ,[]] - ].forEach(function (test) { - var argv = test[0].split(/\s+/) - , opts = test[1] - , rem = test[2] - , actual = nopt(test[3] || types, test[4] || shorthands, argv, 0) - , parsed = actual.argv - delete actual.argv - for (var i in opts) { - var e = JSON.stringify(opts[i]) - , a = JSON.stringify(actual[i] === undefined ? null : actual[i]) - if (e && typeof e === "object") { - t.deepEqual(e, a) - } else { - t.equal(e, a) - } - } - t.deepEqual(rem, parsed.remain) - }) - t.end() -}) diff --git a/node_modules/node-gyp/node_modules/semver/LICENSE b/node_modules/node-gyp/node_modules/semver/LICENSE deleted file mode 100644 index 19129e315fe59..0000000000000 --- a/node_modules/node-gyp/node_modules/semver/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/semver/README.md b/node_modules/node-gyp/node_modules/semver/README.md deleted file mode 100644 index cbd956549dbb0..0000000000000 --- a/node_modules/node-gyp/node_modules/semver/README.md +++ /dev/null @@ -1,350 +0,0 @@ -semver(1) -- The semantic versioner for npm -=========================================== - -## Usage - - $ npm install semver - $ node - var semver = require('semver') - - semver.valid('1.2.3') // '1.2.3' - semver.valid('a.b.c') // null - semver.clean(' =v1.2.3 ') // '1.2.3' - semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true - semver.gt('1.2.3', '9.8.7') // false - semver.lt('1.2.3', '9.8.7') // true - -As a command-line utility: - - $ semver -h - - SemVer 5.1.0 - - A JavaScript implementation of the http://semver.org/ specification - Copyright Isaac Z. Schlueter - - Usage: semver [options] [ [...]] - Prints valid versions sorted by SemVer precedence - - Options: - -r --range - Print versions that match the specified range. 
- - -i --increment [] - Increment a version by the specified level. Level can - be one of: major, minor, patch, premajor, preminor, - prepatch, or prerelease. Default level is 'patch'. - Only one version may be specified. - - --preid - Identifier to be used to prefix premajor, preminor, - prepatch or prerelease version increments. - - -l --loose - Interpret versions and ranges loosely - - Program exits successfully if any valid version satisfies - all supplied ranges, and prints all satisfying versions. - - If no satisfying versions are found, then exits failure. - - Versions are printed in ascending order, so supplying - multiple versions to the utility will just sort them. - -## Versions - -A "version" is described by the `v2.0.0` specification found at -. - -A leading `"="` or `"v"` character is stripped off and ignored. - -## Ranges - -A `version range` is a set of `comparators` which specify versions -that satisfy the range. - -A `comparator` is composed of an `operator` and a `version`. The set -of primitive `operators` is: - -* `<` Less than -* `<=` Less than or equal to -* `>` Greater than -* `>=` Greater than or equal to -* `=` Equal. If no operator is specified, then equality is assumed, - so this operator is optional, but MAY be included. - -For example, the comparator `>=1.2.7` would match the versions -`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6` -or `1.1.0`. - -Comparators can be joined by whitespace to form a `comparator set`, -which is satisfied by the **intersection** of all of the comparators -it includes. - -A range is composed of one or more comparator sets, joined by `||`. A -version matches a range if and only if every comparator in at least -one of the `||`-separated comparator sets is satisfied by the version. - -For example, the range `>=1.2.7 <1.3.0` would match the versions -`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`, -or `1.1.0`. - -The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`, -`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`. - -### Prerelease Tags - -If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then -it will only be allowed to satisfy comparator sets if at least one -comparator with the same `[major, minor, patch]` tuple also has a -prerelease tag. - -For example, the range `>1.2.3-alpha.3` would be allowed to match the -version `1.2.3-alpha.7`, but it would *not* be satisfied by -`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater -than" `1.2.3-alpha.3` according to the SemVer sort rules. The version -range only accepts prerelease tags on the `1.2.3` version. The -version `3.4.5` *would* satisfy the range, because it does not have a -prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`. - -The purpose for this behavior is twofold. First, prerelease versions -frequently are updated very quickly, and contain many breaking changes -that are (by the author's design) not yet fit for public consumption. -Therefore, by default, they are excluded from range matching -semantics. - -Second, a user who has opted into using a prerelease version has -clearly indicated the intent to use *that specific* set of -alpha/beta/rc versions. By including a prerelease tag in the range, -the user is indicating that they are aware of the risk. However, it -is still not appropriate to assume that they have opted into taking a -similar risk on the *next* set of prerelease versions. 
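As a hedged aside (not part of the deleted README), the prerelease rule above can be checked directly against the module; the expected results follow the behaviour documented here:

```js
var semver = require('semver')

// A prerelease only satisfies a range whose comparators carry a prerelease
// on the same [major, minor, patch] tuple.
semver.satisfies('1.2.3-alpha.7', '>1.2.3-alpha.3') // true: same tuple as the comparator
semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3') // false: prerelease on a different tuple
semver.satisfies('3.4.5', '>1.2.3-alpha.3')         // true: no prerelease flag at all
```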
- -#### Prerelease Identifiers - -The method `.inc` takes an additional `identifier` string argument that -will append the value of the string as a prerelease identifier: - -```javascript -> semver.inc('1.2.3', 'prerelease', 'beta') -'1.2.4-beta.0' -``` - -command-line example: - -```shell -$ semver 1.2.3 -i prerelease --preid beta -1.2.4-beta.0 -``` - -Which then can be used to increment further: - -```shell -$ semver 1.2.4-beta.0 -i prerelease -1.2.4-beta.1 -``` - -### Advanced Range Syntax - -Advanced range syntax desugars to primitive comparators in -deterministic ways. - -Advanced ranges may be combined in the same way as primitive -comparators using white space or `||`. - -#### Hyphen Ranges `X.Y.Z - A.B.C` - -Specifies an inclusive set. - -* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4` - -If a partial version is provided as the first version in the inclusive -range, then the missing pieces are replaced with zeroes. - -* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4` - -If a partial version is provided as the second version in the -inclusive range, then all versions that start with the supplied parts -of the tuple are accepted, but nothing that would be greater than the -provided tuple parts. - -* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0` -* `1.2.3 - 2` := `>=1.2.3 <3.0.0` - -#### X-Ranges `1.2.x` `1.X` `1.2.*` `*` - -Any of `X`, `x`, or `*` may be used to "stand in" for one of the -numeric values in the `[major, minor, patch]` tuple. - -* `*` := `>=0.0.0` (Any version satisfies) -* `1.x` := `>=1.0.0 <2.0.0` (Matching major version) -* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions) - -A partial version range is treated as an X-Range, so the special -character is in fact optional. - -* `""` (empty string) := `*` := `>=0.0.0` -* `1` := `1.x.x` := `>=1.0.0 <2.0.0` -* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0` - -#### Tilde Ranges `~1.2.3` `~1.2` `~1` - -Allows patch-level changes if a minor version is specified on the -comparator. Allows minor-level changes if not. - -* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0` -* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`) -* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`) -* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0` -* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`) -* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`) -* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in - the `1.2.3` version will be allowed, if they are greater than or - equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but - `1.2.4-beta.2` would not, because it is a prerelease of a - different `[major, minor, patch]` tuple. - -#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4` - -Allows changes that do not modify the left-most non-zero digit in the -`[major, minor, patch]` tuple. In other words, this allows patch and -minor updates for versions `1.0.0` and above, patch updates for -versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`. - -Many authors treat a `0.x` version as if the `x` were the major -"breaking-change" indicator. - -Caret ranges are ideal when an author may make breaking changes -between `0.2.4` and `0.3.0` releases, which is a common practice. -However, it presumes that there will *not* be breaking changes between -`0.2.4` and `0.2.5`. It allows for changes that are presumed to be -additive (but non-breaking), according to commonly observed practices. 
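Before the caret desugarings listed next, here is a small hedged check of the tilde and X-range rules described above, assuming the same `semver` module this README documents:

```js
var semver = require('semver')

// Tilde: patch-level changes when a minor is given (~1.2.3 := >=1.2.3 <1.3.0).
semver.satisfies('1.2.9', '~1.2.3') // true
semver.satisfies('1.3.0', '~1.2.3') // false

// Partial versions act as X-ranges (1.2 := >=1.2.0 <1.3.0, 1.x := >=1.0.0 <2.0.0).
semver.satisfies('1.2.5', '1.2') // true
semver.satisfies('1.9.0', '1.x') // true
semver.satisfies('2.0.0', '1.x') // false
```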
- -* `^1.2.3` := `>=1.2.3 <2.0.0` -* `^0.2.3` := `>=0.2.3 <0.3.0` -* `^0.0.3` := `>=0.0.3 <0.0.4` -* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in - the `1.2.3` version will be allowed, if they are greater than or - equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but - `1.2.4-beta.2` would not, because it is a prerelease of a - different `[major, minor, patch]` tuple. -* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the - `0.0.3` version *only* will be allowed, if they are greater than or - equal to `beta`. So, `0.0.3-pr.2` would be allowed. - -When parsing caret ranges, a missing `patch` value desugars to the -number `0`, but will allow flexibility within that value, even if the -major and minor versions are both `0`. - -* `^1.2.x` := `>=1.2.0 <2.0.0` -* `^0.0.x` := `>=0.0.0 <0.1.0` -* `^0.0` := `>=0.0.0 <0.1.0` - -A missing `minor` and `patch` values will desugar to zero, but also -allow flexibility within those values, even if the major version is -zero. - -* `^1.x` := `>=1.0.0 <2.0.0` -* `^0.x` := `>=0.0.0 <1.0.0` - -### Range Grammar - -Putting all this together, here is a Backus-Naur grammar for ranges, -for the benefit of parser authors: - -```bnf -range-set ::= range ( logical-or range ) * -logical-or ::= ( ' ' ) * '||' ( ' ' ) * -range ::= hyphen | simple ( ' ' simple ) * | '' -hyphen ::= partial ' - ' partial -simple ::= primitive | partial | tilde | caret -primitive ::= ( '<' | '>' | '>=' | '<=' | '=' | ) partial -partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? -xr ::= 'x' | 'X' | '*' | nr -nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) * -tilde ::= '~' partial -caret ::= '^' partial -qualifier ::= ( '-' pre )? ( '+' build )? -pre ::= parts -build ::= parts -parts ::= part ( '.' part ) * -part ::= nr | [-0-9A-Za-z]+ -``` - -## Functions - -All methods and classes take a final `loose` boolean argument that, if -true, will be more forgiving about not-quite-valid semver strings. -The resulting output will always be 100% strict, of course. - -Strict-mode Comparators and Ranges will be strict about the SemVer -strings that they parse. - -* `valid(v)`: Return the parsed version, or null if it's not valid. -* `inc(v, release)`: Return the version incremented by the release - type (`major`, `premajor`, `minor`, `preminor`, `patch`, - `prepatch`, or `prerelease`), or null if it's not valid - * `premajor` in one call will bump the version up to the next major - version and down to a prerelease of that major version. - `preminor`, and `prepatch` work the same way. - * If called from a non-prerelease version, the `prerelease` will work the - same as `prepatch`. It increments the patch version, then makes a - prerelease. If the input version is already a prerelease it simply - increments it. -* `prerelease(v)`: Returns an array of prerelease components, or null - if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]` -* `major(v)`: Return the major version number. -* `minor(v)`: Return the minor version number. -* `patch(v)`: Return the patch version number. - -### Comparison - -* `gt(v1, v2)`: `v1 > v2` -* `gte(v1, v2)`: `v1 >= v2` -* `lt(v1, v2)`: `v1 < v2` -* `lte(v1, v2)`: `v1 <= v2` -* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent, - even if they're not the exact same string. You already know how to - compare strings. -* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`. -* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call - the corresponding function above. 
`"==="` and `"!=="` do simple - string comparison, but are included for completeness. Throws if an - invalid comparison string is provided. -* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if - `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. -* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions - in descending order when passed to `Array.sort()`. -* `diff(v1, v2)`: Returns difference between two versions by the release type - (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`), - or null if the versions are the same. - - -### Ranges - -* `validRange(range)`: Return the valid range or null if it's not valid -* `satisfies(version, range)`: Return true if the version satisfies the - range. -* `maxSatisfying(versions, range)`: Return the highest version in the list - that satisfies the range, or `null` if none of them do. -* `minSatisfying(versions, range)`: Return the lowest version in the list - that satisfies the range, or `null` if none of them do. -* `gtr(version, range)`: Return `true` if version is greater than all the - versions possible in the range. -* `ltr(version, range)`: Return `true` if version is less than all the - versions possible in the range. -* `outside(version, range, hilo)`: Return true if the version is outside - the bounds of the range in either the high or low direction. The - `hilo` argument must be either the string `'>'` or `'<'`. (This is - the function called by `gtr` and `ltr`.) - -Note that, since ranges may be non-contiguous, a version might not be -greater than a range, less than a range, *or* satisfy a range! For -example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9` -until `2.0.0`, so the version `1.2.10` would not be greater than the -range (because `2.0.1` satisfies, which is higher), nor less than the -range (since `1.2.8` satisfies, which is lower), and it also does not -satisfy the range. - -If you want to know if a version satisfies or does not satisfy a -range, use the `satisfies(version, range)` function. diff --git a/node_modules/node-gyp/node_modules/semver/bin/semver b/node_modules/node-gyp/node_modules/semver/bin/semver deleted file mode 100755 index c5f2e857e8279..0000000000000 --- a/node_modules/node-gyp/node_modules/semver/bin/semver +++ /dev/null @@ -1,133 +0,0 @@ -#!/usr/bin/env node -// Standalone semver comparison program. -// Exits successfully and prints matching version(s) if -// any supplied version is valid and passes all tests. 
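A brief hedged sketch of the range helpers just listed in the README (`maxSatisfying`, `minSatisfying`, `gtr`, `ltr`); the sample version list is made up for illustration:

```js
var semver = require('semver')

var versions = ['1.2.3', '1.2.9', '2.0.1'] // hypothetical list

semver.maxSatisfying(versions, '1.2.x')   // '1.2.9'
semver.minSatisfying(versions, '>=1.2.4') // '1.2.9'

// gtr/ltr are strict about non-contiguous ranges, as described above:
// 1.2.10 is neither above, below, nor inside '1.2 <1.2.9 || >2.0.0'.
semver.gtr('1.2.10', '1.2 <1.2.9 || >2.0.0')       // false
semver.ltr('1.2.10', '1.2 <1.2.9 || >2.0.0')       // false
semver.satisfies('1.2.10', '1.2 <1.2.9 || >2.0.0') // false
```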
- -var argv = process.argv.slice(2) - , versions = [] - , range = [] - , gt = [] - , lt = [] - , eq = [] - , inc = null - , version = require("../package.json").version - , loose = false - , identifier = undefined - , semver = require("../semver") - , reverse = false - -main() - -function main () { - if (!argv.length) return help() - while (argv.length) { - var a = argv.shift() - var i = a.indexOf('=') - if (i !== -1) { - a = a.slice(0, i) - argv.unshift(a.slice(i + 1)) - } - switch (a) { - case "-rv": case "-rev": case "--rev": case "--reverse": - reverse = true - break - case "-l": case "--loose": - loose = true - break - case "-v": case "--version": - versions.push(argv.shift()) - break - case "-i": case "--inc": case "--increment": - switch (argv[0]) { - case "major": case "minor": case "patch": case "prerelease": - case "premajor": case "preminor": case "prepatch": - inc = argv.shift() - break - default: - inc = "patch" - break - } - break - case "--preid": - identifier = argv.shift() - break - case "-r": case "--range": - range.push(argv.shift()) - break - case "-h": case "--help": case "-?": - return help() - default: - versions.push(a) - break - } - } - - versions = versions.filter(function (v) { - return semver.valid(v, loose) - }) - if (!versions.length) return fail() - if (inc && (versions.length !== 1 || range.length)) - return failInc() - - for (var i = 0, l = range.length; i < l ; i ++) { - versions = versions.filter(function (v) { - return semver.satisfies(v, range[i], loose) - }) - if (!versions.length) return fail() - } - return success(versions) -} - -function failInc () { - console.error("--inc can only be used on a single version with no range") - fail() -} - -function fail () { process.exit(1) } - -function success () { - var compare = reverse ? "rcompare" : "compare" - versions.sort(function (a, b) { - return semver[compare](a, b, loose) - }).map(function (v) { - return semver.clean(v, loose) - }).map(function (v) { - return inc ? semver.inc(v, inc, loose, identifier) : v - }).forEach(function (v,i,_) { console.log(v) }) -} - -function help () { - console.log(["SemVer " + version - ,"" - ,"A JavaScript implementation of the http://semver.org/ specification" - ,"Copyright Isaac Z. Schlueter" - ,"" - ,"Usage: semver [options] [ [...]]" - ,"Prints valid versions sorted by SemVer precedence" - ,"" - ,"Options:" - ,"-r --range " - ," Print versions that match the specified range." - ,"" - ,"-i --increment []" - ," Increment a version by the specified level. Level can" - ," be one of: major, minor, patch, premajor, preminor," - ," prepatch, or prerelease. Default level is 'patch'." - ," Only one version may be specified." - ,"" - ,"--preid " - ," Identifier to be used to prefix premajor, preminor," - ," prepatch or prerelease version increments." - ,"" - ,"-l --loose" - ," Interpret versions and ranges loosely" - ,"" - ,"Program exits successfully if any valid version satisfies" - ,"all supplied ranges, and prints all satisfying versions." - ,"" - ,"If no satisfying versions are found, then exits failure." - ,"" - ,"Versions are printed in ascending order, so supplying" - ,"multiple versions to the utility will just sort them." 
- ].join("\n")) -} diff --git a/node_modules/node-gyp/node_modules/semver/package.json b/node_modules/node-gyp/node_modules/semver/package.json deleted file mode 100644 index 8793bae37c297..0000000000000 --- a/node_modules/node-gyp/node_modules/semver/package.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "_from": "semver@~5.3.0", - "_id": "semver@5.3.0", - "_inBundle": false, - "_integrity": "sha1-myzl094C0XxgEq0yaqa00M9U+U8=", - "_location": "/node-gyp/semver", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "semver@~5.3.0", - "name": "semver", - "escapedName": "semver", - "rawSpec": "~5.3.0", - "saveSpec": null, - "fetchSpec": "~5.3.0" - }, - "_requiredBy": [ - "/node-gyp" - ], - "_resolved": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz", - "_shasum": "9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f", - "_spec": "semver@~5.3.0", - "_where": "/Users/rebecca/code/npm/node_modules/node-gyp", - "bin": { - "semver": "./bin/semver" - }, - "bugs": { - "url": "https://github.com/npm/node-semver/issues" - }, - "bundleDependencies": false, - "deprecated": false, - "description": "The semantic version parser used by npm.", - "devDependencies": { - "tap": "^2.0.0" - }, - "files": [ - "bin", - "range.bnf", - "semver.js" - ], - "homepage": "https://github.com/npm/node-semver#readme", - "license": "ISC", - "main": "semver.js", - "name": "semver", - "repository": { - "type": "git", - "url": "git+https://github.com/npm/node-semver.git" - }, - "scripts": { - "test": "tap test/*.js" - }, - "version": "5.3.0" -} diff --git a/node_modules/node-gyp/node_modules/semver/range.bnf b/node_modules/node-gyp/node_modules/semver/range.bnf deleted file mode 100644 index 25ebd5c832548..0000000000000 --- a/node_modules/node-gyp/node_modules/semver/range.bnf +++ /dev/null @@ -1,16 +0,0 @@ -range-set ::= range ( logical-or range ) * -logical-or ::= ( ' ' ) * '||' ( ' ' ) * -range ::= hyphen | simple ( ' ' simple ) * | '' -hyphen ::= partial ' - ' partial -simple ::= primitive | partial | tilde | caret -primitive ::= ( '<' | '>' | '>=' | '<=' | '=' | ) partial -partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? -xr ::= 'x' | 'X' | '*' | nr -nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) * -tilde ::= '~' partial -caret ::= '^' partial -qualifier ::= ( '-' pre )? ( '+' build )? -pre ::= parts -build ::= parts -parts ::= part ( '.' part ) * -part ::= nr | [-0-9A-Za-z]+ diff --git a/node_modules/node-gyp/node_modules/semver/semver.js b/node_modules/node-gyp/node_modules/semver/semver.js deleted file mode 100644 index 5f1a3c5c9e5dc..0000000000000 --- a/node_modules/node-gyp/node_modules/semver/semver.js +++ /dev/null @@ -1,1203 +0,0 @@ -exports = module.exports = SemVer; - -// The debug function is excluded entirely from the minified version. -/* nomin */ var debug; -/* nomin */ if (typeof process === 'object' && - /* nomin */ process.env && - /* nomin */ process.env.NODE_DEBUG && - /* nomin */ /\bsemver\b/i.test(process.env.NODE_DEBUG)) - /* nomin */ debug = function() { - /* nomin */ var args = Array.prototype.slice.call(arguments, 0); - /* nomin */ args.unshift('SEMVER'); - /* nomin */ console.log.apply(console, args); - /* nomin */ }; -/* nomin */ else - /* nomin */ debug = function() {}; - -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. 
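A short hedged note on the debug gate above (an assumption about typical usage, not part of the original source): the `debug()` helper only logs when `NODE_DEBUG` mentions `semver`, so a trace can be enabled like this before the module is first required:

```js
// Must be set before require('semver') so the gate above sees it.
process.env.NODE_DEBUG = 'semver'

var semver = require('semver')
semver.satisfies('1.2.3', '^1.0.0') // should print 'SEMVER ...' trace lines via console.log
```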
-exports.SEMVER_SPEC_VERSION = '2.0.0'; - -var MAX_LENGTH = 256; -var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || 9007199254740991; - -// The actual regexps go on exports.re -var re = exports.re = []; -var src = exports.src = []; -var R = 0; - -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. - -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. - -var NUMERICIDENTIFIER = R++; -src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'; -var NUMERICIDENTIFIERLOOSE = R++; -src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'; - - -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. - -var NONNUMERICIDENTIFIER = R++; -src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'; - - -// ## Main Version -// Three dot-separated numeric identifiers. - -var MAINVERSION = R++; -src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')'; - -var MAINVERSIONLOOSE = R++; -src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')'; - -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. - -var PRERELEASEIDENTIFIER = R++; -src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + - '|' + src[NONNUMERICIDENTIFIER] + ')'; - -var PRERELEASEIDENTIFIERLOOSE = R++; -src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + - '|' + src[NONNUMERICIDENTIFIER] + ')'; - - -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. - -var PRERELEASE = R++; -src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + - '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'; - -var PRERELEASELOOSE = R++; -src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + - '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'; - -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. - -var BUILDIDENTIFIER = R++; -src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'; - -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. - -var BUILD = R++; -src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + - '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'; - - -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. - -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. - -var FULL = R++; -var FULLPLAIN = 'v?' + src[MAINVERSION] + - src[PRERELEASE] + '?' + - src[BUILD] + '?'; - -src[FULL] = '^' + FULLPLAIN + '$'; - -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + - src[PRERELEASELOOSE] + '?' + - src[BUILD] + '?'; - -var LOOSE = R++; -src[LOOSE] = '^' + LOOSEPLAIN + '$'; - -var GTLT = R++; -src[GTLT] = '((?:<|>)?=?)'; - -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. 
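As a hedged illustration of the X-range note above (these calls use only the public API documented in this package's README):

```js
var semver = require('semver')

// 'x', 'X' and '*' all stand in for a numeric identifier; 'x.x' means any version.
semver.satisfies('0.0.1', 'x.x') // true
semver.validRange('x.x')         // '*'
semver.validRange('2.*')         // '>=2.0.0 <3.0.0'
```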
-var XRANGEIDENTIFIERLOOSE = R++; -src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'; -var XRANGEIDENTIFIER = R++; -src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'; - -var XRANGEPLAIN = R++; -src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:' + src[PRERELEASE] + ')?' + - src[BUILD] + '?' + - ')?)?'; - -var XRANGEPLAINLOOSE = R++; -src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:' + src[PRERELEASELOOSE] + ')?' + - src[BUILD] + '?' + - ')?)?'; - -var XRANGE = R++; -src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'; -var XRANGELOOSE = R++; -src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'; - -// Tilde ranges. -// Meaning is "reasonably at or greater than" -var LONETILDE = R++; -src[LONETILDE] = '(?:~>?)'; - -var TILDETRIM = R++; -src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'; -re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g'); -var tildeTrimReplace = '$1~'; - -var TILDE = R++; -src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'; -var TILDELOOSE = R++; -src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'; - -// Caret ranges. -// Meaning is "at least and backwards compatible with" -var LONECARET = R++; -src[LONECARET] = '(?:\\^)'; - -var CARETTRIM = R++; -src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'; -re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g'); -var caretTrimReplace = '$1^'; - -var CARET = R++; -src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'; -var CARETLOOSE = R++; -src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'; - -// A simple gt/lt/eq thing, or just "" to indicate "any version" -var COMPARATORLOOSE = R++; -src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'; -var COMPARATOR = R++; -src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'; - - -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -var COMPARATORTRIM = R++; -src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + - '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'; - -// this one has to use the /g flag -re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g'); -var comparatorTrimReplace = '$1$2$3'; - - -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. -var HYPHENRANGE = R++; -src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAIN] + ')' + - '\\s*$'; - -var HYPHENRANGELOOSE = R++; -src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s*$'; - -// Star ranges basically just allow anything at all. -var STAR = R++; -src[STAR] = '(<|>)?=?\\s*\\*'; - -// Compile to actual regexp objects. -// All are flag-free, unless they were created above with a flag. -for (var i = 0; i < R; i++) { - debug(i, src[i]); - if (!re[i]) - re[i] = new RegExp(src[i]); -} - -exports.parse = parse; -function parse(version, loose) { - if (version instanceof SemVer) - return version; - - if (typeof version !== 'string') - return null; - - if (version.length > MAX_LENGTH) - return null; - - var r = loose ? 
re[LOOSE] : re[FULL]; - if (!r.test(version)) - return null; - - try { - return new SemVer(version, loose); - } catch (er) { - return null; - } -} - -exports.valid = valid; -function valid(version, loose) { - var v = parse(version, loose); - return v ? v.version : null; -} - - -exports.clean = clean; -function clean(version, loose) { - var s = parse(version.trim().replace(/^[=v]+/, ''), loose); - return s ? s.version : null; -} - -exports.SemVer = SemVer; - -function SemVer(version, loose) { - if (version instanceof SemVer) { - if (version.loose === loose) - return version; - else - version = version.version; - } else if (typeof version !== 'string') { - throw new TypeError('Invalid Version: ' + version); - } - - if (version.length > MAX_LENGTH) - throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') - - if (!(this instanceof SemVer)) - return new SemVer(version, loose); - - debug('SemVer', version, loose); - this.loose = loose; - var m = version.trim().match(loose ? re[LOOSE] : re[FULL]); - - if (!m) - throw new TypeError('Invalid Version: ' + version); - - this.raw = version; - - // these are actually numbers - this.major = +m[1]; - this.minor = +m[2]; - this.patch = +m[3]; - - if (this.major > MAX_SAFE_INTEGER || this.major < 0) - throw new TypeError('Invalid major version') - - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) - throw new TypeError('Invalid minor version') - - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) - throw new TypeError('Invalid patch version') - - // numberify any prerelease numeric ids - if (!m[4]) - this.prerelease = []; - else - this.prerelease = m[4].split('.').map(function(id) { - if (/^[0-9]+$/.test(id)) { - var num = +id; - if (num >= 0 && num < MAX_SAFE_INTEGER) - return num; - } - return id; - }); - - this.build = m[5] ? m[5].split('.') : []; - this.format(); -} - -SemVer.prototype.format = function() { - this.version = this.major + '.' + this.minor + '.' + this.patch; - if (this.prerelease.length) - this.version += '-' + this.prerelease.join('.'); - return this.version; -}; - -SemVer.prototype.toString = function() { - return this.version; -}; - -SemVer.prototype.compare = function(other) { - debug('SemVer.compare', this.version, this.loose, other); - if (!(other instanceof SemVer)) - other = new SemVer(other, this.loose); - - return this.compareMain(other) || this.comparePre(other); -}; - -SemVer.prototype.compareMain = function(other) { - if (!(other instanceof SemVer)) - other = new SemVer(other, this.loose); - - return compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch); -}; - -SemVer.prototype.comparePre = function(other) { - if (!(other instanceof SemVer)) - other = new SemVer(other, this.loose); - - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) - return -1; - else if (!this.prerelease.length && other.prerelease.length) - return 1; - else if (!this.prerelease.length && !other.prerelease.length) - return 0; - - var i = 0; - do { - var a = this.prerelease[i]; - var b = other.prerelease[i]; - debug('prerelease compare', i, a, b); - if (a === undefined && b === undefined) - return 0; - else if (b === undefined) - return 1; - else if (a === undefined) - return -1; - else if (a === b) - continue; - else - return compareIdentifiers(a, b); - } while (++i); -}; - -// preminor will bump the version up to the next minor release, and immediately -// down to pre-release. 
premajor and prepatch work the same way. -SemVer.prototype.inc = function(release, identifier) { - switch (release) { - case 'premajor': - this.prerelease.length = 0; - this.patch = 0; - this.minor = 0; - this.major++; - this.inc('pre', identifier); - break; - case 'preminor': - this.prerelease.length = 0; - this.patch = 0; - this.minor++; - this.inc('pre', identifier); - break; - case 'prepatch': - // If this is already a prerelease, it will bump to the next version - // drop any prereleases that might already exist, since they are not - // relevant at this point. - this.prerelease.length = 0; - this.inc('patch', identifier); - this.inc('pre', identifier); - break; - // If the input is a non-prerelease version, this acts the same as - // prepatch. - case 'prerelease': - if (this.prerelease.length === 0) - this.inc('patch', identifier); - this.inc('pre', identifier); - break; - - case 'major': - // If this is a pre-major version, bump up to the same major version. - // Otherwise increment major. - // 1.0.0-5 bumps to 1.0.0 - // 1.1.0 bumps to 2.0.0 - if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) - this.major++; - this.minor = 0; - this.patch = 0; - this.prerelease = []; - break; - case 'minor': - // If this is a pre-minor version, bump up to the same minor version. - // Otherwise increment minor. - // 1.2.0-5 bumps to 1.2.0 - // 1.2.1 bumps to 1.3.0 - if (this.patch !== 0 || this.prerelease.length === 0) - this.minor++; - this.patch = 0; - this.prerelease = []; - break; - case 'patch': - // If this is not a pre-release version, it will increment the patch. - // If it is a pre-release it will bump up to the same patch version. - // 1.2.0-5 patches to 1.2.0 - // 1.2.0 patches to 1.2.1 - if (this.prerelease.length === 0) - this.patch++; - this.prerelease = []; - break; - // This probably shouldn't be used publicly. - // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
- case 'pre': - if (this.prerelease.length === 0) - this.prerelease = [0]; - else { - var i = this.prerelease.length; - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++; - i = -2; - } - } - if (i === -1) // didn't increment anything - this.prerelease.push(0); - } - if (identifier) { - // 1.2.0-beta.1 bumps to 1.2.0-beta.2, - // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - if (this.prerelease[0] === identifier) { - if (isNaN(this.prerelease[1])) - this.prerelease = [identifier, 0]; - } else - this.prerelease = [identifier, 0]; - } - break; - - default: - throw new Error('invalid increment argument: ' + release); - } - this.format(); - this.raw = this.version; - return this; -}; - -exports.inc = inc; -function inc(version, release, loose, identifier) { - if (typeof(loose) === 'string') { - identifier = loose; - loose = undefined; - } - - try { - return new SemVer(version, loose).inc(release, identifier).version; - } catch (er) { - return null; - } -} - -exports.diff = diff; -function diff(version1, version2) { - if (eq(version1, version2)) { - return null; - } else { - var v1 = parse(version1); - var v2 = parse(version2); - if (v1.prerelease.length || v2.prerelease.length) { - for (var key in v1) { - if (key === 'major' || key === 'minor' || key === 'patch') { - if (v1[key] !== v2[key]) { - return 'pre'+key; - } - } - } - return 'prerelease'; - } - for (var key in v1) { - if (key === 'major' || key === 'minor' || key === 'patch') { - if (v1[key] !== v2[key]) { - return key; - } - } - } - } -} - -exports.compareIdentifiers = compareIdentifiers; - -var numeric = /^[0-9]+$/; -function compareIdentifiers(a, b) { - var anum = numeric.test(a); - var bnum = numeric.test(b); - - if (anum && bnum) { - a = +a; - b = +b; - } - - return (anum && !bnum) ? -1 : - (bnum && !anum) ? 1 : - a < b ? -1 : - a > b ? 
1 : - 0; -} - -exports.rcompareIdentifiers = rcompareIdentifiers; -function rcompareIdentifiers(a, b) { - return compareIdentifiers(b, a); -} - -exports.major = major; -function major(a, loose) { - return new SemVer(a, loose).major; -} - -exports.minor = minor; -function minor(a, loose) { - return new SemVer(a, loose).minor; -} - -exports.patch = patch; -function patch(a, loose) { - return new SemVer(a, loose).patch; -} - -exports.compare = compare; -function compare(a, b, loose) { - return new SemVer(a, loose).compare(b); -} - -exports.compareLoose = compareLoose; -function compareLoose(a, b) { - return compare(a, b, true); -} - -exports.rcompare = rcompare; -function rcompare(a, b, loose) { - return compare(b, a, loose); -} - -exports.sort = sort; -function sort(list, loose) { - return list.sort(function(a, b) { - return exports.compare(a, b, loose); - }); -} - -exports.rsort = rsort; -function rsort(list, loose) { - return list.sort(function(a, b) { - return exports.rcompare(a, b, loose); - }); -} - -exports.gt = gt; -function gt(a, b, loose) { - return compare(a, b, loose) > 0; -} - -exports.lt = lt; -function lt(a, b, loose) { - return compare(a, b, loose) < 0; -} - -exports.eq = eq; -function eq(a, b, loose) { - return compare(a, b, loose) === 0; -} - -exports.neq = neq; -function neq(a, b, loose) { - return compare(a, b, loose) !== 0; -} - -exports.gte = gte; -function gte(a, b, loose) { - return compare(a, b, loose) >= 0; -} - -exports.lte = lte; -function lte(a, b, loose) { - return compare(a, b, loose) <= 0; -} - -exports.cmp = cmp; -function cmp(a, op, b, loose) { - var ret; - switch (op) { - case '===': - if (typeof a === 'object') a = a.version; - if (typeof b === 'object') b = b.version; - ret = a === b; - break; - case '!==': - if (typeof a === 'object') a = a.version; - if (typeof b === 'object') b = b.version; - ret = a !== b; - break; - case '': case '=': case '==': ret = eq(a, b, loose); break; - case '!=': ret = neq(a, b, loose); break; - case '>': ret = gt(a, b, loose); break; - case '>=': ret = gte(a, b, loose); break; - case '<': ret = lt(a, b, loose); break; - case '<=': ret = lte(a, b, loose); break; - default: throw new TypeError('Invalid operator: ' + op); - } - return ret; -} - -exports.Comparator = Comparator; -function Comparator(comp, loose) { - if (comp instanceof Comparator) { - if (comp.loose === loose) - return comp; - else - comp = comp.value; - } - - if (!(this instanceof Comparator)) - return new Comparator(comp, loose); - - debug('comparator', comp, loose); - this.loose = loose; - this.parse(comp); - - if (this.semver === ANY) - this.value = ''; - else - this.value = this.operator + this.semver.version; - - debug('comp', this); -} - -var ANY = {}; -Comparator.prototype.parse = function(comp) { - var r = this.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]; - var m = comp.match(r); - - if (!m) - throw new TypeError('Invalid comparator: ' + comp); - - this.operator = m[1]; - if (this.operator === '=') - this.operator = ''; - - // if it literally is just '>' or '' then allow anything. 
- if (!m[2]) - this.semver = ANY; - else - this.semver = new SemVer(m[2], this.loose); -}; - -Comparator.prototype.toString = function() { - return this.value; -}; - -Comparator.prototype.test = function(version) { - debug('Comparator.test', version, this.loose); - - if (this.semver === ANY) - return true; - - if (typeof version === 'string') - version = new SemVer(version, this.loose); - - return cmp(version, this.operator, this.semver, this.loose); -}; - - -exports.Range = Range; -function Range(range, loose) { - if ((range instanceof Range) && range.loose === loose) - return range; - - if (!(this instanceof Range)) - return new Range(range, loose); - - this.loose = loose; - - // First, split based on boolean or || - this.raw = range; - this.set = range.split(/\s*\|\|\s*/).map(function(range) { - return this.parseRange(range.trim()); - }, this).filter(function(c) { - // throw out any that are not relevant for whatever reason - return c.length; - }); - - if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + range); - } - - this.format(); -} - -Range.prototype.format = function() { - this.range = this.set.map(function(comps) { - return comps.join(' ').trim(); - }).join('||').trim(); - return this.range; -}; - -Range.prototype.toString = function() { - return this.range; -}; - -Range.prototype.parseRange = function(range) { - var loose = this.loose; - range = range.trim(); - debug('range', range, loose); - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]; - range = range.replace(hr, hyphenReplace); - debug('hyphen replace', range); - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace); - debug('comparator trim', range, re[COMPARATORTRIM]); - - // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[TILDETRIM], tildeTrimReplace); - - // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[CARETTRIM], caretTrimReplace); - - // normalize spaces - range = range.split(/\s+/).join(' '); - - // At this point, the range is completely trimmed and - // ready to be split into comparators. - - var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]; - var set = range.split(' ').map(function(comp) { - return parseComparator(comp, loose); - }).join(' ').split(/\s+/); - if (this.loose) { - // in loose mode, throw out any that are not valid comparators - set = set.filter(function(comp) { - return !!comp.match(compRe); - }); - } - set = set.map(function(comp) { - return new Comparator(comp, loose); - }); - - return set; -}; - -// Mostly just for testing and legacy API reasons -exports.toComparators = toComparators; -function toComparators(range, loose) { - return new Range(range, loose).set.map(function(comp) { - return comp.map(function(c) { - return c.value; - }).join(' ').trim().split(' '); - }); -} - -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators. 
-function parseComparator(comp, loose) { - debug('comp', comp); - comp = replaceCarets(comp, loose); - debug('caret', comp); - comp = replaceTildes(comp, loose); - debug('tildes', comp); - comp = replaceXRanges(comp, loose); - debug('xrange', comp); - comp = replaceStars(comp, loose); - debug('stars', comp); - return comp; -} - -function isX(id) { - return !id || id.toLowerCase() === 'x' || id === '*'; -} - -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 -function replaceTildes(comp, loose) { - return comp.trim().split(/\s+/).map(function(comp) { - return replaceTilde(comp, loose); - }).join(' '); -} - -function replaceTilde(comp, loose) { - var r = loose ? re[TILDELOOSE] : re[TILDE]; - return comp.replace(r, function(_, M, m, p, pr) { - debug('tilde', comp, _, M, m, p, pr); - var ret; - - if (isX(M)) - ret = ''; - else if (isX(m)) - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'; - else if (isX(p)) - // ~1.2 == >=1.2.0 <1.3.0 - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'; - else if (pr) { - debug('replaceTilde pr', pr); - if (pr.charAt(0) !== '-') - pr = '-' + pr; - ret = '>=' + M + '.' + m + '.' + p + pr + - ' <' + M + '.' + (+m + 1) + '.0'; - } else - // ~1.2.3 == >=1.2.3 <1.3.0 - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0'; - - debug('tilde return', ret); - return ret; - }); -} - -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 -// ^1.2.3 --> >=1.2.3 <2.0.0 -// ^1.2.0 --> >=1.2.0 <2.0.0 -function replaceCarets(comp, loose) { - return comp.trim().split(/\s+/).map(function(comp) { - return replaceCaret(comp, loose); - }).join(' '); -} - -function replaceCaret(comp, loose) { - debug('caret', comp, loose); - var r = loose ? re[CARETLOOSE] : re[CARET]; - return comp.replace(r, function(_, M, m, p, pr) { - debug('caret', comp, _, M, m, p, pr); - var ret; - - if (isX(M)) - ret = ''; - else if (isX(m)) - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'; - else if (isX(p)) { - if (M === '0') - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'; - else - ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'; - } else if (pr) { - debug('replaceCaret pr', pr); - if (pr.charAt(0) !== '-') - pr = '-' + pr; - if (M === '0') { - if (m === '0') - ret = '>=' + M + '.' + m + '.' + p + pr + - ' <' + M + '.' + m + '.' + (+p + 1); - else - ret = '>=' + M + '.' + m + '.' + p + pr + - ' <' + M + '.' + (+m + 1) + '.0'; - } else - ret = '>=' + M + '.' + m + '.' + p + pr + - ' <' + (+M + 1) + '.0.0'; - } else { - debug('no pr'); - if (M === '0') { - if (m === '0') - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + m + '.' + (+p + 1); - else - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0'; - } else - ret = '>=' + M + '.' + m + '.' + p + - ' <' + (+M + 1) + '.0.0'; - } - - debug('caret return', ret); - return ret; - }); -} - -function replaceXRanges(comp, loose) { - debug('replaceXRanges', comp, loose); - return comp.split(/\s+/).map(function(comp) { - return replaceXRange(comp, loose); - }).join(' '); -} - -function replaceXRange(comp, loose) { - comp = comp.trim(); - var r = loose ? 
re[XRANGELOOSE] : re[XRANGE]; - return comp.replace(r, function(ret, gtlt, M, m, p, pr) { - debug('xRange', comp, ret, gtlt, M, m, p, pr); - var xM = isX(M); - var xm = xM || isX(m); - var xp = xm || isX(p); - var anyX = xp; - - if (gtlt === '=' && anyX) - gtlt = ''; - - if (xM) { - if (gtlt === '>' || gtlt === '<') { - // nothing is allowed - ret = '<0.0.0'; - } else { - // nothing is forbidden - ret = '*'; - } - } else if (gtlt && anyX) { - // replace X with 0 - if (xm) - m = 0; - if (xp) - p = 0; - - if (gtlt === '>') { - // >1 => >=2.0.0 - // >1.2 => >=1.3.0 - // >1.2.3 => >= 1.2.4 - gtlt = '>='; - if (xm) { - M = +M + 1; - m = 0; - p = 0; - } else if (xp) { - m = +m + 1; - p = 0; - } - } else if (gtlt === '<=') { - // <=0.7.x is actually <0.8.0, since any 0.7.x should - // pass. Similarly, <=7.x is actually <8.0.0, etc. - gtlt = '<'; - if (xm) - M = +M + 1; - else - m = +m + 1; - } - - ret = gtlt + M + '.' + m + '.' + p; - } else if (xm) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'; - } else if (xp) { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'; - } - - debug('xRange return', ret); - - return ret; - }); -} - -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -function replaceStars(comp, loose) { - debug('replaceStars', comp, loose); - // Looseness is ignored here. star is always as loose as it gets! - return comp.trim().replace(re[STAR], ''); -} - -// This function is passed to string.replace(re[HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0 <3.5.0 -function hyphenReplace($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) { - - if (isX(fM)) - from = ''; - else if (isX(fm)) - from = '>=' + fM + '.0.0'; - else if (isX(fp)) - from = '>=' + fM + '.' + fm + '.0'; - else - from = '>=' + from; - - if (isX(tM)) - to = ''; - else if (isX(tm)) - to = '<' + (+tM + 1) + '.0.0'; - else if (isX(tp)) - to = '<' + tM + '.' + (+tm + 1) + '.0'; - else if (tpr) - to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr; - else - to = '<=' + to; - - return (from + ' ' + to).trim(); -} - - -// if ANY of the sets match ALL of its comparators, then pass -Range.prototype.test = function(version) { - if (!version) - return false; - - if (typeof version === 'string') - version = new SemVer(version, this.loose); - - for (var i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version)) - return true; - } - return false; -}; - -function testSet(set, version) { - for (var i = 0; i < set.length; i++) { - if (!set[i].test(version)) - return false; - } - - if (version.prerelease.length) { - // Find the set of versions that are allowed to have prereleases - // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 - // That should allow `1.2.3-pr.2` to pass. - // However, `1.2.4-alpha.notready` should NOT be allowed, - // even though it's within the range set by the comparators. - for (var i = 0; i < set.length; i++) { - debug(set[i].semver); - if (set[i].semver === ANY) - continue; - - if (set[i].semver.prerelease.length > 0) { - var allowed = set[i].semver; - if (allowed.major === version.major && - allowed.minor === version.minor && - allowed.patch === version.patch) - return true; - } - } - - // Version has a -pre, but it's not one of the ones we like. 
- return false; - } - - return true; -} - -exports.satisfies = satisfies; -function satisfies(version, range, loose) { - try { - range = new Range(range, loose); - } catch (er) { - return false; - } - return range.test(version); -} - -exports.maxSatisfying = maxSatisfying; -function maxSatisfying(versions, range, loose) { - return versions.filter(function(version) { - return satisfies(version, range, loose); - }).sort(function(a, b) { - return rcompare(a, b, loose); - })[0] || null; -} - -exports.minSatisfying = minSatisfying; -function minSatisfying(versions, range, loose) { - return versions.filter(function(version) { - return satisfies(version, range, loose); - }).sort(function(a, b) { - return compare(a, b, loose); - })[0] || null; -} - -exports.validRange = validRange; -function validRange(range, loose) { - try { - // Return '*' instead of '' so that truthiness works. - // This will throw if it's invalid anyway - return new Range(range, loose).range || '*'; - } catch (er) { - return null; - } -} - -// Determine if version is less than all the versions possible in the range -exports.ltr = ltr; -function ltr(version, range, loose) { - return outside(version, range, '<', loose); -} - -// Determine if version is greater than all the versions possible in the range. -exports.gtr = gtr; -function gtr(version, range, loose) { - return outside(version, range, '>', loose); -} - -exports.outside = outside; -function outside(version, range, hilo, loose) { - version = new SemVer(version, loose); - range = new Range(range, loose); - - var gtfn, ltefn, ltfn, comp, ecomp; - switch (hilo) { - case '>': - gtfn = gt; - ltefn = lte; - ltfn = lt; - comp = '>'; - ecomp = '>='; - break; - case '<': - gtfn = lt; - ltefn = gte; - ltfn = gt; - comp = '<'; - ecomp = '<='; - break; - default: - throw new TypeError('Must provide a hilo val of "<" or ">"'); - } - - // If it satisifes the range it is not outside - if (satisfies(version, range, loose)) { - return false; - } - - // From now on, variable terms are as if we're in "gtr" mode. - // but note that everything is flipped for the "ltr" function. - - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i]; - - var high = null; - var low = null; - - comparators.forEach(function(comparator) { - if (comparator.semver === ANY) { - comparator = new Comparator('>=0.0.0') - } - high = high || comparator; - low = low || comparator; - if (gtfn(comparator.semver, high.semver, loose)) { - high = comparator; - } else if (ltfn(comparator.semver, low.semver, loose)) { - low = comparator; - } - }); - - // If the edge version comparator has a operator then our version - // isn't outside it - if (high.operator === comp || high.operator === ecomp) { - return false; - } - - // If the lowest version comparator has an operator and our version - // is less than it then it isn't higher than the range - if ((!low.operator || low.operator === comp) && - ltefn(version, low.semver)) { - return false; - } else if (low.operator === ecomp && ltfn(version, low.semver)) { - return false; - } - } - return true; -} - -exports.prerelease = prerelease; -function prerelease(version, loose) { - var parsed = parse(version, loose); - return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null; -} diff --git a/node_modules/node-gyp/node_modules/tar/.npmignore b/node_modules/node-gyp/node_modules/tar/.npmignore deleted file mode 100644 index c167ad5b1c12f..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/.npmignore +++ /dev/null @@ -1,5 +0,0 @@ -.*.swp -node_modules -examples/extract/ -test/tmp/ -test/fixtures/ diff --git a/node_modules/node-gyp/node_modules/tar/.travis.yml b/node_modules/node-gyp/node_modules/tar/.travis.yml deleted file mode 100644 index fca8ef019405d..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/.travis.yml +++ /dev/null @@ -1,4 +0,0 @@ -language: node_js -node_js: - - 0.10 - - 0.11 diff --git a/node_modules/node-gyp/node_modules/tar/LICENSE b/node_modules/node-gyp/node_modules/tar/LICENSE deleted file mode 100644 index 019b7e40ea056..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/LICENSE +++ /dev/null @@ -1,12 +0,0 @@ -The ISC License -Copyright (c) Isaac Z. Schlueter and Contributors -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/tar/README.md b/node_modules/node-gyp/node_modules/tar/README.md deleted file mode 100644 index cfda2ac180611..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/README.md +++ /dev/null @@ -1,50 +0,0 @@ -# node-tar - -Tar for Node.js. - -[![NPM](https://nodei.co/npm/tar.png)](https://nodei.co/npm/tar/) - -## API - -See `examples/` for usage examples. - -### var tar = require('tar') - -Returns an object with `.Pack`, `.Extract` and `.Parse` methods. - -### tar.Pack([properties]) - -Returns a through stream. Use -[fstream](https://npmjs.org/package/fstream) to write files into the -pack stream and you will receive tar archive data from the pack -stream. - -This only works with directories, it does not work with individual files. - -The optional `properties` object are used to set properties in the tar -'Global Extended Header'. If the `fromBase` property is set to true, -the tar will contain files relative to the path passed, and not with -the path included. - -### tar.Extract([options]) - -Returns a through stream. Write tar data to the stream and the files -in the tarball will be extracted onto the filesystem. - -`options` can be: - -```js -{ - path: '/path/to/extract/tar/into', - strip: 0, // how many path segments to strip from the root when extracting -} -``` - -`options` also get passed to the `fstream.Writer` instance that `tar` -uses internally. - -### tar.Parse() - -Returns a writable stream. Write tar data to it and it will emit -`entry` events for each entry parsed from the tarball. This is used by -`tar.Extract`. 
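The deleted `examples/` directory below shows `Pack` and `Extract` in use; for the `tar.Parse()` path described above, a minimal hedged sketch (the archive path is a placeholder):

```js
var tar = require('tar')
var fs = require('fs')

// List entry paths from a tarball without extracting anything.
fs.createReadStream('archive.tar') // hypothetical input file
  .pipe(tar.Parse())
  .on('entry', function (entry) {
    console.log(entry.path) // entry.props carries the full parsed header
  })
  .on('error', function (err) {
    console.error('An error occurred:', err)
  })
```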
diff --git a/node_modules/node-gyp/node_modules/tar/examples/extracter.js b/node_modules/node-gyp/node_modules/tar/examples/extracter.js deleted file mode 100644 index f6253a72c5cd3..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/examples/extracter.js +++ /dev/null @@ -1,19 +0,0 @@ -var tar = require("../tar.js") - , fs = require("fs") - - -function onError(err) { - console.error('An error occurred:', err) -} - -function onEnd() { - console.log('Extracted!') -} - -var extractor = tar.Extract({path: __dirname + "/extract"}) - .on('error', onError) - .on('end', onEnd); - -fs.createReadStream(__dirname + "/../test/fixtures/c.tar") - .on('error', onError) - .pipe(extractor); diff --git a/node_modules/node-gyp/node_modules/tar/examples/packer.js b/node_modules/node-gyp/node_modules/tar/examples/packer.js deleted file mode 100644 index 039969ce300d1..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/examples/packer.js +++ /dev/null @@ -1,24 +0,0 @@ -var tar = require("../tar.js") - , fstream = require("fstream") - , fs = require("fs") - -var dirDest = fs.createWriteStream('dir.tar') - - -function onError(err) { - console.error('An error occurred:', err) -} - -function onEnd() { - console.log('Packed!') -} - -var packer = tar.Pack({ noProprietary: true }) - .on('error', onError) - .on('end', onEnd); - -// This must be a "directory" -fstream.Reader({ path: __dirname, type: "Directory" }) - .on('error', onError) - .pipe(packer) - .pipe(dirDest) diff --git a/node_modules/node-gyp/node_modules/tar/examples/reader.js b/node_modules/node-gyp/node_modules/tar/examples/reader.js deleted file mode 100644 index 39f3f0888a2cf..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/examples/reader.js +++ /dev/null @@ -1,36 +0,0 @@ -var tar = require("../tar.js") - , fs = require("fs") - -fs.createReadStream(__dirname + "/../test/fixtures/c.tar") - .pipe(tar.Parse()) - .on("extendedHeader", function (e) { - console.error("extended pax header", e.props) - e.on("end", function () { - console.error("extended pax fields:", e.fields) - }) - }) - .on("ignoredEntry", function (e) { - console.error("ignoredEntry?!?", e.props) - }) - .on("longLinkpath", function (e) { - console.error("longLinkpath entry", e.props) - e.on("end", function () { - console.error("value=%j", e.body.toString()) - }) - }) - .on("longPath", function (e) { - console.error("longPath entry", e.props) - e.on("end", function () { - console.error("value=%j", e.body.toString()) - }) - }) - .on("entry", function (e) { - console.error("entry", e.props) - e.on("data", function (c) { - console.error(" >>>" + c.toString().replace(/\n/g, "\\n")) - }) - e.on("end", function () { - console.error(" << 0 - return !this._needDrain -} - -EntryWriter.prototype.end = function (c) { - // console.error(".. ew end") - if (c) this._buffer.push(c) - this._buffer.push(EOF) - this._ended = true - this._process() - this._needDrain = this._buffer.length > 0 -} - -EntryWriter.prototype.pause = function () { - // console.error(".. ew pause") - this._paused = true - this.emit("pause") -} - -EntryWriter.prototype.resume = function () { - // console.error(".. ew resume") - this._paused = false - this.emit("resume") - this._process() -} - -EntryWriter.prototype.add = function (entry) { - // console.error(".. ew add") - if (!this.parent) return this.emit("error", new Error("no parent")) - - // make sure that the _header and such is emitted, and clear out - // the _currentEntry link on the parent. 
- if (!this._ended) this.end() - - return this.parent.add(entry) -} - -EntryWriter.prototype._header = function () { - // console.error(".. ew header") - if (this._didHeader) return - this._didHeader = true - - var headerBlock = TarHeader.encode(this.props) - - if (this.props.needExtended && !this._meta) { - var me = this - - ExtendedHeaderWriter = ExtendedHeaderWriter || - require("./extended-header-writer.js") - - ExtendedHeaderWriter(this.props) - .on("data", function (c) { - me.emit("data", c) - }) - .on("error", function (er) { - me.emit("error", er) - }) - .end() - } - - // console.error(".. .. ew headerBlock emitting") - this.emit("data", headerBlock) - this.emit("header") -} - -EntryWriter.prototype._process = function () { - // console.error(".. .. ew process") - if (!this._didHeader && !this._meta) { - this._header() - } - - if (this._paused || this._processing) { - // console.error(".. .. .. paused=%j, processing=%j", this._paused, this._processing) - return - } - - this._processing = true - - var buf = this._buffer - for (var i = 0; i < buf.length; i ++) { - // console.error(".. .. .. i=%d", i) - - var c = buf[i] - - if (c === EOF) this._stream.end() - else this._stream.write(c) - - if (this._paused) { - // console.error(".. .. .. paused mid-emission") - this._processing = false - if (i < buf.length) { - this._needDrain = true - this._buffer = buf.slice(i + 1) - } - return - } - } - - // console.error(".. .. .. emitted") - this._buffer.length = 0 - this._processing = false - - // console.error(".. .. .. emitting drain") - this.emit("drain") -} - -EntryWriter.prototype.destroy = function () {} diff --git a/node_modules/node-gyp/node_modules/tar/lib/entry.js b/node_modules/node-gyp/node_modules/tar/lib/entry.js deleted file mode 100644 index 591202bd3b9af..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/lib/entry.js +++ /dev/null @@ -1,220 +0,0 @@ -// A passthrough read/write stream that sets its properties -// based on a header, extendedHeader, and globalHeader -// -// Can be either a file system object of some sort, or -// a pax/ustar metadata entry. - -module.exports = Entry - -var TarHeader = require("./header.js") - , tar = require("../tar") - , assert = require("assert").ok - , Stream = require("stream").Stream - , inherits = require("inherits") - , fstream = require("fstream").Abstract - -function Entry (header, extended, global) { - Stream.call(this) - this.readable = true - this.writable = true - - this._needDrain = false - this._paused = false - this._reading = false - this._ending = false - this._ended = false - this._remaining = 0 - this._abort = false - this._queue = [] - this._index = 0 - this._queueLen = 0 - - this._read = this._read.bind(this) - - this.props = {} - this._header = header - this._extended = extended || {} - - // globals can change throughout the course of - // a file parse operation. Freeze it at its current state. - this._global = {} - var me = this - Object.keys(global || {}).forEach(function (g) { - me._global[g] = global[g] - }) - - this._setProps() -} - -inherits(Entry, Stream) - -Entry.prototype.write = function (c) { - if (this._ending) this.error("write() after end()", null, true) - if (this._remaining === 0) { - this.error("invalid bytes past eof") - } - - // often we'll get a bunch of \0 at the end of the last write, - // since chunks will always be 512 bytes when reading a tarball. - if (c.length > this._remaining) { - c = c.slice(0, this._remaining) - } - this._remaining -= c.length - - // put it on the stack. 
- var ql = this._queueLen - this._queue.push(c) - this._queueLen ++ - - this._read() - - // either paused, or buffered - if (this._paused || ql > 0) { - this._needDrain = true - return false - } - - return true -} - -Entry.prototype.end = function (c) { - if (c) this.write(c) - this._ending = true - this._read() -} - -Entry.prototype.pause = function () { - this._paused = true - this.emit("pause") -} - -Entry.prototype.resume = function () { - // console.error(" Tar Entry resume", this.path) - this.emit("resume") - this._paused = false - this._read() - return this._queueLen - this._index > 1 -} - - // This is bound to the instance -Entry.prototype._read = function () { - // console.error(" Tar Entry _read", this.path) - - if (this._paused || this._reading || this._ended) return - - // set this flag so that event handlers don't inadvertently - // get multiple _read() calls running. - this._reading = true - - // have any data to emit? - while (this._index < this._queueLen && !this._paused) { - var chunk = this._queue[this._index ++] - this.emit("data", chunk) - } - - // check if we're drained - if (this._index >= this._queueLen) { - this._queue.length = this._queueLen = this._index = 0 - if (this._needDrain) { - this._needDrain = false - this.emit("drain") - } - if (this._ending) { - this._ended = true - this.emit("end") - } - } - - // if the queue gets too big, then pluck off whatever we can. - // this should be fairly rare. - var mql = this._maxQueueLen - if (this._queueLen > mql && this._index > 0) { - mql = Math.min(this._index, mql) - this._index -= mql - this._queueLen -= mql - this._queue = this._queue.slice(mql) - } - - this._reading = false -} - -Entry.prototype._setProps = function () { - // props = extended->global->header->{} - var header = this._header - , extended = this._extended - , global = this._global - , props = this.props - - // first get the values from the normal header. - var fields = tar.fields - for (var f = 0; fields[f] !== null; f ++) { - var field = fields[f] - , val = header[field] - if (typeof val !== "undefined") props[field] = val - } - - // next, the global header for this file. - // numeric values, etc, will have already been parsed. - ;[global, extended].forEach(function (p) { - Object.keys(p).forEach(function (f) { - if (typeof p[f] !== "undefined") props[f] = p[f] - }) - }) - - // no nulls allowed in path or linkpath - ;["path", "linkpath"].forEach(function (p) { - if (props.hasOwnProperty(p)) { - props[p] = props[p].split("\0")[0] - } - }) - - - // set date fields to be a proper date - ;["mtime", "ctime", "atime"].forEach(function (p) { - if (props.hasOwnProperty(p)) { - props[p] = new Date(props[p] * 1000) - } - }) - - // set the type so that we know what kind of file to create - var type - switch (tar.types[props.type]) { - case "OldFile": - case "ContiguousFile": - type = "File" - break - - case "GNUDumpDir": - type = "Directory" - break - - case undefined: - type = "Unknown" - break - - case "Link": - case "SymbolicLink": - case "CharacterDevice": - case "BlockDevice": - case "Directory": - case "FIFO": - default: - type = tar.types[props.type] - } - - this.type = type - this.path = props.path - this.size = props.size - - // size is special, since it signals when the file needs to end. - this._remaining = props.size -} - -// the parser may not call write if _abort is true. -// useful for skipping data from some files quickly. 
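// ---- added aside, not part of the original tar source ----
// _setProps above normalizes raw header values: mtime/ctime/atime arrive as
// seconds since the epoch, so they are multiplied by 1000 before becoming
// Dates, and NUL-padded path/linkpath strings are cut at the first "\0".
var exampleMtime = new Date(1262304000 * 1000)
// exampleMtime.toISOString() === "2010-01-01T00:00:00.000Z"
// ---- end aside ----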
-Entry.prototype.abort = function(){ - this._abort = true -} - -Entry.prototype.warn = fstream.warn -Entry.prototype.error = fstream.error diff --git a/node_modules/node-gyp/node_modules/tar/lib/extended-header-writer.js b/node_modules/node-gyp/node_modules/tar/lib/extended-header-writer.js deleted file mode 100644 index 1728c4583ae06..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/lib/extended-header-writer.js +++ /dev/null @@ -1,191 +0,0 @@ - -module.exports = ExtendedHeaderWriter - -var inherits = require("inherits") - , EntryWriter = require("./entry-writer.js") - -inherits(ExtendedHeaderWriter, EntryWriter) - -var tar = require("../tar.js") - , path = require("path") - , TarHeader = require("./header.js") - -// props is the props of the thing we need to write an -// extended header for. -// Don't be shy with it. Just encode everything. -function ExtendedHeaderWriter (props) { - // console.error(">> ehw ctor") - var me = this - - if (!(me instanceof ExtendedHeaderWriter)) { - return new ExtendedHeaderWriter(props) - } - - me.fields = props - - var p = - { path : ("PaxHeader" + path.join("/", props.path || "")) - .replace(/\\/g, "/").substr(0, 100) - , mode : props.mode || 0666 - , uid : props.uid || 0 - , gid : props.gid || 0 - , size : 0 // will be set later - , mtime : props.mtime || Date.now() / 1000 - , type : "x" - , linkpath : "" - , ustar : "ustar\0" - , ustarver : "00" - , uname : props.uname || "" - , gname : props.gname || "" - , devmaj : props.devmaj || 0 - , devmin : props.devmin || 0 - } - - - EntryWriter.call(me, p) - // console.error(">> ehw props", me.props) - me.props = p - - me._meta = true -} - -ExtendedHeaderWriter.prototype.end = function () { - // console.error(">> ehw end") - var me = this - - if (me._ended) return - me._ended = true - - me._encodeFields() - - if (me.props.size === 0) { - // nothing to write! - me._ready = true - me._stream.end() - return - } - - me._stream.write(TarHeader.encode(me.props)) - me.body.forEach(function (l) { - me._stream.write(l) - }) - me._ready = true - - // console.error(">> ehw _process calling end()", me.props) - this._stream.end() -} - -ExtendedHeaderWriter.prototype._encodeFields = function () { - // console.error(">> ehw _encodeFields") - this.body = [] - if (this.fields.prefix) { - this.fields.path = this.fields.prefix + "/" + this.fields.path - this.fields.prefix = "" - } - encodeFields(this.fields, "", this.body, this.fields.noProprietary) - var me = this - this.body.forEach(function (l) { - me.props.size += l.length - }) -} - -function encodeFields (fields, prefix, body, nop) { - // console.error(">> >> ehw encodeFields") - // "%d %s=%s\n", , , - // The length is a decimal number, and includes itself and the \n - // Numeric values are decimal strings. - - Object.keys(fields).forEach(function (k) { - var val = fields[k] - , numeric = tar.numeric[k] - - if (prefix) k = prefix + "." + k - - // already including NODETAR.type, don't need File=true also - if (k === fields.type && val === true) return - - switch (k) { - // don't include anything that's always handled just fine - // in the normal header, or only meaningful in the context - // of nodetar - case "mode": - case "cksum": - case "ustar": - case "ustarver": - case "prefix": - case "basename": - case "dirname": - case "needExtended": - case "block": - case "filter": - return - - case "rdev": - if (val === 0) return - break - - case "nlink": - case "dev": // Truly a hero among men, Creator of Star! - case "ino": // Speak his name with reverent awe! 
It is: - k = "SCHILY." + k - break - - default: break - } - - if (val && typeof val === "object" && - !Buffer.isBuffer(val)) encodeFields(val, k, body, nop) - else if (val === null || val === undefined) return - else body.push.apply(body, encodeField(k, val, nop)) - }) - - return body -} - -function encodeField (k, v, nop) { - // lowercase keys must be valid, otherwise prefix with - // "NODETAR." - if (k.charAt(0) === k.charAt(0).toLowerCase()) { - var m = k.split(".")[0] - if (!tar.knownExtended[m]) k = "NODETAR." + k - } - - // no proprietary - if (nop && k.charAt(0) !== k.charAt(0).toLowerCase()) { - return [] - } - - if (typeof val === "number") val = val.toString(10) - - var s = new Buffer(" " + k + "=" + v + "\n") - , digits = Math.floor(Math.log(s.length) / Math.log(10)) + 1 - - // console.error("1 s=%j digits=%j s.length=%d", s.toString(), digits, s.length) - - // if adding that many digits will make it go over that length, - // then add one to it. For example, if the string is: - // " foo=bar\n" - // then that's 9 characters. With the "9", that bumps the length - // up to 10. However, this is invalid: - // "10 foo=bar\n" - // but, since that's actually 11 characters, since 10 adds another - // character to the length, and the length includes the number - // itself. In that case, just bump it up again. - if (s.length + digits >= Math.pow(10, digits)) digits += 1 - // console.error("2 s=%j digits=%j s.length=%d", s.toString(), digits, s.length) - - var len = digits + s.length - // console.error("3 s=%j digits=%j s.length=%d len=%d", s.toString(), digits, s.length, len) - var lenBuf = new Buffer("" + len) - if (lenBuf.length + s.length !== len) { - throw new Error("Bad length calculation\n"+ - "len="+len+"\n"+ - "lenBuf="+JSON.stringify(lenBuf.toString())+"\n"+ - "lenBuf.length="+lenBuf.length+"\n"+ - "digits="+digits+"\n"+ - "s="+JSON.stringify(s.toString())+"\n"+ - "s.length="+s.length) - } - - return [lenBuf, s] -} diff --git a/node_modules/node-gyp/node_modules/tar/lib/extended-header.js b/node_modules/node-gyp/node_modules/tar/lib/extended-header.js deleted file mode 100644 index 74f432ceee5b2..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/lib/extended-header.js +++ /dev/null @@ -1,140 +0,0 @@ -// An Entry consisting of: -// -// "%d %s=%s\n", , , -// -// The length is a decimal number, and includes itself and the \n -// \0 does not terminate anything. Only the length terminates the string. -// Numeric values are decimal strings. 
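// ---- added aside, not part of the original tar source ----
// The tricky part of the "%d %s=%s\n" records is that the leading length
// counts itself. A minimal sketch of encoding one record under that rule
// (the encodeField() above does the same bookkeeping with Buffers):
function paxRecord (key, value) {
  var body = " " + key + "=" + value + "\n"
  var len = body.length
  // adding the digits of the length can itself push the length up
  while (String(len).length + body.length > len) len++
  return String(len) + body
}
// paxRecord("path", "some/long/path") === "23 path=some/long/path\n"
// ---- end aside ----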
- -module.exports = ExtendedHeader - -var Entry = require("./entry.js") - , inherits = require("inherits") - , tar = require("../tar.js") - , numeric = tar.numeric - , keyTrans = { "SCHILY.dev": "dev" - , "SCHILY.ino": "ino" - , "SCHILY.nlink": "nlink" } - -function ExtendedHeader () { - Entry.apply(this, arguments) - this.on("data", this._parse) - this.fields = {} - this._position = 0 - this._fieldPos = 0 - this._state = SIZE - this._sizeBuf = [] - this._keyBuf = [] - this._valBuf = [] - this._size = -1 - this._key = "" -} - -inherits(ExtendedHeader, Entry) -ExtendedHeader.prototype._parse = parse - -var s = 0 - , states = ExtendedHeader.states = {} - , SIZE = states.SIZE = s++ - , KEY = states.KEY = s++ - , VAL = states.VAL = s++ - , ERR = states.ERR = s++ - -Object.keys(states).forEach(function (s) { - states[states[s]] = states[s] -}) - -states[s] = null - -// char code values for comparison -var _0 = "0".charCodeAt(0) - , _9 = "9".charCodeAt(0) - , point = ".".charCodeAt(0) - , a = "a".charCodeAt(0) - , Z = "Z".charCodeAt(0) - , a = "a".charCodeAt(0) - , z = "z".charCodeAt(0) - , space = " ".charCodeAt(0) - , eq = "=".charCodeAt(0) - , cr = "\n".charCodeAt(0) - -function parse (c) { - if (this._state === ERR) return - - for ( var i = 0, l = c.length - ; i < l - ; this._position++, this._fieldPos++, i++) { - // console.error("top of loop, size="+this._size) - - var b = c[i] - - if (this._size >= 0 && this._fieldPos > this._size) { - error(this, "field exceeds length="+this._size) - return - } - - switch (this._state) { - case ERR: return - - case SIZE: - // console.error("parsing size, b=%d, rest=%j", b, c.slice(i).toString()) - if (b === space) { - this._state = KEY - // this._fieldPos = this._sizeBuf.length - this._size = parseInt(new Buffer(this._sizeBuf).toString(), 10) - this._sizeBuf.length = 0 - continue - } - if (b < _0 || b > _9) { - error(this, "expected [" + _0 + ".." + _9 + "], got " + b) - return - } - this._sizeBuf.push(b) - continue - - case KEY: - // can be any char except =, not > size. 
- if (b === eq) { - this._state = VAL - this._key = new Buffer(this._keyBuf).toString() - if (keyTrans[this._key]) this._key = keyTrans[this._key] - this._keyBuf.length = 0 - continue - } - this._keyBuf.push(b) - continue - - case VAL: - // field must end with cr - if (this._fieldPos === this._size - 1) { - // console.error("finished with "+this._key) - if (b !== cr) { - error(this, "expected \\n at end of field") - return - } - var val = new Buffer(this._valBuf).toString() - if (numeric[this._key]) { - val = parseFloat(val) - } - this.fields[this._key] = val - - this._valBuf.length = 0 - this._state = SIZE - this._size = -1 - this._fieldPos = -1 - continue - } - this._valBuf.push(b) - continue - } - } -} - -function error (me, msg) { - msg = "invalid header: " + msg - + "\nposition=" + me._position - + "\nfield position=" + me._fieldPos - - me.error(msg) - me.state = ERR -} diff --git a/node_modules/node-gyp/node_modules/tar/lib/extract.js b/node_modules/node-gyp/node_modules/tar/lib/extract.js deleted file mode 100644 index fe1bb976eb0ce..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/lib/extract.js +++ /dev/null @@ -1,94 +0,0 @@ -// give it a tarball and a path, and it'll dump the contents - -module.exports = Extract - -var tar = require("../tar.js") - , fstream = require("fstream") - , inherits = require("inherits") - , path = require("path") - -function Extract (opts) { - if (!(this instanceof Extract)) return new Extract(opts) - tar.Parse.apply(this) - - if (typeof opts !== "object") { - opts = { path: opts } - } - - // better to drop in cwd? seems more standard. - opts.path = opts.path || path.resolve("node-tar-extract") - opts.type = "Directory" - opts.Directory = true - - // similar to --strip or --strip-components - opts.strip = +opts.strip - if (!opts.strip || opts.strip <= 0) opts.strip = 0 - - this._fst = fstream.Writer(opts) - - this.pause() - var me = this - - // Hardlinks in tarballs are relative to the root - // of the tarball. So, they need to be resolved against - // the target directory in order to be created properly. - me.on("entry", function (entry) { - // if there's a "strip" argument, then strip off that many - // path components. 
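// ---- added aside, not part of the original tar source ----
// Same idea as tar's --strip-components: with { strip: 1 },
// "package/lib/index.js" is extracted as "lib/index.js".
function stripComponents (p, n) {
  return p.split("/").slice(n).join("/")
}
// stripComponents("package/lib/index.js", 1) === "lib/index.js"
// ---- end aside ----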
- if (opts.strip) { - var p = entry.path.split("/").slice(opts.strip).join("/") - entry.path = entry.props.path = p - if (entry.linkpath) { - var lp = entry.linkpath.split("/").slice(opts.strip).join("/") - entry.linkpath = entry.props.linkpath = lp - } - } - if (entry.type === "Link") { - entry.linkpath = entry.props.linkpath = - path.join(opts.path, path.join("/", entry.props.linkpath)) - } - - if (entry.type === "SymbolicLink") { - var dn = path.dirname(entry.path) || "" - var linkpath = entry.props.linkpath - var target = path.resolve(opts.path, dn, linkpath) - if (target.indexOf(opts.path) !== 0) { - linkpath = path.join(opts.path, path.join("/", linkpath)) - } - entry.linkpath = entry.props.linkpath = linkpath - } - }) - - this._fst.on("ready", function () { - me.pipe(me._fst, { end: false }) - me.resume() - }) - - this._fst.on('error', function(err) { - me.emit('error', err) - }) - - this._fst.on('drain', function() { - me.emit('drain') - }) - - // this._fst.on("end", function () { - // console.error("\nEEEE Extract End", me._fst.path) - // }) - - this._fst.on("close", function () { - // console.error("\nEEEE Extract End", me._fst.path) - me.emit("finish") - me.emit("end") - me.emit("close") - }) -} - -inherits(Extract, tar.Parse) - -Extract.prototype._streamEnd = function () { - var me = this - if (!me._ended || me._entry) me.error("unexpected eof") - me._fst.end() - // my .end() is coming later. -} diff --git a/node_modules/node-gyp/node_modules/tar/lib/global-header-writer.js b/node_modules/node-gyp/node_modules/tar/lib/global-header-writer.js deleted file mode 100644 index 0bfc7b80aa7c6..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/lib/global-header-writer.js +++ /dev/null @@ -1,14 +0,0 @@ -module.exports = GlobalHeaderWriter - -var ExtendedHeaderWriter = require("./extended-header-writer.js") - , inherits = require("inherits") - -inherits(GlobalHeaderWriter, ExtendedHeaderWriter) - -function GlobalHeaderWriter (props) { - if (!(this instanceof GlobalHeaderWriter)) { - return new GlobalHeaderWriter(props) - } - ExtendedHeaderWriter.call(this, props) - this.props.type = "g" -} diff --git a/node_modules/node-gyp/node_modules/tar/lib/header.js b/node_modules/node-gyp/node_modules/tar/lib/header.js deleted file mode 100644 index 05b237c0c7b32..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/lib/header.js +++ /dev/null @@ -1,385 +0,0 @@ -// parse a 512-byte header block to a data object, or vice-versa -// If the data won't fit nicely in a simple header, then generate -// the appropriate extended header file, and return that. - -module.exports = TarHeader - -var tar = require("../tar.js") - , fields = tar.fields - , fieldOffs = tar.fieldOffs - , fieldEnds = tar.fieldEnds - , fieldSize = tar.fieldSize - , numeric = tar.numeric - , assert = require("assert").ok - , space = " ".charCodeAt(0) - , slash = "/".charCodeAt(0) - , bslash = process.platform === "win32" ? "\\".charCodeAt(0) : null - -function TarHeader (block) { - if (!(this instanceof TarHeader)) return new TarHeader(block) - if (block) this.decode(block) -} - -TarHeader.prototype = - { decode : decode - , encode: encode - , calcSum: calcSum - , checkSum: checkSum - } - -TarHeader.parseNumeric = parseNumeric -TarHeader.encode = encode -TarHeader.decode = decode - -// note that this will only do the normal ustar header, not any kind -// of extended posix header file. If something doesn't fit comfortably, -// then it will set obj.needExtended = true, and set the block to -// the closest approximation. 
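// ---- added aside, not part of the original tar source ----
// A plain ustar header only has 100 bytes for "path" plus 155 bytes for
// "prefix"; encode() below splits a long path at a "/" so both halves fit,
// and anything that still will not fit is flagged via needExtended. A rough
// sketch of the split (simplified, assumes ASCII paths):
function splitUstarPath (p) {
  if (p.length <= 100) return { prefix: "", path: p }
  var cut = p.indexOf("/", p.length - 101)   // slash such that the tail fits in 100
  if (cut === -1 || cut > 155) return null   // needs an extended header instead
  return { prefix: p.slice(0, cut), path: p.slice(cut + 1) }
}
// ---- end aside ----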
-function encode (obj) { - if (!obj && !(this instanceof TarHeader)) throw new Error( - "encode must be called on a TarHeader, or supplied an object") - - obj = obj || this - var block = obj.block = new Buffer(512) - - // if the object has a "prefix", then that's actually an extension of - // the path field. - if (obj.prefix) { - // console.error("%% header encoding, got a prefix", obj.prefix) - obj.path = obj.prefix + "/" + obj.path - // console.error("%% header encoding, prefixed path", obj.path) - obj.prefix = "" - } - - obj.needExtended = false - - if (obj.mode) { - if (typeof obj.mode === "string") obj.mode = parseInt(obj.mode, 8) - obj.mode = obj.mode & 0777 - } - - for (var f = 0; fields[f] !== null; f ++) { - var field = fields[f] - , off = fieldOffs[f] - , end = fieldEnds[f] - , ret - - switch (field) { - case "cksum": - // special, done below, after all the others - break - - case "prefix": - // special, this is an extension of the "path" field. - // console.error("%% header encoding, skip prefix later") - break - - case "type": - // convert from long name to a single char. - var type = obj.type || "0" - if (type.length > 1) { - type = tar.types[obj.type] - if (!type) type = "0" - } - writeText(block, off, end, type) - break - - case "path": - // uses the "prefix" field if > 100 bytes, but <= 255 - var pathLen = Buffer.byteLength(obj.path) - , pathFSize = fieldSize[fields.path] - , prefFSize = fieldSize[fields.prefix] - - // paths between 100 and 255 should use the prefix field. - // longer than 255 - if (pathLen > pathFSize && - pathLen <= pathFSize + prefFSize) { - // need to find a slash somewhere in the middle so that - // path and prefix both fit in their respective fields - var searchStart = pathLen - 1 - pathFSize - , searchEnd = prefFSize - , found = false - , pathBuf = new Buffer(obj.path) - - for ( var s = searchStart - ; (s <= searchEnd) - ; s ++ ) { - if (pathBuf[s] === slash || pathBuf[s] === bslash) { - found = s - break - } - } - - if (found !== false) { - prefix = pathBuf.slice(0, found).toString("utf8") - path = pathBuf.slice(found + 1).toString("utf8") - - ret = writeText(block, off, end, path) - off = fieldOffs[fields.prefix] - end = fieldEnds[fields.prefix] - // console.error("%% header writing prefix", off, end, prefix) - ret = writeText(block, off, end, prefix) || ret - break - } - } - - // paths less than 100 chars don't need a prefix - // and paths longer than 255 need an extended header and will fail - // on old implementations no matter what we do here. - // Null out the prefix, and fallthrough to default. - // console.error("%% header writing no prefix") - var poff = fieldOffs[fields.prefix] - , pend = fieldEnds[fields.prefix] - writeText(block, poff, pend, "") - // fallthrough - - // all other fields are numeric or text - default: - ret = numeric[field] - ? writeNumeric(block, off, end, obj[field]) - : writeText(block, off, end, obj[field] || "") - break - } - obj.needExtended = obj.needExtended || ret - } - - var off = fieldOffs[fields.cksum] - , end = fieldEnds[fields.cksum] - - writeNumeric(block, off, end, calcSum.call(this, block)) - - return block -} - -// if it's a negative number, or greater than will fit, -// then use write256. 
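// ---- added aside, not part of the original tar source ----
// The classic octal size field is 12 bytes (11 octal digits plus a
// terminator), so the largest value it can hold in plain octal is:
var maxOctalSize = parseInt("77777777777", 8)
// maxOctalSize === 8589934591, i.e. 8 GiB - 1
// Anything larger (or negative, such as pre-1970 timestamps) takes the
// base-256 form written by write256() below, marked by setting the high bit
// of the field's first byte.
// ---- end aside ----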
-var MAXNUM = { 12: 077777777777 - , 11: 07777777777 - , 8 : 07777777 - , 7 : 0777777 } -function writeNumeric (block, off, end, num) { - var writeLen = end - off - , maxNum = MAXNUM[writeLen] || 0 - - num = num || 0 - // console.error(" numeric", num) - - if (num instanceof Date || - Object.prototype.toString.call(num) === "[object Date]") { - num = num.getTime() / 1000 - } - - if (num > maxNum || num < 0) { - write256(block, off, end, num) - // need an extended header if negative or too big. - return true - } - - // god, tar is so annoying - // if the string is small enough, you should put a space - // between the octal string and the \0, but if it doesn't - // fit, then don't. - var numStr = Math.floor(num).toString(8) - if (num < MAXNUM[writeLen - 1]) numStr += " " - - // pad with "0" chars - if (numStr.length < writeLen) { - numStr = (new Array(writeLen - numStr.length).join("0")) + numStr - } - - if (numStr.length !== writeLen - 1) { - throw new Error("invalid length: " + JSON.stringify(numStr) + "\n" + - "expected: "+writeLen) - } - block.write(numStr, off, writeLen, "utf8") - block[end - 1] = 0 -} - -function write256 (block, off, end, num) { - var buf = block.slice(off, end) - var positive = num >= 0 - buf[0] = positive ? 0x80 : 0xFF - - // get the number as a base-256 tuple - if (!positive) num *= -1 - var tuple = [] - do { - var n = num % 256 - tuple.push(n) - num = (num - n) / 256 - } while (num) - - var bytes = tuple.length - - var fill = buf.length - bytes - for (var i = 1; i < fill; i ++) { - buf[i] = positive ? 0 : 0xFF - } - - // tuple is a base256 number, with [0] as the *least* significant byte - // if it's negative, then we need to flip all the bits once we hit the - // first non-zero bit. The 2's-complement is (0x100 - n), and the 1's- - // complement is (0xFF - n). 
- var zero = true - for (i = bytes; i > 0; i --) { - var byte = tuple[bytes - i] - if (positive) buf[fill + i] = byte - else if (zero && byte === 0) buf[fill + i] = 0 - else if (zero) { - zero = false - buf[fill + i] = 0x100 - byte - } else buf[fill + i] = 0xFF - byte - } -} - -function writeText (block, off, end, str) { - // strings are written as utf8, then padded with \0 - var strLen = Buffer.byteLength(str) - , writeLen = Math.min(strLen, end - off) - // non-ascii fields need extended headers - // long fields get truncated - , needExtended = strLen !== str.length || strLen > writeLen - - // write the string, and null-pad - if (writeLen > 0) block.write(str, off, writeLen, "utf8") - for (var i = off + writeLen; i < end; i ++) block[i] = 0 - - return needExtended -} - -function calcSum (block) { - block = block || this.block - assert(Buffer.isBuffer(block) && block.length === 512) - - if (!block) throw new Error("Need block to checksum") - - // now figure out what it would be if the cksum was " " - var sum = 0 - , start = fieldOffs[fields.cksum] - , end = fieldEnds[fields.cksum] - - for (var i = 0; i < fieldOffs[fields.cksum]; i ++) { - sum += block[i] - } - - for (var i = start; i < end; i ++) { - sum += space - } - - for (var i = end; i < 512; i ++) { - sum += block[i] - } - - return sum -} - - -function checkSum (block) { - var sum = calcSum.call(this, block) - block = block || this.block - - var cksum = block.slice(fieldOffs[fields.cksum], fieldEnds[fields.cksum]) - cksum = parseNumeric(cksum) - - return cksum === sum -} - -function decode (block) { - block = block || this.block - assert(Buffer.isBuffer(block) && block.length === 512) - - this.block = block - this.cksumValid = this.checkSum() - - var prefix = null - - // slice off each field. - for (var f = 0; fields[f] !== null; f ++) { - var field = fields[f] - , val = block.slice(fieldOffs[f], fieldEnds[f]) - - switch (field) { - case "ustar": - // if not ustar, then everything after that is just padding. - if (val.toString() !== "ustar\0") { - this.ustar = false - return - } else { - // console.error("ustar:", val, val.toString()) - this.ustar = val.toString() - } - break - - // prefix is special, since it might signal the xstar header - case "prefix": - var atime = parseNumeric(val.slice(131, 131 + 12)) - , ctime = parseNumeric(val.slice(131 + 12, 131 + 12 + 12)) - if ((val[130] === 0 || val[130] === space) && - typeof atime === "number" && - typeof ctime === "number" && - val[131 + 12] === space && - val[131 + 12 + 12] === space) { - this.atime = atime - this.ctime = ctime - val = val.slice(0, 130) - } - prefix = val.toString("utf8").replace(/\0+$/, "") - // console.error("%% header reading prefix", prefix) - break - - // all other fields are null-padding text - // or a number. - default: - if (numeric[field]) { - this[field] = parseNumeric(val) - } else { - this[field] = val.toString("utf8").replace(/\0+$/, "") - } - break - } - } - - // if we got a prefix, then prepend it to the path. 
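// ---- added aside, not part of the original tar source ----
// The header checksum computed by calcSum() above is just the byte sum of the
// whole 512-byte block with the 8 cksum bytes (offsets 148-155) counted as
// ASCII spaces. A compact equivalent sketch:
function simpleChecksum (block) {
  var sum = 0
  for (var i = 0; i < 512; i++) {
    sum += (i >= 148 && i < 156) ? 32 : block[i]
  }
  return sum
}
// ---- end aside ----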
- if (prefix) { - this.path = prefix + "/" + this.path - // console.error("%% header got a prefix", this.path) - } -} - -function parse256 (buf) { - // first byte MUST be either 80 or FF - // 80 for positive, FF for 2's comp - var positive - if (buf[0] === 0x80) positive = true - else if (buf[0] === 0xFF) positive = false - else return null - - // build up a base-256 tuple from the least sig to the highest - var zero = false - , tuple = [] - for (var i = buf.length - 1; i > 0; i --) { - var byte = buf[i] - if (positive) tuple.push(byte) - else if (zero && byte === 0) tuple.push(0) - else if (zero) { - zero = false - tuple.push(0x100 - byte) - } else tuple.push(0xFF - byte) - } - - for (var sum = 0, i = 0, l = tuple.length; i < l; i ++) { - sum += tuple[i] * Math.pow(256, i) - } - - return positive ? sum : -1 * sum -} - -function parseNumeric (f) { - if (f[0] & 0x80) return parse256(f) - - var str = f.toString("utf8").split("\0")[0].trim() - , res = parseInt(str, 8) - - return isNaN(res) ? null : res -} - diff --git a/node_modules/node-gyp/node_modules/tar/lib/pack.js b/node_modules/node-gyp/node_modules/tar/lib/pack.js deleted file mode 100644 index 5a3bb95a121bd..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/lib/pack.js +++ /dev/null @@ -1,236 +0,0 @@ -// pipe in an fstream, and it'll make a tarball. -// key-value pair argument is global extended header props. - -module.exports = Pack - -var EntryWriter = require("./entry-writer.js") - , Stream = require("stream").Stream - , path = require("path") - , inherits = require("inherits") - , GlobalHeaderWriter = require("./global-header-writer.js") - , collect = require("fstream").collect - , eof = new Buffer(512) - -for (var i = 0; i < 512; i ++) eof[i] = 0 - -inherits(Pack, Stream) - -function Pack (props) { - // console.error("-- p ctor") - var me = this - if (!(me instanceof Pack)) return new Pack(props) - - if (props) me._noProprietary = props.noProprietary - else me._noProprietary = false - - me._global = props - - me.readable = true - me.writable = true - me._buffer = [] - // console.error("-- -- set current to null in ctor") - me._currentEntry = null - me._processing = false - - me._pipeRoot = null - me.on("pipe", function (src) { - if (src.root === me._pipeRoot) return - me._pipeRoot = src - src.on("end", function () { - me._pipeRoot = null - }) - me.add(src) - }) -} - -Pack.prototype.addGlobal = function (props) { - // console.error("-- p addGlobal") - if (this._didGlobal) return - this._didGlobal = true - - var me = this - GlobalHeaderWriter(props) - .on("data", function (c) { - me.emit("data", c) - }) - .end() -} - -Pack.prototype.add = function (stream) { - if (this._global && !this._didGlobal) this.addGlobal(this._global) - - if (this._ended) return this.emit("error", new Error("add after end")) - - collect(stream) - this._buffer.push(stream) - this._process() - this._needDrain = this._buffer.length > 0 - return !this._needDrain -} - -Pack.prototype.pause = function () { - this._paused = true - if (this._currentEntry) this._currentEntry.pause() - this.emit("pause") -} - -Pack.prototype.resume = function () { - this._paused = false - if (this._currentEntry) this._currentEntry.resume() - this.emit("resume") - this._process() -} - -Pack.prototype.end = function () { - this._ended = true - this._buffer.push(eof) - this._process() -} - -Pack.prototype._process = function () { - var me = this - if (me._paused || me._processing) { - return - } - - var entry = me._buffer.shift() - - if (!entry) { - if (me._needDrain) { - 
me.emit("drain") - } - return - } - - if (entry.ready === false) { - // console.error("-- entry is not ready", entry) - me._buffer.unshift(entry) - entry.on("ready", function () { - // console.error("-- -- ready!", entry) - me._process() - }) - return - } - - me._processing = true - - if (entry === eof) { - // need 2 ending null blocks. - me.emit("data", eof) - me.emit("data", eof) - me.emit("end") - me.emit("close") - return - } - - // Change the path to be relative to the root dir that was - // added to the tarball. - // - // XXX This should be more like how -C works, so you can - // explicitly set a root dir, and also explicitly set a pathname - // in the tarball to use. That way we can skip a lot of extra - // work when resolving symlinks for bundled dependencies in npm. - - var root = path.dirname((entry.root || entry).path); - if (me._global && me._global.fromBase && entry.root && entry.root.path) { - // user set 'fromBase: true' indicating tar root should be directory itself - root = entry.root.path; - } - - var wprops = {} - - Object.keys(entry.props || {}).forEach(function (k) { - wprops[k] = entry.props[k] - }) - - if (me._noProprietary) wprops.noProprietary = true - - wprops.path = path.relative(root, entry.path || '') - - // actually not a matter of opinion or taste. - if (process.platform === "win32") { - wprops.path = wprops.path.replace(/\\/g, "/") - } - - if (!wprops.type) - wprops.type = 'Directory' - - switch (wprops.type) { - // sockets not supported - case "Socket": - return - - case "Directory": - wprops.path += "/" - wprops.size = 0 - break - - case "Link": - var lp = path.resolve(path.dirname(entry.path), entry.linkpath) - wprops.linkpath = path.relative(root, lp) || "." - wprops.size = 0 - break - - case "SymbolicLink": - var lp = path.resolve(path.dirname(entry.path), entry.linkpath) - wprops.linkpath = path.relative(path.dirname(entry.path), lp) || "." - wprops.size = 0 - break - } - - // console.error("-- new writer", wprops) - // if (!wprops.type) { - // // console.error("-- no type?", entry.constructor.name, entry) - // } - - // console.error("-- -- set current to new writer", wprops.path) - var writer = me._currentEntry = EntryWriter(wprops) - - writer.parent = me - - // writer.on("end", function () { - // // console.error("-- -- writer end", writer.path) - // }) - - writer.on("data", function (c) { - me.emit("data", c) - }) - - writer.on("header", function () { - Buffer.prototype.toJSON = function () { - return this.toString().split(/\0/).join(".") - } - // console.error("-- -- writer header %j", writer.props) - if (writer.props.size === 0) nextEntry() - }) - writer.on("close", nextEntry) - - var ended = false - function nextEntry () { - if (ended) return - ended = true - - // console.error("-- -- writer close", writer.path) - // console.error("-- -- set current to null", wprops.path) - me._currentEntry = null - me._processing = false - me._process() - } - - writer.on("error", function (er) { - // console.error("-- -- writer error", writer.path) - me.emit("error", er) - }) - - // if it's the root, then there's no need to add its entries, - // or data, since they'll be added directly. 
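// ---- added aside, not part of the original tar source ----
// Illustration of the path handling above (hypothetical paths): piping the
// directory /src/pkg into Pack makes root = path.dirname("/src/pkg") = "/src",
// so an entry at /src/pkg/lib/index.js is stored in the tarball as
// "pkg/lib/index.js" (backslashes are normalized to "/" on Windows).
var examplePath = require("path").relative("/src", "/src/pkg/lib/index.js")
// examplePath === "pkg/lib/index.js" on POSIX
// ---- end aside ----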
- if (entry === me._pipeRoot) { - // console.error("-- is the root, don't auto-add") - writer.add = null - } - - entry.pipe(writer) -} - -Pack.prototype.destroy = function () {} -Pack.prototype.write = function () {} diff --git a/node_modules/node-gyp/node_modules/tar/lib/parse.js b/node_modules/node-gyp/node_modules/tar/lib/parse.js deleted file mode 100644 index 600ad782f0f61..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/lib/parse.js +++ /dev/null @@ -1,275 +0,0 @@ - -// A writable stream. -// It emits "entry" events, which provide a readable stream that has -// header info attached. - -module.exports = Parse.create = Parse - -var stream = require("stream") - , Stream = stream.Stream - , BlockStream = require("block-stream") - , tar = require("../tar.js") - , TarHeader = require("./header.js") - , Entry = require("./entry.js") - , BufferEntry = require("./buffer-entry.js") - , ExtendedHeader = require("./extended-header.js") - , assert = require("assert").ok - , inherits = require("inherits") - , fstream = require("fstream") - -// reading a tar is a lot like reading a directory -// However, we're actually not going to run the ctor, -// since it does a stat and various other stuff. -// This inheritance gives us the pause/resume/pipe -// behavior that is desired. -inherits(Parse, fstream.Reader) - -function Parse () { - var me = this - if (!(me instanceof Parse)) return new Parse() - - // doesn't apply fstream.Reader ctor? - // no, becasue we don't want to stat/etc, we just - // want to get the entry/add logic from .pipe() - Stream.apply(me) - - me.writable = true - me.readable = true - me._stream = new BlockStream(512) - me.position = 0 - me._ended = false - - me._stream.on("error", function (e) { - me.emit("error", e) - }) - - me._stream.on("data", function (c) { - me._process(c) - }) - - me._stream.on("end", function () { - me._streamEnd() - }) - - me._stream.on("drain", function () { - me.emit("drain") - }) -} - -// overridden in Extract class, since it needs to -// wait for its DirWriter part to finish before -// emitting "end" -Parse.prototype._streamEnd = function () { - var me = this - if (!me._ended || me._entry) me.error("unexpected eof") - me.emit("end") -} - -// a tar reader is actually a filter, not just a readable stream. -// So, you should pipe a tarball stream into it, and it needs these -// write/end methods to do that. -Parse.prototype.write = function (c) { - if (this._ended) { - // gnutar puts a LOT of nulls at the end. - // you can keep writing these things forever. - // Just ignore them. - for (var i = 0, l = c.length; i > l; i ++) { - if (c[i] !== 0) return this.error("write() after end()") - } - return - } - return this._stream.write(c) -} - -Parse.prototype.end = function (c) { - this._ended = true - return this._stream.end(c) -} - -// don't need to do anything, since we're just -// proxying the data up from the _stream. -// Just need to override the parent's "Not Implemented" -// error-thrower. -Parse.prototype._read = function () {} - -Parse.prototype._process = function (c) { - assert(c && c.length === 512, "block size should be 512") - - // one of three cases. - // 1. A new header - // 2. A part of a file/extended header - // 3. 
One of two or more EOF null blocks - - if (this._entry) { - var entry = this._entry - if(!entry._abort) entry.write(c) - else { - entry._remaining -= c.length - if(entry._remaining < 0) entry._remaining = 0 - } - if (entry._remaining === 0) { - entry.end() - this._entry = null - } - } else { - // either zeroes or a header - var zero = true - for (var i = 0; i < 512 && zero; i ++) { - zero = c[i] === 0 - } - - // eof is *at least* 2 blocks of nulls, and then the end of the - // file. you can put blocks of nulls between entries anywhere, - // so appending one tarball to another is technically valid. - // ending without the eof null blocks is not allowed, however. - if (zero) { - if (this._eofStarted) - this._ended = true - this._eofStarted = true - } else { - this._eofStarted = false - this._startEntry(c) - } - } - - this.position += 512 -} - -// take a header chunk, start the right kind of entry. -Parse.prototype._startEntry = function (c) { - var header = new TarHeader(c) - , self = this - , entry - , ev - , EntryType - , onend - , meta = false - - if (null === header.size || !header.cksumValid) { - var e = new Error("invalid tar file") - e.header = header - e.tar_file_offset = this.position - e.tar_block = this.position / 512 - return this.emit("error", e) - } - - switch (tar.types[header.type]) { - case "File": - case "OldFile": - case "Link": - case "SymbolicLink": - case "CharacterDevice": - case "BlockDevice": - case "Directory": - case "FIFO": - case "ContiguousFile": - case "GNUDumpDir": - // start a file. - // pass in any extended headers - // These ones consumers are typically most interested in. - EntryType = Entry - ev = "entry" - break - - case "GlobalExtendedHeader": - // extended headers that apply to the rest of the tarball - EntryType = ExtendedHeader - onend = function () { - self._global = self._global || {} - Object.keys(entry.fields).forEach(function (k) { - self._global[k] = entry.fields[k] - }) - } - ev = "globalExtendedHeader" - meta = true - break - - case "ExtendedHeader": - case "OldExtendedHeader": - // extended headers that apply to the next entry - EntryType = ExtendedHeader - onend = function () { - self._extended = entry.fields - } - ev = "extendedHeader" - meta = true - break - - case "NextFileHasLongLinkpath": - // set linkpath= in extended header - EntryType = BufferEntry - onend = function () { - self._extended = self._extended || {} - self._extended.linkpath = entry.body - } - ev = "longLinkpath" - meta = true - break - - case "NextFileHasLongPath": - case "OldGnuLongPath": - // set path= in file-extended header - EntryType = BufferEntry - onend = function () { - self._extended = self._extended || {} - self._extended.path = entry.body - } - ev = "longPath" - meta = true - break - - default: - // all the rest we skip, but still set the _entry - // member, so that we can skip over their data appropriately. - // emit an event to say that this is an ignored entry type? - EntryType = Entry - ev = "ignoredEntry" - break - } - - var global, extended - if (meta) { - global = extended = null - } else { - var global = this._global - var extended = this._extended - - // extendedHeader only applies to one entry, so once we start - // an entry, it's over. - this._extended = null - } - entry = new EntryType(header, extended, global) - entry.meta = meta - - // only proxy data events of normal files. 
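// ---- added aside, not part of the original tar source ----
// The end-of-archive scan in _process() above treats a block as EOF padding
// only if every one of its 512 bytes is NUL, and requires at least two such
// blocks in a row. A standalone sketch of that test:
function isZeroBlock (block) {
  for (var i = 0; i < 512; i++) {
    if (block[i] !== 0) return false
  }
  return true
}
// ---- end aside ----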
- if (!meta) { - entry.on("data", function (c) { - me.emit("data", c) - }) - } - - if (onend) entry.on("end", onend) - - this._entry = entry - var me = this - - entry.on("pause", function () { - me.pause() - }) - - entry.on("resume", function () { - me.resume() - }) - - if (this.listeners("*").length) { - this.emit("*", ev, entry) - } - - this.emit(ev, entry) - - // Zero-byte entry. End immediately. - if (entry.props.size === 0) { - entry.end() - this._entry = null - } -} diff --git a/node_modules/node-gyp/node_modules/tar/package.json b/node_modules/node-gyp/node_modules/tar/package.json deleted file mode 100644 index 2ab77e47f328d..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/package.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "_from": "tar@^2.0.0", - "_id": "tar@2.2.1", - "_inBundle": false, - "_integrity": "sha1-jk0qJWwOIYXGsYrWlK7JaLg8sdE=", - "_location": "/node-gyp/tar", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "tar@^2.0.0", - "name": "tar", - "escapedName": "tar", - "rawSpec": "^2.0.0", - "saveSpec": null, - "fetchSpec": "^2.0.0" - }, - "_requiredBy": [ - "/node-gyp" - ], - "_resolved": "https://registry.npmjs.org/tar/-/tar-2.2.1.tgz", - "_shasum": "8e4d2a256c0e2185c6b18ad694aec968b83cb1d1", - "_spec": "tar@^2.0.0", - "_where": "/Users/rebecca/code/npm/node_modules/node-gyp", - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "bugs": { - "url": "https://github.com/isaacs/node-tar/issues" - }, - "bundleDependencies": false, - "dependencies": { - "block-stream": "*", - "fstream": "^1.0.2", - "inherits": "2" - }, - "deprecated": false, - "description": "tar for node", - "devDependencies": { - "graceful-fs": "^4.1.2", - "mkdirp": "^0.5.0", - "rimraf": "1.x", - "tap": "0.x" - }, - "homepage": "https://github.com/isaacs/node-tar#readme", - "license": "ISC", - "main": "tar.js", - "name": "tar", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/node-tar.git" - }, - "scripts": { - "test": "tap test/*.js" - }, - "version": "2.2.1" -} diff --git a/node_modules/node-gyp/node_modules/tar/tar.js b/node_modules/node-gyp/node_modules/tar/tar.js deleted file mode 100644 index a81298b9a0b12..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/tar.js +++ /dev/null @@ -1,173 +0,0 @@ -// field paths that every tar file must have. -// header is padded to 512 bytes. -var f = 0 - , fields = {} - , path = fields.path = f++ - , mode = fields.mode = f++ - , uid = fields.uid = f++ - , gid = fields.gid = f++ - , size = fields.size = f++ - , mtime = fields.mtime = f++ - , cksum = fields.cksum = f++ - , type = fields.type = f++ - , linkpath = fields.linkpath = f++ - , headerSize = 512 - , blockSize = 512 - , fieldSize = [] - -fieldSize[path] = 100 -fieldSize[mode] = 8 -fieldSize[uid] = 8 -fieldSize[gid] = 8 -fieldSize[size] = 12 -fieldSize[mtime] = 12 -fieldSize[cksum] = 8 -fieldSize[type] = 1 -fieldSize[linkpath] = 100 - -// "ustar\0" may introduce another bunch of headers. -// these are optional, and will be nulled out if not present. - -var ustar = fields.ustar = f++ - , ustarver = fields.ustarver = f++ - , uname = fields.uname = f++ - , gname = fields.gname = f++ - , devmaj = fields.devmaj = f++ - , devmin = fields.devmin = f++ - , prefix = fields.prefix = f++ - , fill = fields.fill = f++ - -// terminate fields. 
-fields[f] = null - -fieldSize[ustar] = 6 -fieldSize[ustarver] = 2 -fieldSize[uname] = 32 -fieldSize[gname] = 32 -fieldSize[devmaj] = 8 -fieldSize[devmin] = 8 -fieldSize[prefix] = 155 -fieldSize[fill] = 12 - -// nb: prefix field may in fact be 130 bytes of prefix, -// a null char, 12 bytes for atime, 12 bytes for ctime. -// -// To recognize this format: -// 1. prefix[130] === ' ' or '\0' -// 2. atime and ctime are octal numeric values -// 3. atime and ctime have ' ' in their last byte - -var fieldEnds = {} - , fieldOffs = {} - , fe = 0 -for (var i = 0; i < f; i ++) { - fieldOffs[i] = fe - fieldEnds[i] = (fe += fieldSize[i]) -} - -// build a translation table of field paths. -Object.keys(fields).forEach(function (f) { - if (fields[f] !== null) fields[fields[f]] = f -}) - -// different values of the 'type' field -// paths match the values of Stats.isX() functions, where appropriate -var types = - { 0: "File" - , "\0": "OldFile" // like 0 - , "": "OldFile" - , 1: "Link" - , 2: "SymbolicLink" - , 3: "CharacterDevice" - , 4: "BlockDevice" - , 5: "Directory" - , 6: "FIFO" - , 7: "ContiguousFile" // like 0 - // posix headers - , g: "GlobalExtendedHeader" // k=v for the rest of the archive - , x: "ExtendedHeader" // k=v for the next file - // vendor-specific stuff - , A: "SolarisACL" // skip - , D: "GNUDumpDir" // like 5, but with data, which should be skipped - , I: "Inode" // metadata only, skip - , K: "NextFileHasLongLinkpath" // data = link path of next file - , L: "NextFileHasLongPath" // data = path of next file - , M: "ContinuationFile" // skip - , N: "OldGnuLongPath" // like L - , S: "SparseFile" // skip - , V: "TapeVolumeHeader" // skip - , X: "OldExtendedHeader" // like x - } - -Object.keys(types).forEach(function (t) { - types[types[t]] = types[types[t]] || t -}) - -// values for the mode field -var modes = - { suid: 04000 // set uid on extraction - , sgid: 02000 // set gid on extraction - , svtx: 01000 // set restricted deletion flag on dirs on extraction - , uread: 0400 - , uwrite: 0200 - , uexec: 0100 - , gread: 040 - , gwrite: 020 - , gexec: 010 - , oread: 4 - , owrite: 2 - , oexec: 1 - , all: 07777 - } - -var numeric = - { mode: true - , uid: true - , gid: true - , size: true - , mtime: true - , devmaj: true - , devmin: true - , cksum: true - , atime: true - , ctime: true - , dev: true - , ino: true - , nlink: true - } - -Object.keys(modes).forEach(function (t) { - modes[modes[t]] = modes[modes[t]] || t -}) - -var knownExtended = - { atime: true - , charset: true - , comment: true - , ctime: true - , gid: true - , gname: true - , linkpath: true - , mtime: true - , path: true - , realtime: true - , security: true - , size: true - , uid: true - , uname: true } - - -exports.fields = fields -exports.fieldSize = fieldSize -exports.fieldOffs = fieldOffs -exports.fieldEnds = fieldEnds -exports.types = types -exports.modes = modes -exports.numeric = numeric -exports.headerSize = headerSize -exports.blockSize = blockSize -exports.knownExtended = knownExtended - -exports.Pack = require("./lib/pack.js") -exports.Parse = require("./lib/parse.js") -exports.Extract = require("./lib/extract.js") diff --git a/node_modules/node-gyp/node_modules/tar/test/00-setup-fixtures.js b/node_modules/node-gyp/node_modules/tar/test/00-setup-fixtures.js deleted file mode 100644 index 1524ff7af0570..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/00-setup-fixtures.js +++ /dev/null @@ -1,53 +0,0 @@ -// the fixtures have some weird stuff that is painful -// to include directly in the repo for 
various reasons. -// -// So, unpack the fixtures with the system tar first. -// -// This means, of course, that it'll only work if you -// already have a tar implementation, and some of them -// will not properly unpack the fixtures anyway. -// -// But, since usually those tests will fail on Windows -// and other systems with less capable filesystems anyway, -// at least this way we don't cause inconveniences by -// merely cloning the repo or installing the package. - -var tap = require("tap") -, child_process = require("child_process") -, rimraf = require("rimraf") -, test = tap.test -, path = require("path") - -test("clean fixtures", function (t) { - rimraf(path.resolve(__dirname, "fixtures"), function (er) { - t.ifError(er, "rimraf ./fixtures/") - t.end() - }) -}) - -test("clean tmp", function (t) { - rimraf(path.resolve(__dirname, "tmp"), function (er) { - t.ifError(er, "rimraf ./tmp/") - t.end() - }) -}) - -test("extract fixtures", function (t) { - var c = child_process.spawn("tar" - ,["xzvf", "fixtures.tgz"] - ,{ cwd: __dirname }) - - c.stdout.on("data", errwrite) - c.stderr.on("data", errwrite) - function errwrite (chunk) { - process.stderr.write(chunk) - } - - c.on("exit", function (code) { - t.equal(code, 0, "extract fixtures should exit with 0") - if (code) { - t.comment("Note, all tests from here on out will fail because of this.") - } - t.end() - }) -}) diff --git a/node_modules/node-gyp/node_modules/tar/test/cb-never-called-1.0.1.tgz b/node_modules/node-gyp/node_modules/tar/test/cb-never-called-1.0.1.tgz deleted file mode 100644 index 9e7014d85abe4..0000000000000 Binary files a/node_modules/node-gyp/node_modules/tar/test/cb-never-called-1.0.1.tgz and /dev/null differ diff --git a/node_modules/node-gyp/node_modules/tar/test/dir-normalization.js b/node_modules/node-gyp/node_modules/tar/test/dir-normalization.js deleted file mode 100644 index 9719c42f35400..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/dir-normalization.js +++ /dev/null @@ -1,177 +0,0 @@ -// Set the umask, so that it works the same everywhere. 
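// ---- added aside, not part of the original tar source ----
// With the umask pinned to 022, on-disk modes are deterministic: a directory
// created as 0777 lands as 0755, which is what the expected entries below
// assume.
var exampleMode = (parseInt('777', 8) & ~parseInt('22', 8)).toString(8)
// exampleMode === '755'
// ---- end aside ----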
-process.umask(parseInt('22', 8)) - -var fs = require('fs') -var path = require('path') - -var fstream = require('fstream') -var test = require('tap').test - -var tar = require('../tar.js') -var file = path.resolve(__dirname, 'dir-normalization.tar') -var target = path.resolve(__dirname, 'tmp/dir-normalization-test') -var ee = 0 - -var expectEntries = [ - { path: 'fixtures/', - mode: '755', - type: '5', - linkpath: '' - }, - { path: 'fixtures/a/', - mode: '755', - type: '5', - linkpath: '' - }, - { path: 'fixtures/the-chumbler', - mode: '755', - type: '2', - linkpath: path.resolve(target, 'a/b/c/d/the-chumbler'), - }, - { path: 'fixtures/a/b/', - mode: '755', - type: '5', - linkpath: '' - }, - { path: 'fixtures/a/x', - mode: '644', - type: '0', - linkpath: '' - }, - { path: 'fixtures/a/b/c/', - mode: '755', - type: '5', - linkpath: '' - }, - { path: 'fixtures/a/b/c/y', - mode: '755', - type: '2', - linkpath: '../../x', - } -] - -var ef = 0 -var expectFiles = [ - { path: '', - mode: '40755', - type: 'Directory', - depth: 0, - linkpath: undefined - }, - { path: '/fixtures', - mode: '40755', - type: 'Directory', - depth: 1, - linkpath: undefined - }, - { path: '/fixtures/a', - mode: '40755', - type: 'Directory', - depth: 2, - linkpath: undefined - }, - { path: '/fixtures/a/b', - mode: '40755', - type: 'Directory', - depth: 3, - linkpath: undefined - }, - { path: '/fixtures/a/b/c', - mode: '40755', - type: 'Directory', - depth: 4, - linkpath: undefined - }, - { path: '/fixtures/a/b/c/y', - mode: '120755', - type: 'SymbolicLink', - depth: 5, - linkpath: '../../x' - }, - { path: '/fixtures/a/x', - mode: '100644', - type: 'File', - depth: 3, - linkpath: undefined - }, - { path: '/fixtures/the-chumbler', - mode: '120755', - type: 'SymbolicLink', - depth: 2, - linkpath: path.resolve(target, 'a/b/c/d/the-chumbler') - } -] - -test('preclean', function (t) { - require('rimraf').sync(path.join(__dirname, '/tmp/dir-normalization-test')) - t.pass('cleaned!') - t.end() -}) - -test('extract test', function (t) { - var extract = tar.Extract(target) - var inp = fs.createReadStream(file) - - inp.pipe(extract) - - extract.on('end', function () { - t.equal(ee, expectEntries.length, 'should see ' + expectEntries.length + ' entries') - - // should get no more entries after end - extract.removeAllListeners('entry') - extract.on('entry', function (e) { - t.fail('Should not get entries after end!') - }) - - next() - }) - - extract.on('entry', function (entry) { - var mode = entry.props.mode & (~parseInt('22', 8)) - var found = { - path: entry.path, - mode: mode.toString(8), - type: entry.props.type, - linkpath: entry.props.linkpath, - } - - var wanted = expectEntries[ee++] - t.equivalent(found, wanted, 'tar entry ' + ee + ' ' + (wanted && wanted.path)) - }) - - function next () { - var r = fstream.Reader({ - path: target, - type: 'Directory', - sort: 'alpha' - }) - - r.on('ready', function () { - foundEntry(r) - }) - - r.on('end', finish) - - function foundEntry (entry) { - var p = entry.path.substr(target.length) - var mode = entry.props.mode & (~parseInt('22', 8)) - var found = { - path: p, - mode: mode.toString(8), - type: entry.props.type, - depth: entry.props.depth, - linkpath: entry.props.linkpath - } - - var wanted = expectFiles[ef++] - t.equivalent(found, wanted, 'unpacked file ' + ef + ' ' + (wanted && wanted.path)) - - entry.on('entry', foundEntry) - } - - function finish () { - t.equal(ef, expectFiles.length, 'should have ' + ef + ' items') - t.end() - } - } -}) diff --git 
a/node_modules/node-gyp/node_modules/tar/test/dir-normalization.tar b/node_modules/node-gyp/node_modules/tar/test/dir-normalization.tar deleted file mode 100644 index 3c4845356cecb..0000000000000 Binary files a/node_modules/node-gyp/node_modules/tar/test/dir-normalization.tar and /dev/null differ diff --git a/node_modules/node-gyp/node_modules/tar/test/error-on-broken.js b/node_modules/node-gyp/node_modules/tar/test/error-on-broken.js deleted file mode 100644 index e484920fd9625..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/error-on-broken.js +++ /dev/null @@ -1,33 +0,0 @@ -var fs = require('fs') -var path = require('path') -var zlib = require('zlib') - -var tap = require('tap') - -var tar = require('../tar.js') - -var file = path.join(__dirname, 'cb-never-called-1.0.1.tgz') -var target = path.join(__dirname, 'tmp/extract-test') - -tap.test('preclean', function (t) { - require('rimraf').sync(__dirname + '/tmp/extract-test') - t.pass('cleaned!') - t.end() -}) - -tap.test('extract test', function (t) { - var extract = tar.Extract(target) - var inp = fs.createReadStream(file) - - inp.pipe(zlib.createGunzip()).pipe(extract) - - extract.on('error', function (er) { - t.equal(er.message, 'unexpected eof', 'error noticed') - t.end() - }) - - extract.on('end', function () { - t.fail('shouldn\'t reach this point due to errors') - t.end() - }) -}) diff --git a/node_modules/node-gyp/node_modules/tar/test/extract-move.js b/node_modules/node-gyp/node_modules/tar/test/extract-move.js deleted file mode 100644 index 45400cd9bb818..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/extract-move.js +++ /dev/null @@ -1,132 +0,0 @@ -// Set the umask, so that it works the same everywhere. -process.umask(parseInt('22', 8)) - -var tap = require("tap") - , tar = require("../tar.js") - , fs = require("fs") - , gfs = require("graceful-fs") - , path = require("path") - , file = path.resolve(__dirname, "fixtures/dir.tar") - , target = path.resolve(__dirname, "tmp/extract-test") - , index = 0 - , fstream = require("fstream") - , rimraf = require("rimraf") - , mkdirp = require("mkdirp") - - , ee = 0 - , expectEntries = [ - { - "path" : "dir/", - "mode" : "750", - "type" : "5", - "depth" : undefined, - "size" : 0, - "linkpath" : "", - "nlink" : undefined, - "dev" : undefined, - "ino" : undefined - }, - { - "path" : "dir/sub/", - "mode" : "750", - "type" : "5", - "depth" : undefined, - "size" : 0, - "linkpath" : "", - "nlink" : undefined, - "dev" : undefined, - "ino" : undefined - } ] - -function slow (fs, method, t1, t2) { - var orig = fs[method] - if (!orig) return null - fs[method] = function () { - var args = [].slice.call(arguments) - console.error("slow", method, args[0]) - var cb = args.pop() - - setTimeout(function () { - orig.apply(fs, args.concat(function(er, data) { - setTimeout(function() { - cb(er, data) - }, t2) - })) - }, t1) - } -} - -// Make sure we get the graceful-fs that fstream is using. -var gfs2 -try { - gfs2 = require("fstream/node_modules/graceful-fs") -} catch (er) {} - -var slowMethods = ["chown", "chmod", "utimes", "lutimes"] -slowMethods.forEach(function (method) { - var t1 = 500 - var t2 = 0 - slow(fs, method, t1, t2) - slow(gfs, method, t1, t2) - if (gfs2) { - slow(gfs2, method, t1, t2) - } -}) - - - -// The extract class basically just pipes the input -// to a Reader, and then to a fstream.DirWriter - -// So, this is as much a test of fstream.Reader and fstream.Writer -// as it is of tar.Extract, but it sort of makes sense. 
- -tap.test("preclean", function (t) { - rimraf.sync(target) - /mkdirp.sync(target) - t.pass("cleaned!") - t.end() -}) - -tap.test("extract test", function (t) { - var extract = tar.Extract(target) - var inp = fs.createReadStream(file) - - // give it a weird buffer size to try to break in odd places - inp.bufferSize = 1234 - - inp.pipe(extract) - - extract.on("end", function () { - rimraf.sync(target) - - t.equal(ee, expectEntries.length, "should see "+ee+" entries") - - // should get no more entries after end - extract.removeAllListeners("entry") - extract.on("entry", function (e) { - t.fail("Should not get entries after end!") - }) - - t.end() - }) - - - extract.on("entry", function (entry) { - var found = - { path: entry.path - , mode: entry.props.mode.toString(8) - , type: entry.props.type - , depth: entry.props.depth - , size: entry.props.size - , linkpath: entry.props.linkpath - , nlink: entry.props.nlink - , dev: entry.props.dev - , ino: entry.props.ino - } - - var wanted = expectEntries[ee ++] - - t.equivalent(found, wanted, "tar entry " + ee + " " + wanted.path) - }) -}) diff --git a/node_modules/node-gyp/node_modules/tar/test/extract.js b/node_modules/node-gyp/node_modules/tar/test/extract.js deleted file mode 100644 index eca4e7cc962db..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/extract.js +++ /dev/null @@ -1,367 +0,0 @@ -// Set the umask, so that it works the same everywhere. -process.umask(parseInt('22', 8)) - -var tap = require("tap") - , tar = require("../tar.js") - , fs = require("fs") - , path = require("path") - , file = path.resolve(__dirname, "fixtures/c.tar") - , target = path.resolve(__dirname, "tmp/extract-test") - , index = 0 - , fstream = require("fstream") - - , ee = 0 - , expectEntries = -[ { path: 'c.txt', - mode: '644', - type: '0', - depth: undefined, - size: 513, - linkpath: '', - nlink: undefined, - dev: undefined, - ino: undefined }, - { path: 'cc.txt', - mode: '644', - type: '0', - depth: undefined, - size: 513, - linkpath: '', - nlink: undefined, - dev: undefined, - ino: undefined }, - { path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: '644', - type: '0', - depth: undefined, - size: 100, - linkpath: '', - nlink: undefined, - dev: undefined, - ino: undefined }, - { path: 'Ω.txt', - mode: '644', - type: '0', - depth: undefined, - size: 2, - linkpath: '', - nlink: undefined, - dev: undefined, - ino: undefined }, - { path: 'Ω.txt', - mode: '644', - type: '0', - depth: undefined, - size: 2, - linkpath: '', - nlink: 1, - dev: 234881026, - ino: 51693379 }, - { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: '644', - type: '0', - depth: undefined, - size: 200, - linkpath: '', - nlink: 1, - dev: 234881026, - ino: 51681874 }, - { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: '644', - type: '0', - depth: undefined, - size: 201, - linkpath: '', - nlink: undefined, - dev: undefined, - ino: undefined }, - { path: 
'200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL', - mode: '777', - type: '2', - depth: undefined, - size: 0, - linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - nlink: undefined, - dev: undefined, - ino: undefined }, - { path: '200-hard', - mode: '644', - type: '0', - depth: undefined, - size: 200, - linkpath: '', - nlink: 2, - dev: 234881026, - ino: 51681874 }, - { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: '644', - type: '1', - depth: undefined, - size: 0, - linkpath: path.resolve(target, '200-hard'), - nlink: 2, - dev: 234881026, - ino: 51681874 } ] - - , ef = 0 - , expectFiles = -[ { path: '', - mode: '40755', - type: 'Directory', - depth: 0, - linkpath: undefined }, - { path: '/200-hard', - mode: '100644', - type: 'File', - depth: 1, - size: 200, - linkpath: undefined, - nlink: 2 }, - { path: '/200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL', - mode: '120777', - type: 'SymbolicLink', - depth: 1, - size: 200, - linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - nlink: 1 }, - { path: '/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: '100644', - type: 'Link', - depth: 1, - size: 200, - linkpath: path.join(target, '200-hard'), - nlink: 2 }, - { path: '/c.txt', - mode: '100644', - type: 'File', - depth: 1, - size: 513, - linkpath: undefined, - nlink: 1 }, - { path: '/cc.txt', - mode: '100644', - type: 'File', - depth: 1, - size: 513, - linkpath: undefined, - nlink: 1 }, - { path: '/r', - mode: '40755', - type: 'Directory', - depth: 1, - linkpath: undefined }, - { path: '/r/e', - mode: '40755', - type: 'Directory', - depth: 2, - linkpath: undefined }, - { path: '/r/e/a', - mode: '40755', - type: 'Directory', - depth: 3, - linkpath: undefined }, - { path: '/r/e/a/l', - mode: '40755', - type: 'Directory', - depth: 4, - linkpath: undefined }, - { path: '/r/e/a/l/l', - mode: '40755', - type: 'Directory', - depth: 5, - linkpath: undefined }, - { path: '/r/e/a/l/l/y', - mode: '40755', - type: 'Directory', - depth: 6, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-', - mode: '40755', - type: 'Directory', - depth: 7, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d', - mode: '40755', - type: 'Directory', - depth: 8, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e', - mode: '40755', - type: 'Directory', - depth: 9, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e', - mode: '40755', - type: 'Directory', - depth: 10, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p', - mode: '40755', - type: 'Directory', - depth: 11, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-', - mode: '40755', - type: 
'Directory', - depth: 12, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f', - mode: '40755', - type: 'Directory', - depth: 13, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o', - mode: '40755', - type: 'Directory', - depth: 14, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l', - mode: '40755', - type: 'Directory', - depth: 15, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d', - mode: '40755', - type: 'Directory', - depth: 16, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e', - mode: '40755', - type: 'Directory', - depth: 17, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r', - mode: '40755', - type: 'Directory', - depth: 18, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-', - mode: '40755', - type: 'Directory', - depth: 19, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p', - mode: '40755', - type: 'Directory', - depth: 20, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a', - mode: '40755', - type: 'Directory', - depth: 21, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t', - mode: '40755', - type: 'Directory', - depth: 22, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h', - mode: '40755', - type: 'Directory', - depth: 23, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: '100644', - type: 'File', - depth: 24, - size: 100, - linkpath: undefined, - nlink: 1 }, - { path: '/Ω.txt', - mode: '100644', - type: 'File', - depth: 1, - size: 2, - linkpath: undefined, - nlink: 1 } ] - - - -// The extract class basically just pipes the input -// to a Reader, and then to a fstream.DirWriter - -// So, this is as much a test of fstream.Reader and fstream.Writer -// as it is of tar.Extract, but it sort of makes sense. 
- -tap.test("preclean", function (t) { - require("rimraf").sync(__dirname + "/tmp/extract-test") - t.pass("cleaned!") - t.end() -}) - -tap.test("extract test", function (t) { - var extract = tar.Extract(target) - var inp = fs.createReadStream(file) - - // give it a weird buffer size to try to break in odd places - inp.bufferSize = 1234 - - inp.pipe(extract) - - extract.on("end", function () { - t.equal(ee, expectEntries.length, "should see "+ee+" entries") - - // should get no more entries after end - extract.removeAllListeners("entry") - extract.on("entry", function (e) { - t.fail("Should not get entries after end!") - }) - - next() - }) - - extract.on("entry", function (entry) { - var found = - { path: entry.path - , mode: entry.props.mode.toString(8) - , type: entry.props.type - , depth: entry.props.depth - , size: entry.props.size - , linkpath: entry.props.linkpath - , nlink: entry.props.nlink - , dev: entry.props.dev - , ino: entry.props.ino - } - - var wanted = expectEntries[ee ++] - - t.equivalent(found, wanted, "tar entry " + ee + " " + wanted.path) - }) - - function next () { - var r = fstream.Reader({ path: target - , type: "Directory" - // this is just to encourage consistency - , sort: "alpha" }) - - r.on("ready", function () { - foundEntry(r) - }) - - r.on("end", finish) - - function foundEntry (entry) { - var p = entry.path.substr(target.length) - var found = - { path: p - , mode: entry.props.mode.toString(8) - , type: entry.props.type - , depth: entry.props.depth - , size: entry.props.size - , linkpath: entry.props.linkpath - , nlink: entry.props.nlink - } - - var wanted = expectFiles[ef ++] - - t.has(found, wanted, "unpacked file " + ef + " " + wanted.path) - - entry.on("entry", foundEntry) - } - - function finish () { - t.equal(ef, expectFiles.length, "should have "+ef+" items") - t.end() - } - } -}) diff --git a/node_modules/node-gyp/node_modules/tar/test/fixtures.tgz b/node_modules/node-gyp/node_modules/tar/test/fixtures.tgz deleted file mode 100644 index f1676023afa2b..0000000000000 Binary files a/node_modules/node-gyp/node_modules/tar/test/fixtures.tgz and /dev/null differ diff --git a/node_modules/node-gyp/node_modules/tar/test/header.js b/node_modules/node-gyp/node_modules/tar/test/header.js deleted file mode 100644 index 8ea6f79500de7..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/header.js +++ /dev/null @@ -1,183 +0,0 @@ -var tap = require("tap") -var TarHeader = require("../lib/header.js") -var tar = require("../tar.js") -var fs = require("fs") - - -var headers = - { "a.txt file header": - [ 
"612e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303430312031313635313336303333332030313234353100203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" - , { cksumValid: true - , path: 'a.txt' - , mode: 420 - , uid: 24561 - , gid: 20 - , size: 257 - , mtime: 1319493851 - , cksum: 5417 - , type: '0' - , linkpath: '' - , ustar: 'ustar\0' - , ustarver: '00' - , uname: 'isaacs' - , gname: 'staff' - , devmaj: 0 - , devmin: 0 - , fill: '' } - ] - - , "omega pax": // the extended header from omega tar. - [ "5061784865616465722fcea92e74787400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303137302031313534333731303631312030313530353100207800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" - , { cksumValid: true - , path: 'PaxHeader/Ω.txt' - , mode: 420 - , uid: 24561 - , gid: 20 - , size: 120 - , mtime: 1301254537 - , cksum: 6697 - , type: 'x' - , linkpath: '' - , ustar: 'ustar\0' - , ustarver: '00' - , uname: 'isaacs' - , gname: 'staff' - , devmaj: 0 - , devmin: 0 - , fill: '' } ] - - , "omega file header": - [ 
"cea92e7478740000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303030322031313534333731303631312030313330373200203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" - , { cksumValid: true - , path: 'Ω.txt' - , mode: 420 - , uid: 24561 - , gid: 20 - , size: 2 - , mtime: 1301254537 - , cksum: 5690 - , type: '0' - , linkpath: '' - , ustar: 'ustar\0' - , ustarver: '00' - , uname: 'isaacs' - , gname: 'staff' - , devmaj: 0 - , devmin: 0 - , fill: '' } ] - - , "foo.js file header": - [ "666f6f2e6a730000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303030342031313534333637303734312030313236313700203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" - , { cksumValid: true - , path: 'foo.js' - , mode: 420 - , uid: 24561 - , gid: 20 - , size: 4 - , mtime: 1301246433 - , cksum: 5519 - , type: '0' - , linkpath: '' - , ustar: 'ustar\0' - , ustarver: '00' - , uname: 'isaacs' - , gname: 'staff' - , devmaj: 0 - , devmin: 0 - , fill: '' } - ] - - , "b.txt file header": - [ 
"622e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030313030302031313635313336303637372030313234363100203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" - , { cksumValid: true - , path: 'b.txt' - , mode: 420 - , uid: 24561 - , gid: 20 - , size: 512 - , mtime: 1319494079 - , cksum: 5425 - , type: '0' - , linkpath: '' - , ustar: 'ustar\0' - , ustarver: '00' - , uname: 'isaacs' - , gname: 'staff' - , devmaj: 0 - , devmin: 0 - , fill: '' } - ] - - , "deep nested file": - [ "636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363633030303634342000303537373631200030303030323420003030303030303030313434203131363532313531353333203034333331340020300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000075737461720030306973616163730000000000000000000000000000000000000000000000000000737461666600000000000000000000000000000000000000000000000000000030303030303020003030303030302000722f652f612f6c2f6c2f792f2d2f642f652f652f702f2d2f662f6f2f6c2f642f652f722f2d2f702f612f742f680000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" - , { cksumValid: true, - path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' - , mode: 420 - , uid: 24561 - , gid: 20 - , size: 100 - , mtime: 1319687003 - , cksum: 18124 - , type: '0' - , linkpath: '' - , ustar: 'ustar\0' - , ustarver: '00' - , uname: 'isaacs' - , gname: 'staff' - , devmaj: 0 - , devmin: 0 - , fill: '' } - ] - } - -tap.test("parsing", function (t) { - Object.keys(headers).forEach(function (name) { - var h = headers[name] - , header = new Buffer(h[0], "hex") - , expect = h[1] - , parsed = new TarHeader(header) - - // console.error(parsed) - t.has(parsed, expect, "parse " + name) - }) - t.end() -}) - -tap.test("encoding", function (t) { - Object.keys(headers).forEach(function (name) { - var h = headers[name] - , expect = new Buffer(h[0], "hex") - , encoded = TarHeader.encode(h[1]) - - // might have slightly different bytes, since the standard - // isn't very strict, but should have the same semantics - // checkSum will be different, but cksumValid will be true - - var th = new TarHeader(encoded) - delete h[1].block - delete h[1].needExtended - 
delete h[1].cksum - t.has(th, h[1], "fields "+name) - }) - t.end() -}) - -// test these manually. they're a bit rare to find in the wild -tap.test("parseNumeric tests", function (t) { - var parseNumeric = TarHeader.parseNumeric - , numbers = - { "303737373737373700": 2097151 - , "30373737373737373737373700": 8589934591 - , "303030303036343400": 420 - , "800000ffffffffffff": 281474976710655 - , "ffffff000000000001": -281474976710654 - , "ffffff000000000000": -281474976710655 - , "800000000000200000": 2097152 - , "8000000000001544c5": 1393861 - , "ffffffffffff1544c5": -15383354 } - Object.keys(numbers).forEach(function (n) { - var b = new Buffer(n, "hex") - t.equal(parseNumeric(b), numbers[n], n + " === " + numbers[n]) - }) - t.end() -}) diff --git a/node_modules/node-gyp/node_modules/tar/test/pack-no-proprietary.js b/node_modules/node-gyp/node_modules/tar/test/pack-no-proprietary.js deleted file mode 100644 index d4b03a1fe936b..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/pack-no-proprietary.js +++ /dev/null @@ -1,886 +0,0 @@ -// This is exactly like test/pack.js, except that it's excluding -// any proprietary headers. -// -// This loses some information about the filesystem, but creates -// tarballs that are supported by more versions of tar, especially -// old non-spec-compliant copies of gnutar. - -// the symlink file is excluded from git, because it makes -// windows freak the hell out. -var fs = require("fs") - , path = require("path") - , symlink = path.resolve(__dirname, "fixtures/symlink") -try { fs.unlinkSync(symlink) } catch (e) {} -fs.symlinkSync("./hardlink-1", symlink) -process.on("exit", function () { - fs.unlinkSync(symlink) -}) - -var tap = require("tap") - , tar = require("../tar.js") - , pkg = require("../package.json") - , Pack = tar.Pack - , fstream = require("fstream") - , Reader = fstream.Reader - , Writer = fstream.Writer - , input = path.resolve(__dirname, "fixtures/") - , target = path.resolve(__dirname, "tmp/pack.tar") - , uid = process.getuid ? process.getuid() : 0 - , gid = process.getgid ? process.getgid() : 0 - - , entries = - - // the global header and root fixtures/ dir are going to get - // a different date each time, so omit that bit. - // Also, dev/ino values differ across machines, so that's not - // included. 
- [ [ 'entry', - { path: 'fixtures/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'extendedHeader', - { path: 'PaxHeader/fixtures/200cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: uid, - gid: gid, - type: 'x', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' }, - { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - uid: uid, - gid: gid, - size: 200 } ] - - , [ 'entry', - { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: uid, - gid: gid, - size: 200, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/a.txt', - mode: 420, - uid: uid, - gid: gid, - size: 257, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/b.txt', - mode: 420, - uid: uid, - gid: gid, - size: 512, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/c.txt', - mode: 420, - uid: uid, - gid: gid, - size: 513, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/cc.txt', - mode: 420, - uid: uid, - gid: gid, - size: 513, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/dir/', - mode: 488, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/dir/sub/', - mode: 488, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/foo.js', - mode: 420, - uid: uid, - gid: gid, - size: 4, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/hardlink-1', - mode: 420, - uid: uid, - gid: gid, - size: 200, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/hardlink-2', - mode: 420, - uid: uid, - gid: gid, - size: 0, - type: '1', - linkpath: 'fixtures/hardlink-1', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/omega.txt', - mode: 420, - uid: uid, - gid: gid, - size: 2, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 
0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/packtest/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/packtest/omega.txt', - mode: 420, - uid: uid, - gid: gid, - size: 2, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/packtest/star.4.html', - mode: 420, - uid: uid, - gid: gid, - size: 54081, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'extendedHeader', - { path: 'PaxHeader/fixtures/packtest/Ω.txt', - mode: 420, - uid: uid, - gid: gid, - type: 'x', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' }, - { path: 'fixtures/packtest/Ω.txt', - uid: uid, - gid: gid, - size: 2 } ] - - , [ 'entry', - { path: 'fixtures/packtest/Ω.txt', - mode: 420, - uid: uid, - gid: gid, - size: 2, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/', - mode: 493, - uid: uid, - gid: gid, - size: 0, 
- type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 
'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: uid, - gid: gid, - size: 100, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/symlink', - uid: uid, - gid: gid, - size: 0, - type: '2', - linkpath: 'hardlink-1', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'extendedHeader', - { path: 'PaxHeader/fixtures/Ω.txt', - mode: 420, - uid: uid, - gid: gid, - type: 'x', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' }, - { path: "fixtures/Ω.txt" - , uid: uid - , gid: gid - , size: 2 } ] - - , [ 'entry', - { path: 'fixtures/Ω.txt', - mode: 420, - uid: uid, - gid: gid, - size: 2, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - ] - - -// first, make sure that the hardlinks are actually hardlinks, or this -// won't work. Git has a way of replacing them with a copy. -var hard1 = path.resolve(__dirname, "fixtures/hardlink-1") - , hard2 = path.resolve(__dirname, "fixtures/hardlink-2") - , fs = require("fs") - -try { fs.unlinkSync(hard2) } catch (e) {} -fs.linkSync(hard1, hard2) - -tap.test("with global header", { timeout: 10000 }, function (t) { - runTest(t, true) -}) - -tap.test("without global header", { timeout: 10000 }, function (t) { - runTest(t, false) -}) - -function alphasort (a, b) { - return a === b ? 0 - : a.toLowerCase() > b.toLowerCase() ? 1 - : a.toLowerCase() < b.toLowerCase() ? -1 - : a > b ? 1 - : -1 -} - - -function runTest (t, doGH) { - var reader = Reader({ path: input - , filter: function () { - return !this.path.match(/\.(tar|hex)$/) - } - , sort: alphasort - }) - - var props = doGH ? pkg : {} - props.noProprietary = true - var pack = Pack(props) - var writer = Writer(target) - - // global header should be skipped regardless, since it has no content. 
- var entry = 0 - - t.ok(reader, "reader ok") - t.ok(pack, "pack ok") - t.ok(writer, "writer ok") - - pack.pipe(writer) - - var parse = tar.Parse() - t.ok(parse, "parser should be ok") - - pack.on("data", function (c) { - // console.error("PACK DATA") - if (c.length !== 512) { - // this one is too noisy, only assert if it'll be relevant - t.equal(c.length, 512, "parser should emit data in 512byte blocks") - } - parse.write(c) - }) - - pack.on("end", function () { - // console.error("PACK END") - t.pass("parser ends") - parse.end() - }) - - pack.on("error", function (er) { - t.fail("pack error", er) - }) - - parse.on("error", function (er) { - t.fail("parse error", er) - }) - - writer.on("error", function (er) { - t.fail("writer error", er) - }) - - reader.on("error", function (er) { - t.fail("reader error", er) - }) - - parse.on("*", function (ev, e) { - var wanted = entries[entry++] - if (!wanted) { - t.fail("unexpected event: "+ev) - return - } - t.equal(ev, wanted[0], "event type should be "+wanted[0]) - - if (ev !== wanted[0] || e.path !== wanted[1].path) { - console.error("wanted", wanted) - console.error([ev, e.props]) - e.on("end", function () { - console.error(e.fields) - throw "break" - }) - } - - t.has(e.props, wanted[1], "properties "+wanted[1].path) - if (wanted[2]) { - e.on("end", function () { - if (!e.fields) { - t.ok(e.fields, "should get fields") - } else { - t.has(e.fields, wanted[2], "should get expected fields") - } - }) - } - }) - - reader.pipe(pack) - - writer.on("close", function () { - t.equal(entry, entries.length, "should get all expected entries") - t.pass("it finished") - t.end() - }) - -} diff --git a/node_modules/node-gyp/node_modules/tar/test/pack.js b/node_modules/node-gyp/node_modules/tar/test/pack.js deleted file mode 100644 index 0f16c07bb0162..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/pack.js +++ /dev/null @@ -1,952 +0,0 @@ - -// the symlink file is excluded from git, because it makes -// windows freak the hell out. -var fs = require("fs") - , path = require("path") - , symlink = path.resolve(__dirname, "fixtures/symlink") -try { fs.unlinkSync(symlink) } catch (e) {} -fs.symlinkSync("./hardlink-1", symlink) -process.on("exit", function () { - fs.unlinkSync(symlink) -}) - - -var tap = require("tap") - , tar = require("../tar.js") - , pkg = require("../package.json") - , Pack = tar.Pack - , fstream = require("fstream") - , Reader = fstream.Reader - , Writer = fstream.Writer - , input = path.resolve(__dirname, "fixtures/") - , target = path.resolve(__dirname, "tmp/pack.tar") - , uid = process.getuid ? process.getuid() : 0 - , gid = process.getgid ? process.getgid() : 0 - - , entries = - - // the global header and root fixtures/ dir are going to get - // a different date each time, so omit that bit. - // Also, dev/ino values differ across machines, so that's not - // included. 
- [ [ 'globalExtendedHeader', - { path: 'PaxHeader/', - mode: 438, - uid: 0, - gid: 0, - type: 'g', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' }, - { "NODETAR.author": pkg.author, - "NODETAR.name": pkg.name, - "NODETAR.description": pkg.description, - "NODETAR.version": pkg.version, - "NODETAR.repository.type": pkg.repository.type, - "NODETAR.repository.url": pkg.repository.url, - "NODETAR.main": pkg.main, - "NODETAR.scripts.test": pkg.scripts.test } ] - - , [ 'entry', - { path: 'fixtures/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'extendedHeader', - { path: 'PaxHeader/fixtures/200cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: uid, - gid: gid, - type: 'x', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' }, - { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - 'NODETAR.depth': '1', - 'NODETAR.type': 'File', - nlink: 1, - uid: uid, - gid: gid, - size: 200, - 'NODETAR.blksize': '4096', - 'NODETAR.blocks': '8' } ] - - , [ 'entry', - { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: uid, - gid: gid, - size: 200, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '', - 'NODETAR.depth': '1', - 'NODETAR.type': 'File', - nlink: 1, - 'NODETAR.blksize': '4096', - 'NODETAR.blocks': '8' } ] - - , [ 'entry', - { path: 'fixtures/a.txt', - mode: 420, - uid: uid, - gid: gid, - size: 257, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/b.txt', - mode: 420, - uid: uid, - gid: gid, - size: 512, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/c.txt', - mode: 420, - uid: uid, - gid: gid, - size: 513, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/cc.txt', - mode: 420, - uid: uid, - gid: gid, - size: 513, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/dir/', - mode: 488, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/dir/sub/', - mode: 488, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - - , [ 'entry', - { path: 'fixtures/foo.js', - mode: 420, - uid: uid, - gid: gid, - size: 4, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - 
ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/hardlink-1', - mode: 420, - uid: uid, - gid: gid, - size: 200, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/hardlink-2', - mode: 420, - uid: uid, - gid: gid, - size: 0, - type: '1', - linkpath: 'fixtures/hardlink-1', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/omega.txt', - mode: 420, - uid: uid, - gid: gid, - size: 2, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/packtest/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/packtest/omega.txt', - mode: 420, - uid: uid, - gid: gid, - size: 2, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/packtest/star.4.html', - mode: 420, - uid: uid, - gid: gid, - size: 54081, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'extendedHeader', - { path: 'PaxHeader/fixtures/packtest/Ω.txt', - mode: 420, - uid: uid, - gid: gid, - type: 'x', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' }, - { path: 'fixtures/packtest/Ω.txt', - 'NODETAR.depth': '2', - 'NODETAR.type': 'File', - nlink: 1, - uid: uid, - gid: gid, - size: 2, - 'NODETAR.blksize': '4096', - 'NODETAR.blocks': '8' } ] - - , [ 'entry', - { path: 'fixtures/packtest/Ω.txt', - mode: 420, - uid: uid, - gid: gid, - size: 2, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '', - 'NODETAR.depth': '2', - 'NODETAR.type': 'File', - nlink: 1, - 'NODETAR.blksize': '4096', - 'NODETAR.blocks': '8' } ] - - , [ 'entry', - { path: 'fixtures/r/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/', - mode: 
493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/', - mode: 493, - uid: 
uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: uid, - gid: gid, - size: 100, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/symlink', - uid: uid, - gid: gid, - size: 0, - type: '2', - linkpath: 'hardlink-1', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'extendedHeader', - { path: 'PaxHeader/fixtures/Ω.txt', - mode: 420, - uid: uid, - gid: gid, - type: 'x', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' }, - { path: "fixtures/Ω.txt" - , "NODETAR.depth": "1" - , "NODETAR.type": "File" - , nlink: 1 - , uid: uid - , gid: gid - , size: 2 - , "NODETAR.blksize": "4096" - , "NODETAR.blocks": "8" } ] - - , [ 'entry', - { path: 'fixtures/Ω.txt', - mode: 420, - uid: uid, - gid: gid, - size: 2, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '', - 'NODETAR.depth': '1', - 'NODETAR.type': 'File', - nlink: 1, - 'NODETAR.blksize': '4096', - 'NODETAR.blocks': '8' } ] - ] - - -// first, make sure that the hardlinks are actually hardlinks, or this -// won't work. Git has a way of replacing them with a copy. -var hard1 = path.resolve(__dirname, "fixtures/hardlink-1") - , hard2 = path.resolve(__dirname, "fixtures/hardlink-2") - , fs = require("fs") - -try { fs.unlinkSync(hard2) } catch (e) {} -fs.linkSync(hard1, hard2) - -tap.test("with global header", { timeout: 10000 }, function (t) { - runTest(t, true) -}) - -tap.test("without global header", { timeout: 10000 }, function (t) { - runTest(t, false) -}) - -tap.test("with from base", { timeout: 10000 }, function (t) { - runTest(t, true, true) -}) - -function alphasort (a, b) { - return a === b ? 0 - : a.toLowerCase() > b.toLowerCase() ? 1 - : a.toLowerCase() < b.toLowerCase() ? -1 - : a > b ? 1 - : -1 -} - - -function runTest (t, doGH, doFromBase) { - var reader = Reader({ path: input - , filter: function () { - return !this.path.match(/\.(tar|hex)$/) - } - , sort: alphasort - }) - - var props = doGH ? pkg : {} - if(doFromBase) props.fromBase = true; - - var pack = Pack(props) - var writer = Writer(target) - - // skip the global header if we're not doing that. - var entry = doGH ? 
0 : 1 - - t.ok(reader, "reader ok") - t.ok(pack, "pack ok") - t.ok(writer, "writer ok") - - pack.pipe(writer) - - var parse = tar.Parse() - t.ok(parse, "parser should be ok") - - pack.on("data", function (c) { - // console.error("PACK DATA") - if (c.length !== 512) { - // this one is too noisy, only assert if it'll be relevant - t.equal(c.length, 512, "parser should emit data in 512byte blocks") - } - parse.write(c) - }) - - pack.on("end", function () { - // console.error("PACK END") - t.pass("parser ends") - parse.end() - }) - - pack.on("error", function (er) { - t.fail("pack error", er) - }) - - parse.on("error", function (er) { - t.fail("parse error", er) - }) - - writer.on("error", function (er) { - t.fail("writer error", er) - }) - - reader.on("error", function (er) { - t.fail("reader error", er) - }) - - parse.on("*", function (ev, e) { - var wanted = entries[entry++] - if (!wanted) { - t.fail("unexpected event: "+ev) - return - } - t.equal(ev, wanted[0], "event type should be "+wanted[0]) - - if(doFromBase) { - if(wanted[1].path.indexOf('fixtures/') && wanted[1].path.length == 100) - wanted[1].path = wanted[1].path.replace('fixtures/', '') + 'ccccccccc' - - if(wanted[1]) wanted[1].path = wanted[1].path.replace('fixtures/', '').replace('//', '/') - if(wanted[1].path == '') wanted[1].path = '/' - if(wanted[2] && wanted[2].path) wanted[2].path = wanted[2].path.replace('fixtures', '').replace(/^\//, '') - - wanted[1].linkpath = wanted[1].linkpath.replace('fixtures/', '') - } - - if (ev !== wanted[0] || e.path !== wanted[1].path) { - console.error("wanted", wanted) - console.error([ev, e.props]) - e.on("end", function () { - console.error(e.fields) - throw "break" - }) - } - - - t.has(e.props, wanted[1], "properties "+wanted[1].path) - if (wanted[2]) { - e.on("end", function () { - if (!e.fields) { - t.ok(e.fields, "should get fields") - } else { - t.has(e.fields, wanted[2], "should get expected fields") - } - }) - } - }) - - reader.pipe(pack) - - writer.on("close", function () { - t.equal(entry, entries.length, "should get all expected entries") - t.pass("it finished") - t.end() - }) - -} diff --git a/node_modules/node-gyp/node_modules/tar/test/parse-discard.js b/node_modules/node-gyp/node_modules/tar/test/parse-discard.js deleted file mode 100644 index da01a65ccc7d7..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/parse-discard.js +++ /dev/null @@ -1,29 +0,0 @@ -var tap = require("tap") - , tar = require("../tar.js") - , fs = require("fs") - , path = require("path") - , file = path.resolve(__dirname, "fixtures/c.tar") - -tap.test("parser test", function (t) { - var parser = tar.Parse() - var total = 0 - var dataTotal = 0 - - parser.on("end", function () { - - t.equals(total-513,dataTotal,'should have discarded only c.txt') - - t.end() - }) - - fs.createReadStream(file) - .pipe(parser) - .on('entry',function(entry){ - if(entry.path === 'c.txt') entry.abort() - - total += entry.size; - entry.on('data',function(data){ - dataTotal += data.length - }) - }) -}) diff --git a/node_modules/node-gyp/node_modules/tar/test/parse.js b/node_modules/node-gyp/node_modules/tar/test/parse.js deleted file mode 100644 index f765a50129bff..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/parse.js +++ /dev/null @@ -1,359 +0,0 @@ -var tap = require("tap") - , tar = require("../tar.js") - , fs = require("fs") - , path = require("path") - , file = path.resolve(__dirname, "fixtures/c.tar") - , index = 0 - - , expect = -[ [ 'entry', - { path: 'c.txt', - mode: 420, - uid: 24561, - 
gid: 20, - size: 513, - mtime: new Date('Wed, 26 Oct 2011 01:10:58 GMT'), - cksum: 5422, - type: '0', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '' }, - undefined ], - [ 'entry', - { path: 'cc.txt', - mode: 420, - uid: 24561, - gid: 20, - size: 513, - mtime: new Date('Wed, 26 Oct 2011 01:11:02 GMT'), - cksum: 5525, - type: '0', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '' }, - undefined ], - [ 'entry', - { path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: 24561, - gid: 20, - size: 100, - mtime: new Date('Thu, 27 Oct 2011 03:43:23 GMT'), - cksum: 18124, - type: '0', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '' }, - undefined ], - [ 'entry', - { path: 'Ω.txt', - mode: 420, - uid: 24561, - gid: 20, - size: 2, - mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'), - cksum: 5695, - type: '0', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '' }, - undefined ], - [ 'extendedHeader', - { path: 'PaxHeader/Ω.txt', - mode: 420, - uid: 24561, - gid: 20, - size: 120, - mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'), - cksum: 6702, - type: 'x', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '' }, - { path: 'Ω.txt', - ctime: 1319737909, - atime: 1319739061, - dev: 234881026, - ino: 51693379, - nlink: 1 } ], - [ 'entry', - { path: 'Ω.txt', - mode: 420, - uid: 24561, - gid: 20, - size: 2, - mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'), - cksum: 5695, - type: '0', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '', - ctime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'), - atime: new Date('Thu, 27 Oct 2011 18:11:01 GMT'), - dev: 234881026, - ino: 51693379, - nlink: 1 }, - undefined ], - [ 'extendedHeader', - { path: 'PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: 24561, - gid: 20, - size: 353, - mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), - cksum: 14488, - type: 'x', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '' }, - { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - ctime: 1319686868, - atime: 1319741254, - 'LIBARCHIVE.creationtime': '1319686852', - dev: 234881026, - ino: 51681874, - nlink: 1 } ], - [ 'entry', - { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: 24561, - gid: 20, - size: 200, - mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), - cksum: 14570, - type: '0', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '', - ctime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), - atime: new Date('Thu, 27 Oct 2011 18:47:34 GMT'), - 
'LIBARCHIVE.creationtime': '1319686852', - dev: 234881026, - ino: 51681874, - nlink: 1 }, - undefined ], - [ 'longPath', - { path: '././@LongLink', - mode: 0, - uid: 0, - gid: 0, - size: 201, - mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'), - cksum: 4976, - type: 'L', - linkpath: '', - ustar: false }, - '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' ], - [ 'entry', - { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: 1000, - gid: 1000, - size: 201, - mtime: new Date('Thu, 27 Oct 2011 22:21:50 GMT'), - cksum: 14086, - type: '0', - linkpath: '', - ustar: false }, - undefined ], - [ 'longLinkpath', - { path: '././@LongLink', - mode: 0, - uid: 0, - gid: 0, - size: 201, - mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'), - cksum: 4975, - type: 'K', - linkpath: '', - ustar: false }, - '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' ], - [ 'longPath', - { path: '././@LongLink', - mode: 0, - uid: 0, - gid: 0, - size: 201, - mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'), - cksum: 4976, - type: 'L', - linkpath: '', - ustar: false }, - '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL' ], - [ 'entry', - { path: '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL', - mode: 511, - uid: 1000, - gid: 1000, - size: 0, - mtime: new Date('Fri, 28 Oct 2011 23:05:17 GMT'), - cksum: 21603, - type: '2', - linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - ustar: false }, - undefined ], - [ 'extendedHeader', - { path: 'PaxHeader/200-hard', - mode: 420, - uid: 24561, - gid: 20, - size: 143, - mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), - cksum: 6533, - type: 'x', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '' }, - { ctime: 1320617144, - atime: 1320617232, - 'LIBARCHIVE.creationtime': '1319686852', - dev: 234881026, - ino: 51681874, - nlink: 2 } ], - [ 'entry', - { path: '200-hard', - mode: 420, - uid: 24561, - gid: 20, - size: 200, - mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), - cksum: 5526, - type: '0', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '', - ctime: new Date('Sun, 06 Nov 2011 22:05:44 GMT'), - atime: new Date('Sun, 06 Nov 2011 22:07:12 GMT'), - 'LIBARCHIVE.creationtime': '1319686852', - dev: 234881026, - ino: 51681874, - nlink: 2 }, - undefined ], - [ 'extendedHeader', - { path: 'PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: 24561, - gid: 20, - size: 353, - mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), - 
cksum: 14488, - type: 'x', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '' }, - { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - ctime: 1320617144, - atime: 1320617406, - 'LIBARCHIVE.creationtime': '1319686852', - dev: 234881026, - ino: 51681874, - nlink: 2 } ], - [ 'entry', - { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: 24561, - gid: 20, - size: 0, - mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), - cksum: 15173, - type: '1', - linkpath: '200-hard', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '', - ctime: new Date('Sun, 06 Nov 2011 22:05:44 GMT'), - atime: new Date('Sun, 06 Nov 2011 22:10:06 GMT'), - 'LIBARCHIVE.creationtime': '1319686852', - dev: 234881026, - ino: 51681874, - nlink: 2 }, - undefined ] ] - - -tap.test("parser test", function (t) { - var parser = tar.Parse() - - parser.on("end", function () { - t.equal(index, expect.length, "saw all expected events") - t.end() - }) - - fs.createReadStream(file) - .pipe(parser) - .on("*", function (ev, entry) { - var wanted = expect[index] - if (!wanted) { - return t.fail("Unexpected event: " + ev) - } - var result = [ev, entry.props] - entry.on("end", function () { - result.push(entry.fields || entry.body) - - t.equal(ev, wanted[0], index + " event type") - t.equivalent(entry.props, wanted[1], wanted[1].path + " entry properties") - if (wanted[2]) { - t.equivalent(result[2], wanted[2], "metadata values") - } - index ++ - }) - }) -}) diff --git a/node_modules/node-gyp/node_modules/tar/test/zz-cleanup.js b/node_modules/node-gyp/node_modules/tar/test/zz-cleanup.js deleted file mode 100644 index a00ff7faa0390..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/zz-cleanup.js +++ /dev/null @@ -1,20 +0,0 @@ -// clean up the fixtures - -var tap = require("tap") -, rimraf = require("rimraf") -, test = tap.test -, path = require("path") - -test("clean fixtures", function (t) { - rimraf(path.resolve(__dirname, "fixtures"), function (er) { - t.ifError(er, "rimraf ./fixtures/") - t.end() - }) -}) - -test("clean tmp", function (t) { - rimraf(path.resolve(__dirname, "tmp"), function (er) { - t.ifError(er, "rimraf ./tmp/") - t.end() - }) -}) diff --git a/node_modules/node-gyp/package.json b/node_modules/node-gyp/package.json index ba6913209c47d..afa45f3969095 100644 --- a/node_modules/node-gyp/package.json +++ b/node_modules/node-gyp/package.json @@ -1,70 +1,65 @@ { - "_from": "node-gyp@latest", - "_id": "node-gyp@3.8.0", + "_from": "node-gyp@5.0.7", + "_id": "node-gyp@5.0.7", "_inBundle": false, - "_integrity": "sha512-3g8lYefrRRzvGeSowdJKAKyks8oUpLEd/DyPV4eMhVlhJ0aNaZqIrNUIPuEWWTAoPqyFkfGrM67MC69baqn6vA==", + "_integrity": "sha512-K8aByl8OJD51V0VbUURTKsmdswkQQusIvlvmTyhHlIT1hBvaSxzdxpSle857XuXa7uc02UEZx9OR5aDxSWS5Qw==", "_location": "/node-gyp", - "_phantomChildren": { - "abbrev": "1.1.1", - "block-stream": "0.0.9", - "fstream": "1.0.11", - "inherits": "2.0.3" - }, + "_phantomChildren": {}, "_requested": { - "type": "tag", + "type": "version", "registry": true, - "raw": "node-gyp@latest", + "raw": "node-gyp@5.0.7", "name": "node-gyp", "escapedName": 
"node-gyp", - "rawSpec": "latest", + "rawSpec": "5.0.7", "saveSpec": null, - "fetchSpec": "latest" + "fetchSpec": "5.0.7" }, "_requiredBy": [ "#USER", "/", "/npm-lifecycle" ], - "_resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-3.8.0.tgz", - "_shasum": "540304261c330e80d0d5edce253a68cb3964218c", - "_spec": "node-gyp@latest", - "_where": "/Users/zkat/Documents/code/work/npm", + "_resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-5.0.7.tgz", + "_shasum": "dd4225e735e840cf2870e4037c2ed9c28a31719e", + "_spec": "node-gyp@5.0.7", + "_where": "/Users/mperrotte/npminc/cli", "author": { "name": "Nathan Rajlich", "email": "nathan@tootallnate.net", "url": "http://tootallnate.net" }, "bin": { - "node-gyp": "./bin/node-gyp.js" + "node-gyp": "bin/node-gyp.js" }, "bugs": { "url": "https://github.com/nodejs/node-gyp/issues" }, "bundleDependencies": false, "dependencies": { - "fstream": "^1.0.0", - "glob": "^7.0.3", - "graceful-fs": "^4.1.2", - "mkdirp": "^0.5.0", - "nopt": "2 || 3", - "npmlog": "0 || 1 || 2 || 3 || 4", - "osenv": "0", - "request": "^2.87.0", - "rimraf": "2", - "semver": "~5.3.0", - "tar": "^2.0.0", - "which": "1" + "env-paths": "^2.2.0", + "glob": "^7.1.4", + "graceful-fs": "^4.2.2", + "mkdirp": "^0.5.1", + "nopt": "^4.0.1", + "npmlog": "^4.1.2", + "request": "^2.88.0", + "rimraf": "^2.6.3", + "semver": "^5.7.1", + "tar": "^4.4.12", + "which": "^1.3.1" }, "deprecated": false, "description": "Node.js native addon build tool", "devDependencies": { - "bindings": "~1.2.1", - "nan": "^2.0.0", - "require-inject": "~1.3.0", - "tape": "~4.2.0" + "bindings": "^1.5.0", + "nan": "^2.14.0", + "require-inject": "^1.4.4", + "standard": "^14.3.1", + "tap": "~12.7.0" }, "engines": { - "node": ">= 0.8.0" + "node": ">= 6.0.0" }, "homepage": "https://github.com/nodejs/node-gyp#readme", "installVersion": 9, @@ -86,7 +81,8 @@ "url": "git://github.com/nodejs/node-gyp.git" }, "scripts": { - "test": "tape test/test-*" + "lint": "standard */*.js test/**/*.js", + "test": "npm run lint && tap --timeout=120 test/test-*" }, - "version": "3.8.0" + "version": "5.0.7" } diff --git a/node_modules/node-gyp/src/win_delay_load_hook.cc b/node_modules/node-gyp/src/win_delay_load_hook.cc index e75954b605101..169f8029f10fd 100644 --- a/node_modules/node-gyp/src/win_delay_load_hook.cc +++ b/node_modules/node-gyp/src/win_delay_load_hook.cc @@ -1,14 +1,16 @@ /* * When this file is linked to a DLL, it sets up a delay-load hook that - * intervenes when the DLL is trying to load 'node.exe' or 'iojs.exe' - * dynamically. Instead of trying to locate the .exe file it'll just return - * a handle to the process image. + * intervenes when the DLL is trying to load the host executable + * dynamically. Instead of trying to locate the .exe file it'll just + * return a handle to the process image. * - * This allows compiled addons to work when node.exe or iojs.exe is renamed. + * This allows compiled addons to work when the host executable is renamed. 
*/ #ifdef _MSC_VER +#pragma managed(push, off) + #ifndef WIN32_LEAN_AND_MEAN #define WIN32_LEAN_AND_MEAN #endif @@ -23,8 +25,7 @@ static FARPROC WINAPI load_exe_hook(unsigned int event, DelayLoadInfo* info) { if (event != dliNotePreLoadLibrary) return NULL; - if (_stricmp(info->szDll, "iojs.exe") != 0 && - _stricmp(info->szDll, "node.exe") != 0) + if (_stricmp(info->szDll, HOST_BINARY) != 0) return NULL; m = GetModuleHandle(NULL); @@ -33,4 +34,6 @@ static FARPROC WINAPI load_exe_hook(unsigned int event, DelayLoadInfo* info) { decltype(__pfnDliNotifyHook2) __pfnDliNotifyHook2 = load_exe_hook; +#pragma managed(pop) + #endif diff --git a/node_modules/node-gyp/test/common.js b/node_modules/node-gyp/test/common.js new file mode 100644 index 0000000000000..b714ee29029d3 --- /dev/null +++ b/node_modules/node-gyp/test/common.js @@ -0,0 +1,3 @@ +const envPaths = require('env-paths') + +module.exports.devDir = () => envPaths('node-gyp', { suffix: '' }).cache diff --git a/node_modules/node-gyp/test/docker.sh b/node_modules/node-gyp/test/docker.sh index ac21aa8d75c98..67014209d0003 100755 --- a/node_modules/node-gyp/test/docker.sh +++ b/node_modules/node-gyp/test/docker.sh @@ -2,8 +2,7 @@ #set -e -test_node_versions="0.8.28 0.10.40 0.12.7 4.3.0 5.6.0" -test_iojs_versions="1.8.4 2.4.0 3.3.0" +test_node_versions="6.17.0 8.15.1 10.15.3 11.12.0" myuid=$(id -u) mygid=$(id -g) @@ -77,25 +76,6 @@ for v in $test_node_versions; do " done -# An image for each of the io.js versions we want to test with that version installed and the latest npm -for v in $test_iojs_versions; do - setup_container "node-gyp-test/${v}" "node-gyp-test/clones" " - curl -sL https://iojs.org/dist/v${v}/iojs-v${v}-linux-x64.tar.gz | tar -zxv --strip-components=1 -C /usr/ && - npm install npm@latest -g && - node -v && npm -v - " -done - -# Run the tests for all of the test images we've created, -# we should see node-gyp doing its download, configure and run thing -# _NOTE: bignum doesn't compile on 0.8 currently so it'll fail for that version only_ -for v in $test_node_versions $test_iojs_versions; do - run_tests $v " - cd node-buffertools && npm install --loglevel=info && npm test && cd - " - # removed for now, too noisy: cd node-bignum && npm install --loglevel=info && npm test -done - # Test use of --target=x.y.z to compile against alternate versions test_download_node_version() { local run_with_ver="$1" @@ -112,9 +92,7 @@ test_download_node_version() { } test_download_node_version "0.12.7" "0.10.30/src" "0.10.30" -test_download_node_version "3.3.0" "iojs-1.8.4/src" "1.8.4" # should download the headers file -test_download_node_version "3.3.0" "iojs-3.3.0/include/node" "3.3.0" test_download_node_version "4.3.0" "4.3.0/include/node" "4.3.0" test_download_node_version "5.6.0" "5.6.0/include/node" "5.6.0" @@ -124,36 +102,6 @@ test_download_node_version "5.6.0" "5.6.0/include/node" "5.6.0" # point for tarballs # we can test whether it uses the proxy because after 2 connections the proxy will # die and therefore should not be running at the end of the test, `nc` can tell us this -run_tests "3.3.0" " - (node /node-gyp-src/test/simple-proxy.js 8080 /foobar/ https://iojs.org/dist/ &) && - cd node-buffertools && - /node-gyp-src/bin/node-gyp.js --loglevel=info --dist-url=http://localhost:8080/foobar/ rebuild && - nc -z localhost 8080 && echo -e \"\\n\\n\\033[31mFAILED TO USE LOCAL PROXY\\033[39m\\n\\n\" -" - -# REMOVE after next semver-major -run_tests "3.3.0" " - (node /node-gyp-src/test/simple-proxy.js 8080 /doobar/ https://iojs.org/dist/ &) 
&& - cd node-buffertools && - NVM_IOJS_ORG_MIRROR=http://localhost:8080/doobar/ /node-gyp-src/bin/node-gyp.js --loglevel=info rebuild && - nc -z localhost 8080 && echo -e \"\\n\\n\\033[31mFAILED TO USE LOCAL PROXY\\033[39m\\n\\n\" -" - -# REMOVE after next semver-major -run_tests "0.12.7" " - (node /node-gyp-src/test/simple-proxy.js 8080 /boombar/ https://nodejs.org/dist/ &) && - cd node-buffertools && - NVM_NODEJS_ORG_MIRROR=http://localhost:8080/boombar/ /node-gyp-src/bin/node-gyp.js --loglevel=info rebuild && - nc -z localhost 8080 && echo -e \"\\n\\n\\033[31mFAILED TO USE LOCAL PROXY\\033[39m\\n\\n\" -" - -run_tests "3.3.0" " - (node /node-gyp-src/test/simple-proxy.js 8080 /doobar/ https://iojs.org/dist/ &) && - cd node-buffertools && - IOJS_ORG_MIRROR=http://localhost:8080/doobar/ /node-gyp-src/bin/node-gyp.js --loglevel=info rebuild && - nc -z localhost 8080 && echo -e \"\\n\\n\\033[31mFAILED TO USE LOCAL PROXY\\033[39m\\n\\n\" -" - run_tests "0.12.7" " (node /node-gyp-src/test/simple-proxy.js 8080 /boombar/ https://nodejs.org/dist/ &) && cd node-buffertools && diff --git a/node_modules/node-gyp/test/fixtures/VS_2017_BuildTools_minimal.txt b/node_modules/node-gyp/test/fixtures/VS_2017_BuildTools_minimal.txt new file mode 100644 index 0000000000000..244f6b0798240 --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/VS_2017_BuildTools_minimal.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\BuildTools","version":"15.9.28307.665","packages":["Microsoft.VisualStudio.Product.BuildTools","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.JavaScript.LanguageService","Microsoft.VisualStudio.JavaScript.LanguageService.Resources","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.V
isualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.CodeAnalysis.VisualStudio.Setup.Resources","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.CodeAnalysis.ExpressionEvaluator.Resources","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.Debugger.Resources","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.PackageGroup.CoreEditor","PortableFacades","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.DiaSymReader","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer","Microsoft.TeamFoundation.OfficeIntegration","Microsoft.TeamFoundation.OfficeIntegration.Resources","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.SLNX.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.Editors","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.Component.Windows10SDK.17134","Win10SDK_10.0.17134","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86
.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.Workload.MSBuildTools","Microsoft.VisualStudio.Component.CoreBuildTools","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.BuildTools.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Component.MSBuild","Microsoft.PythonTools.BuildCore.Vsix","Microsoft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers.Resources","Microsoft.CodeAnalysis.Compilers","Microsoft.Net.PackageGroup.4.6.1.Redist","Microsoft.Visua
lStudio.NativeImageSupport","Microsoft.Build"]}] diff --git a/node_modules/node-gyp/test/fixtures/VS_2017_Community_workload.txt b/node_modules/node-gyp/test/fixtures/VS_2017_Community_workload.txt new file mode 100644 index 0000000000000..dd5e77dafb9df --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/VS_2017_Community_workload.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community","version":"15.9.28307.665","packages":["Microsoft.VisualStudio.Component.Windows10SDK.IpOverUsb","Win10SDK_IpOverUsb","Microsoft.VisualStudio.Component.VC.ATL.ARM64","Microsoft.VisualCpp.ATL.ARM64","Microsoft.VisualStudio.Component.VC.ATL.ARM","Microsoft.VisualCpp.ATL.ARM","Microsoft.VisualStudio.Component.VC.Tools.ARM","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualStudio.Graphics.Analyzer.Resources","Microsoft.Icecap.Analysis","Microsoft.VisualCpp.CRT.Redist.arm.OneCore.Desktop","Microsoft.VisualCpp.CRT.arm.Store","Microsoft.VisualCpp.CRT.arm.Desktop","Microsoft.VisualStudio.PackageGroup.VC.Tools.x64.ARM","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetarm","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetARM.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetARM","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetARM.Resources","Microsoft.VisualCpp.Premium.Tools.ARM.Base","Microsoft.VisualCpp.Premium.Tools.ARM.Base.Resources","Microsoft.VisualCpp.PGO.ARM","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualStudio.Product.Community","Microsoft.VisualCpp.Tools.Hostx86.Targetarm","Microsoft.VisualStudio.Component.VC.Tools.ARM64","Microsoft.VisualStudio.VC.MSBuild.Arm64","Microsoft.VisualCpp.CRT.Redist.ARM64.OneCore.Desktop","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.CRT.ARM64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ARM64.Store","Microsoft.VisualCpp.CRT.ARM64.Desktop","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.Icecap.Analysis.Resources","Microsoft.VisualCpp.VCTip.hostX86.targetARM","Microsoft.VisualStudio.PackageGroup.VC.Tools.x64.ARM64","Microsoft.VisualCpp.Tools.Core","Microsoft.VisualCpp.PGO.ARM64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetarm64","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetARM64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetARM64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetARM64.Resources","Microsoft.VisualCpp.Premium.Tools.ARM64.Base","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Tools.HostX86.TargetARM.Resources","Microsoft.VisualCpp.CRT.Redist.ARM64","Microsoft.VisualCpp.CRT.arm.OneCore.Desktop","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Component.WixToolset.VisualStudioExtension.Dev15","WixToolset.VisualStudioExtension.Dev15","Microsoft.VisualCpp.MFC.X64","Microsoft.VisualCpp.ATL.Headers","Microsoft.VisualStudio.Component.VC.CMake.Project","Microsoft.VisualStudio.VC.CMake","Microsoft.VisualStudio.VC.CMake.Project","Microsoft.VisualStudio.Component.Windows10SDK.17763","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","MLGen","Microsoft.VisualStudio.Graphics.Analyzer","Microsoft.VisualStudio.Component.TestTools.Core","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.NuGet.Licenses","SQLCommon","Microsoft.VisualStudio.VC.MSBuild
.X86","Microsoft.VisualCpp.Tools.HostX64.TargetARM","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualCpp.HTMLHelpWorkshop.Msi","Microsoft.Icecap.Collection.Msi.Resources","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.VCTip.hostX64.targetARM","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Templates.UnitTest","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CPP","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.Graphics.Appid","Microsoft.VisualCpp.ATL.Source","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.Debugger.ImmersiveActivateHelper.Msi","Microsoft.VisualStudio.Debugger.JustInTime","Microsoft.DiagnosticsHub.CpuSampling","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Common","Microsoft.VisualStudio.TestTools.TP.Legacy.Common.Res","Microsoft.VisualStudio.ProTools.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualCpp.Tools.HostX64.TargetARM.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Agent","Microsoft.Component.MSBuild","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.WebToolsExtensions","Microsoft.VisualCpp.Tools.Hostx86.Targetarm64","Microsoft.VisualStudio.TextTemplating.MSBuild","Microsoft.VisualCpp.VCTip.hostX86.targetARM64","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualCpp.Tools.HostX86.TargetARM64.Resources","Microsoft.VisualStudio.RazorExtension","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualCpp.MFC.Source","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.Graphics.Viewers","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.MFC.Redist.X86","Microsoft.VisualStudio.WebToolsExtensions.Chip","Microsoft.DiagnosticsHub.Runtime.Resources","Microsoft.DiagnosticsHub.CpuSampling.Targeted","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.Component.VC.DiagnosticTools","Microsoft.VisualCpp.MFC.Redist.X64","Microsoft.VisualStudio.PackageGroup.TestTools.Native","Microsoft.VisualStudio.Graphics.Viewers.Resources","Microsoft.VisualCpp.MFC.MBCS","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Component.TextTemplating","Win10SDK_10.0.17763","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.VisualCpp.MFC.MBCS.X64","Microsoft.VisualStudio.PackageGroup.TestTools.CodeCoverage","Microsoft.VisualStudio.Graphics.EnableTools","Microsoft.VisualStudio.Graphics.Appid.Resources","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualCpp.MFC.Headers","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.Graphics.Analyzer.Targeted","Microsoft.VisualCpp.CRT.Headers","Microsoft.DiagnosticsHub.Runtime.Targeted","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetARM64","Microsoft.VisualCpp.VCTip.hostX64.targetARM64","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.Icecap.Collection.Msi","Microsoft.VisualCpp.ATL.X86","Microsoft.VisualCpp.Tools.HostX64.TargetARM64.Resources","Microsoft.Vis
ualStudio.Component.VC.ATLMFC","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.Icecap.Collection.Msi.Resources.Targeted","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualStudio.Component.Graphics.Tools","Microsoft.VisualStudio.WebTools.Resources","Microsoft.VisualCpp.ATL.X64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualStudio.Component.Graphics.Win81","Microsoft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.Icecap.Analysis.Resources.Targeted","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.MFC.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualStudio.TextTemplating.Core","Microsoft.VisualStudio.JavaScript.LanguageService","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.Component.VC.TestAdapterForBoostTest","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.CredentialProvider","Microsoft.VisualStudio.VC.Templates.Desktop","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.ComponentGroup.WebToolsExtensions","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core","Microsoft.VisualStudio.TextTemplating.Integration","Microsoft.VisualStudio.Component.NuGet","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualCpp.PGO.Headers","Microsoft.DiagnosticsHub.Collection","Microsoft.Icecap.Collection.Msi.Targeted","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.WebTools.WSP.FSA","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualStudio.Branding.Community","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.WebToolsExtensions.Common","Microsoft.VisualStudio.WebTools.MSBuild","Microsoft.VisualStudio.NuGet.Core","Microsoft.DiagnosticsHub.Collection.Service","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.DiaSymReader.Native","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.Community.Extra.Resources","Microsoft.VisualStudio.Component.Roslyn.LanguageServices","Microsoft.DiagnosticsHub.Collection.StopService.Install","Microsoft.VisualStudio.InteractiveWindow","Microsoft.PackageGroup.DiagnosticsHub.Platform","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualStudio.De
bugger.Remote","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.Community.Extra","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Msi","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.TestTools","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Resources","Microsoft.VisualStudio.PackageGroup.TestTools.Core","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualStudio.TestTools.Pex.Common","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.Legacy","Microsoft.VisualStudio.PackageGroup.MinShell.Interop","Microsoft.CodeAnalysis.ExpressionEvaluator.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualStudio.PackageGroup.CoreEditor","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Professional","Microsoft.VisualStudio.Debugger.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.PackageGroup.TestTools.DataCollectors","sqlsysclrtypes","Microsoft.VisualStudio.ProTools","Component.Microsoft.VisualStudio.RazorExtension","Microsoft.VisualStudio.TestTools.TestPlatform.V2.CLI","Microsoft.Build.Dependencies","Microsoft.VisualStudio.WebTools.WSP.FSA.Resources","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualStudio.VC.Ide.ATL.Resources","Microsoft.VisualStudio.VC.Templates.UnitTest.Resources","Microsoft.VisualStudio.Debugger.Managed","Microsoft.VisualStudio.Workload.NativeDesktop","Microsoft.VisualStudio.Component.VC.TestAdapterForGoogleTest","Microsoft.VisualStudio.Debugger.JustInTime.Msi","Microsoft.Net.PackageGroup.4.6.1.Redist","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","sqlsysclrtypes","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Common","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.TestTools.TestWIExtension","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.WebTools","Microsoft.VisualStudio.Component.VC.Redist.14.Latest","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.TextTemplating.Integration.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.TeamFoundation.OfficeIntegration.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.JavaScript.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.TestAdapterForGoogleTest","Microsoft.VisualStudio.PackageGroup.Community","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.Component.Windows10SDK.17134","Micr
osoft.VisualStudio.PackageGroup.Core","PortableFacades","Microsoft.DiaSymReader","Microsoft.DiagnosticsHub.Runtime","Microsoft.VisualStudio.Component.CoreEditor","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.Community","Microsoft.TeamFoundation.OfficeIntegration","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualCpp.Tools.Common.UtilsPrereq","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.ServiceHub","Microsoft.VisualStudio.Editors","Microsoft.VisualStudio.TeamExplorer","Microsoft.CodeAnalysis.VisualStudio.InteractiveComponents.Resources","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.GraphProvider","Microsoft.CodeAnalysis.VisualStudio.InteractiveComponents","Microsoft.CodeAnalysis.VisualStudio.Setup.Interactive.Resources","Microsoft.CodeAnalysis.VisualStudio.Setup.Resources","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.NuGet.Build.Tasks","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.CodeAnalysis.Compilers.Resources","Microsoft.CodeAnalysis.Compilers","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.Build","Microsoft.VisualStudio.VC.Ide.TestAdapterForBoostTest","Microsoft.VisualStudio.VC.Ide.ATL","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.Workload.CoreEditor","Microsoft.VisualStudio.MinShell.Interop","Microsoft.Build.FileTracker.Msi","Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core","Microsoft.MSHtml","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips","Microsoft.VisualStudio.Devenv.Msi","Microsoft.VisualStudio.Component.VC.ATL","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualStudio.SLNX.VSIX","Microsoft.VisualStudio.CoreEditor","Win10SDK_10.0.17134","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.Component.Debugger.JustInTime","Microsoft.VisualStudio.VC.Ide.MFC","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.PackageGroup.TeamExplorer","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.Devenv","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualStudio.Devenv.Resources","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.MefHosting","Microsoft.DiagnosticsHub.Collection.StopService.Uninstall","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.Visu
alStudio.MefHosting.Resources","Microsoft.VisualCpp.MFC.X86","Microsoft.VisualStudio.Log.Resources","Microsoft.Icecap.Analysis.Targeted","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.MinShell.x86","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.LanguageServer","Microsoft.VisualStudio.NativeImageSupport","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.Devenv.Config","Microsoft.VisualStudio.MinShell.Resources","Microsoft.VisualStudio.Initializer","Microsoft.Net.PackageGroup.4.6.Redist"]}] diff --git a/node_modules/node-gyp/test/fixtures/VS_2017_Express.txt b/node_modules/node-gyp/test/fixtures/VS_2017_Express.txt new file mode 100644 index 0000000000000..c4b3b5f2b0163 --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/VS_2017_Express.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\WDExpress","version":"15.9.28307.858","packages":["Microsoft.VisualStudio.Product.WDExpress","Microsoft.VisualStudio.Workload.WDExpress","Microsoft.VisualStudio.Component.Windows10SDK.17763","MLGen","Win10SDK_10.0.17763","Microsoft.VisualStudio.Component.Windows10SDK.14393","Win10SDK_10.0.14393.795","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.Component.VC.Tools.ARM64","Microsoft.VisualStudio.VC.MSBuild.Arm64","Microsoft.VisualCpp.CRT.Redist.ARM64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.ARM64","Microsoft.VisualCpp.CRT.ARM64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ARM64.Store","Microsoft.VisualCpp.CRT.ARM64.Desktop","Microsoft.VisualCpp.Tools.Hostx86.Targetarm64","Microsoft.VisualCpp.VCTip.hostX86.targetARM64","Microsoft.VisualCpp.Tools.HostX86.TargetARM64.Resources","Microsoft.VisualStudio.Component.VC.Tools.ARM","Microsoft.VisualCpp.Tools.Hostx86.Targetarm","Microsoft.VisualCpp.VCTip.hostX86.targetARM","Microsoft.VisualCpp.Tools.HostX86.TargetARM.Resources","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.arm.OneCore.Desktop","Microsoft.VisualCpp.CRT.arm.OneCore.Desktop","Microsoft.VisualCpp.CRT.arm.Store","Microsoft.VisualCpp.CRT.arm.Desktop","Microsoft.VisualStudio.VC.Templates.UnitTest","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CPP","Microsoft.VisualStudio.VC.Templates.UnitTest.Resources","Microsoft.VisualStudio.VC.Templates.Desktop","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.JavaScript.LanguageService","Microsoft.VisualStudio.JavaScript.LanguageService.Resources","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.VisualStudio.VC.Ide.WinXPl
us","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.VisualStudio.Component.VC.CLI.Support","Microsoft.VisualCpp.CLI.X86","Microsoft.VisualCpp.CLI.X64","Microsoft.VisualCpp.CLI.Source","Microsoft.VisualCpp.CLI.ARM64","Microsoft.VisualCpp.CLI.ARM","Microsoft.VisualStudio.VC.Templates.CLR","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Templates.CLR.Resources","Microsoft.Component.VC.Runtime.OSSupport","Microsoft.Windows.UniversalCRT.Tools.Msi","Microsoft.Windows.UniversalCRT.Tools.Msi","Microsoft.Windows.UniversalCRT.ExtensionSDK.Msi","Microsoft.Windows.UniversalCRT.HeadersLibsSources.Msi","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.Component.HelpViewer","Microsoft.HelpViewer","Microsoft.VisualStudio.Help.Configuration.Msi","Microsoft.VisualStudio.Component.SQL.DataSources","Microsoft.VisualStudio.Component.SQL.SSDT","Microsoft.VisualStudio.Component.SQL.CMDUtils","sqlcmdlnutils","Microsoft.VisualStudio.Component.Common.Azure.Tools","Microsoft.VisualStudio.Azure.CommonAzureTools","SSDT","Microsoft.VisualStudio.Component.SQL.ADAL","sql_adalsql","Microsoft.VisualStudio.Component.NuGet","Microsoft.CredentialProvider","Microsoft.VisualStudio.NuGet.Licenses","Microsoft.VisualStudio.Component.SQL.LocalDB.Runtime","Microsoft.VisualStudio.Component.SQL.NCLI","sqllocaldb","sqlncli","Microsoft.VisualStudio.Component.EntityFramework","Microsoft.VisualStudio.PackageGroup.DslRuntime","Microsoft.VisualStudio.Dsl.Core","Microsoft.VisualStudio.Dsl.GraphObject","Microsoft.VisualStudio.Dsl.Core.Resources","Microsoft.VisualStudio.EntityFrameworkTools","Microsoft.VisualStudio.EntityFrameworkTools.Msi","Microsoft.VisualStudio.Component.Roslyn.LanguageServices","Microsoft.VisualStudio.InteractiveWindow","Microsoft.DiaSymReader.Native","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.CodeAnalysis.VisualStudio.InteractiveComponents.Resources","Microsoft.CodeAnalysis.VisualStudio.InteractiveComponents
","Microsoft.CodeAnalysis.VisualStudio.Setup.Interactive.Resources","Microsoft.Net.ComponentGroup.TargetingPacks.Common","Microsoft.Net.Component.4.6.TargetingPack","Microsoft.Net.4.6.TargetingPack","Microsoft.Net.Component.4.5.2.TargetingPack","Microsoft.Net.4.5.2.TargetingPack","Microsoft.Net.Component.4.5.1.TargetingPack","Microsoft.Net.4.5.1.TargetingPack","Microsoft.Net.Component.4.5.TargetingPack","Microsoft.Net.4.5.TargetingPack","Microsoft.Net.Component.4.TargetingPack","Microsoft.Net.4.TargetingPack","Microsoft.Net.ComponentGroup.DevelopmentPrerequisites","Microsoft.Net.Component.4.6.1.TargetingPack","Microsoft.Net.4.6.1.TargetingPack","Microsoft.Net.Cumulative.TargetingPack.Resources","Microsoft.Net.Component.4.6.1.SDK","Microsoft.Net.4.6.1.SDK","Microsoft.VisualStudio.Component.TextTemplating","Microsoft.VisualStudio.TextTemplating.MSBuild","Microsoft.VisualStudio.TextTemplating.Integration","Microsoft.VisualStudio.TextTemplating.Core","Microsoft.VisualStudio.TextTemplating.Integration.Resources","Microsoft.VisualStudio.Component.VisualStudioData","Microsoft.VisualStudio.Component.SQL.CLR","Microsoft.VisualStudio.ProTools","sqlsysclrtypes","sqlsysclrtypes","SQLCommon","Microsoft.VisualStudio.ProTools.Resources","Microsoft.VisualStudio.XamlDiagnostics","Microsoft.VisualStudio.XamlDiagnostics.Resources","Microsoft.VisualStudio.XamlDesigner","Microsoft.VisualStudio.XamlDesigner.Resources","Microsoft.VisualStudio.XamlDesigner.Executables","Microsoft.VisualStudio.XamlShared","Microsoft.VisualStudio.XamlShared.Resources","Microsoft.VisualStudio.PackageGroup.TestTools.Managed","Microsoft.VisualStudio.PackageGroup.IntelliTrace.Core","Microsoft.IntelliTrace.Core","Microsoft.IntelliTrace.Core.Targeted","Microsoft.IntelliTrace.ProfilerProxy.Msi.x64","Microsoft.IntelliTrace.ProfilerProxy.Msi","Microsoft.VisualStudio.NuGet.Core","Microsoft.VisualStudio.TestWindow.SourceBasedTestDiscovery","Microsoft.VisualStudio.TestWindow.Dotnet","Microsoft.VisualStudio.TestTools.TestGeneration","Microsoft.VisualStudio.PackageGroup.TestTools.CodeCoverage","Microsoft.VisualStudio.PackageGroup.TestTools.Enterprise","Microsoft.VisualStudio.PackageGroup.TestTools.MSTestV2.Managed","Microsoft.VisualStudio.TestTools.MSTestV2.WizardExtension.UnitTest","Microsoft.VisualStudio.PackageGroup.TestTools.Core","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.Pex.Common","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.Legacy","Microsoft.VisualStudio.PackageGroup.MinShell.Interop","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Msi","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Common","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.TestTools","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Professional","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Common","Microsoft.VisualStudio.TestTools.TP.Legacy.Common.Res","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Agent","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.TestTools.TestWIExtension","Microsoft.VisualStud
io.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.PackageGroup.TestTools.DataCollectors","Microsoft.Component.ClickOnce","Microsoft.VisualStudio.PackageGroup.ClickOnce.MSBuild","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.ClickOnce.SignTool.Msi","Microsoft.SQL.ClickOnceBootstrapper.Msi","Microsoft.Net.ClickOnceBootstrapper","Microsoft.ClickOnce.BootStrapper.Msi.Resources","Microsoft.ClickOnce.BootStrapper.Msi","Microsoft.VisualStudio.WebTools.WSP.FSA","Microsoft.VisualStudio.WebTools.WSP.FSA.Resources","Microsoft.VisualStudio.PackageGroup.Community","Microsoft.VisualStudio.Community.Extra.Resources","Microsoft.VisualStudio.Community.Extra","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.CodeAnalysis.VisualStudio.Setup.Resources","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.CodeAnalysis.ExpressionEvaluator.Resources","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.Debugger.Resources","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.Editors","Microsoft.VisualStudio.ClickOnce.Resources","Microsoft.VisualStudio.ClickOnce","Microsoft.Component.MSBuild","Microsoft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers.Resources","Microsoft.CodeAnalysis.Compilers","Microsoft.Net.PackageGroup.4.6.1.Redist","Microsoft.VisualStudio.TemplateEngine","Microsoft.VisualStudio.WebToolsExtensions.Common","Microsoft.NET.Sdk","Microsoft.VisualStudio.PackageGroup.TestTools.Templates.Managed","Microsoft.VisualStudio.TestTools.Templates.Managed","Microsoft.VisualStudio.TestTools.Templates.Managed.Resources","Microsoft.VisualStudio.Templates.VB.MSTestv2.Deskto
p.UnitTest","Microsoft.VisualStudio.Templates.CS.MSTestv2.Desktop.UnitTest","Microsoft.VisualStudio.Templates.VB.Wpf","Microsoft.VisualStudio.Templates.VB.Wpf.Resources","Microsoft.VisualStudio.Templates.VB.Winforms","Microsoft.VisualStudio.Templates.VB.ManagedCore","Microsoft.VisualStudio.Templates.VB.Shared","Microsoft.VisualStudio.Templates.VB.Shared.Resources","Microsoft.VisualStudio.Templates.VB.ManagedCore.Resources","Microsoft.VisualStudio.Templates.CS.GettingStarted.Desktop.Package","Microsoft.VisualStudio.Templates.GetStarted.Desktop.Setup","Microsoft.VisualStudio.Templates.CS.GettingStarted.Console.Package","Microsoft.VisualStudio.Templates.GetStarted.Resources","Microsoft.VisualStudio.Templates.GetStarted.Common.Setup","Microsoft.VisualStudio.Templates.GetStarted.Console.Setup","Microsoft.VisualStudio.Templates.CS.Wpf","Microsoft.VisualStudio.Templates.CS.Wpf.Resources","Microsoft.VisualStudio.Templates.CS.Winforms","Microsoft.VisualStudio.Templates.CS.ManagedCore","Microsoft.VisualStudio.Templates.CS.Shared","Microsoft.VisualStudio.Templates.Editorconfig.Wizard.Setup","Templates.Editorconfig.SolutionFile.Setup","Microsoft.VisualStudio.Templates.CS.Shared.Resources","Microsoft.VisualStudio.Templates.CS.ManagedCore.Resources","Microsoft.VisualStudio.Component.CoreEditor","Microsoft.VisualStudio.PackageGroup.CoreEditor","PortableFacades","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.DiaSymReader","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Build","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer","Microsoft.TeamFoundation.OfficeIntegration","Microsoft.TeamFoundation.OfficeIntegration.Resources","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.SLNX.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.WDExpress","Microsoft.VisualStudio.WDExpress.Resources","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.MinShell.x86","Microsoft.VisualStudio.NativeImageSupport","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.LanguageServer","Microsoft.VisualStudio.MinShell.Resources","Microsoft.Net.PackageGroup.4.6.Redist","Microsoft.VisualStudio.Branding.WDExpress"]}] diff --git a/node_modules/node-gyp/test/fixtures/VS_2017_Unusable.txt 
b/node_modules/node-gyp/test/fixtures/VS_2017_Unusable.txt new file mode 100644 index 0000000000000..fc0a257f44783 --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/VS_2017_Unusable.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\BuildToolsUnusable","version":"15.9.28307.665","packages":["Microsoft.VisualStudio.Product.BuildTools","Microsoft.VisualStudio.Component.Windows10SDK.17134","Win10SDK_10.0.17134","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.Workload.MSBuildTools","Microsoft.VisualStudio.Component.CoreBuildTools","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.Dev
Cmd.Resources","Microsoft.VisualStudio.BuildTools.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Component.MSBuild","Microsoft.PythonTools.BuildCore.Vsix","Microsoft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers.Resources","Microsoft.CodeAnalysis.Compilers","Microsoft.Net.PackageGroup.4.6.1.Redist","Microsoft.Net.4.6.1.FullRedist.NonThreshold","Microsoft.Windows.UniversalCRT.Msu.81","Microsoft.VisualStudio.NativeImageSupport","Microsoft.Build"]}] diff --git a/node_modules/node-gyp/test/fixtures/VS_2019_BuildTools_minimal.txt b/node_modules/node-gyp/test/fixtures/VS_2019_BuildTools_minimal.txt new file mode 100644 index 0000000000000..f07d254164882 --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/VS_2019_BuildTools_minimal.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools","version":"16.1.28922.388","packages":["Microsoft.VisualStudio.Product.BuildTools","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.CodeSense.Community","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Platform.CallHierarchy","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.PackageGroup.CoreEditor","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer.Common","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.OpenFolder.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Platform.NavigateTo","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.
VC.Ide.x64","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VisualC.Logging","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.VCPkgDatabase","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.PackageGroup.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Agent","Microsoft.VisualStudio.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Concord","Microsoft.VisualStudio.VC.Ide.Debugger.Concord.Resources","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.DiaSymReader","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualStudio.Debugger.Concord.Managed","Microsoft.VisualStudio.Debugger.Concord.Managed.Resources","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.PerfLib","Microsoft.VisualStudio.Debugger.Package.DiagHub.Client.VSx86","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.VisualStudio.Editors","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.Platform.Editor","Microsoft.VisualStudio.Debugger.Concord","Microsoft.Visual
Studio.Debugger.Concord.Resources","Microsoft.VisualStudio.Debugger.Resources","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.VisualStudio.Component.Windows10SDK.17134","Win10SDK_10.0.17134","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.VC.MSBuild.x86.v142","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.X64.v142","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM.v142","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.VisualStudio.Workload.MSBuildTools","Microsoft.VisualStudio.Component.CoreBuildTools","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","M
icrosoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.BuildTools.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Component.MSBuild","Microsoft.PythonTools.BuildCore.Vsix","Microsoft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers","Microsoft.Net.PackageGroup.4.7.2.Redist","Microsoft.VisualStudio.NativeImageSupport","Microsoft.Build","Microsoft.VisualStudio.PackageGroup.NuGet","Microsoft.VisualStudio.NuGet.BuildTools"]}] diff --git a/node_modules/node-gyp/test/fixtures/VS_2019_Community_workload.txt b/node_modules/node-gyp/test/fixtures/VS_2019_Community_workload.txt new file mode 100644 index 0000000000000..50071c25f189e --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/VS_2019_Community_workload.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community","version":"16.1.28922.388","packages":["Microsoft.VisualStudio.Workload.NativeDesktop","Microsoft.VisualStudio.Component.VC.TestAdapterForGoogleTest","Microsoft.VisualStudio.VC.Ide.TestAdapterForGoogleTest","Microsoft.VisualStudio.Component.VC.TestAdapterForBoostTest","Microsoft.VisualStudio.VC.Ide.TestAdapterForBoostTest","Microsoft.VisualStudio.Component.VC.ATL","Microsoft.VisualStudio.VC.Ide.ATL","Microsoft.VisualStudio.VC.Ide.ATL.Resources","Microsoft.VisualCpp.ATL.X86","Microsoft.VisualCpp.ATL.X64","Microsoft.VisualCpp.ATL.Source","Microsoft.VisualCpp.ATL.Headers","Microsoft.VisualStudio.Component.VC.CMake.Project","Microsoft.VisualStudio.VC.CMake","Microsoft.VisualStudio.VC.CMake.Project","Microsoft.VisualStudio.VC.ExternalBuildFramework","Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core","Microsoft.VisualStudio.PackageGroup.TestTools.Native","Microsoft.VisualStudio.Component.VC.Redist.14.Latest","Microsoft.VisualStudio.VC.Templates.UnitTest","Microsoft.VisualStudio.VC.UnitTest.Desktop.Build.Core","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CPP","Microsoft.VisualStudio.VC.Templates.UnitTest.Resources","Microsoft.VisualStudio.VC.Templates.Desktop","Microsoft.VisualStudio.Component.Debugger.JustInTime","Microsoft.VisualStudio.Debugger.ImmersiveActivateHelper.Msi","Microsoft.VisualStudio.Debugger.JustInTime","Microsoft.VisualStudio.Debugger.JustInTime.Msi","Microsoft.VisualStudio.Component.Windows10SDK.17763","Win10SDK_10.0.17763","Microsoft.VisualStudio.Component.VC.DiagnosticTools","Microsoft.VisualStudio.Component.Graphics.Tools","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.H
ostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.Graphics.Viewers","Microsoft.VisualStudio.Graphics.Viewers.Resources","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.Graphics.Analyzer","Microsoft.VisualStudio.Graphics.Analyzer.Targeted","Microsoft.VisualStudio.Graphics.Analyzer.Resources","Microsoft.VisualStudio.Graphics.Appid","Microsoft.VisualStudio.Graphics.Appid.Resources","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.VC.MSBuild.X64.v142","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM.v142","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.VC.MSBuild.x86.v142","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.VCPkgDatabase","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.Base.Resources","Component.Microsoft.VisualStudio.LiveShare","Microsoft.VisualStudio.LiveShare","Microsoft.Icecap.Analysis","Microsoft.Icecap.Analysis
.Targeted","Microsoft.Icecap.Analysis.Resources","Microsoft.Icecap.Analysis.Resources.Targeted","Microsoft.Icecap.Collection.Msi","Microsoft.Icecap.Collection.Msi.Targeted","Microsoft.Icecap.Collection.Msi.Resources","Microsoft.Icecap.Collection.Msi.Resources.Targeted","Microsoft.DiagnosticsHub.Instrumentation","Microsoft.DiagnosticsHub.CpuSampling.ExternalDependencies","Microsoft.DiagnosticsHub.CpuSampling","Microsoft.DiagnosticsHub.CpuSampling.Targeted","Microsoft.PackageGroup.DiagnosticsHub.Platform","Microsoft.DiagnosticsHub.Runtime.ExternalDependencies","Microsoft.DiagnosticsHub.Runtime.ExternalDependencies.Targeted","Microsoft.DiagnosticsHub.Collection.ExternalDependencies.x64","Microsoft.DiagnosticsHub.Collection.StopService.Uninstall","Microsoft.DiagnosticsHub.Runtime","Microsoft.DiagnosticsHub.Runtime.Targeted","Microsoft.DiagnosticsHub.Collection","Microsoft.DiagnosticsHub.Collection.Service","Microsoft.DiagnosticsHub.Collection.StopService.Install","Microsoft.VisualStudio.Component.IntelliCode","Microsoft.VisualStudio.IntelliCode","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.PackageGroup.TestTools.CodeCoverage","Microsoft.VisualStudio.PackageGroup.TestTools.Core","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.Pex.Common","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.Legacy","Microsoft.VisualStudio.PackageGroup.MinShell.Interop","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Msi","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Common","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.TestTools","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Professional","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Common","Microsoft.VisualStudio.TestTools.TP.Legacy.Common.Res","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Agent","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.TestTools.TestWIExtension","Microsoft.VisualStudio.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.PackageGroup.TestTools.DataCollectors","Microsoft.VisualStudio.LiveShareApi","Microsoft.VisualStudio.Component.TextTemplating","Microsoft.VisualStudio.TextTemplating.MSBuild","Microsoft.VisualStudio.TextTemplating.Integration","Microsoft.VisualStudio.TextTemplating.Core","Microsoft.VisualStudio.TextTemplating.Integration.Resources","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.Component.MSBuild","Microsoft.NuGet.Build.Tasks","Microsoft.DiagnosticsHub.KB2882822.Win7","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.ComponentGroup.WebToolsExtensions","Microsoft.VisualStudio.ProTools","sqlsysclrtypes","sqlsysclrtypes","SQLCommon","Microsoft.VisualStudio.ProTools.Resources","Microsoft.Visu
alStudio.WebToolsExtensions","Microsoft.VisualStudio.WebTools","Microsoft.VisualStudio.WebTools.Resources","Microsoft.VisualStudio.WebTools.MSBuild","Microsoft.VisualStudio.WebTools.WSP.FSA","Microsoft.VisualStudio.WebTools.WSP.FSA.Resources","Microsoft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.VisualC.Logging","Microsoft.WebTools.Shared","Microsoft.WebTools.DotNet.Core.ItemTemplates","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.Windows.UniversalCRT.Msu.7","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers","Microsoft.VisualStudio.Component.NuGet","Microsoft.CredentialProvider","Microsoft.VisualStudio.NuGet.PowershellBindingRedirect","Microsoft.VisualStudio.NuGet.Licenses","Microsoft.VisualStudio.PackageGroup.Community","Microsoft.VisualStudio.Community.Extra.Resources","Microsoft.VisualStudio.Community.Extra","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.CodeSense.Community","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.PackageGroup.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Agent","Microsoft.VisualStudio.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Concord","Microsoft.VisualStudio.VC.Ide.Debugger.Concord.Resources","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualStudio.Debugger.Concord.Managed","Microsoft.VisualStudio.Debugger.Concord.Managed.Resources","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote.DbgHelp.Win8","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote.DbgHelp.Win8","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.Debugger.Package.DiagHub.Client.VSx86","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.Debugger.DbgHelp.Win8","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.Debugger.Concord","Microsoft.VisualStudio.Debugger.Concord.Resources","Microsoft.VisualStudio.Debugger.Resources","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualSt
udio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Platform.CallHierarchy","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.Devenv.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.Editors","Microsoft.VisualStudio.Product.Community","Microsoft.VisualStudio.Workload.CoreEditor","Microsoft.VisualStudio.Component.CoreEditor","Microsoft.VisualStudio.PackageGroup.CoreEditor","Microsoft.VisualCpp.Tools.Common.UtilsPrereq","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.DiaSymReader","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Build","Microsoft.VisualStudio.PackageGroup.NuGet","Microsoft.VisualStudio.NuGet.Core","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer.Common","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.OpenFolder.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.Devenv","Microsoft.VisualStudio.Devenv.Resources","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Platform.NavigateTo","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.PerfLib","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.Platform.Editor","Microsoft.VisualStudio.MinShell.x86","Microsoft.VisualStudio.NativeImageSupport","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.LanguageServer","Microsoft.VisualStudio.Devenv.Config","Microsoft.VisualStudio.MinShell.Resources","Microsoft.Net.PackageGroup.4.7.2.Redist","Microsoft.Net.4.7.2.FullRedist","Microsoft.VisualStudio.Branding.Community"]}] diff --git a/node_modules/node-gyp/test/fixtures/VS_2019_Preview.txt b/node_modules/node-gyp/test/fixtures/VS_2019_Preview.txt new file mode 
100644 index 0000000000000..806509e7ce865 --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/VS_2019_Preview.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Preview","version":"16.0.28608.199","packages":["Microsoft.VisualStudio.Product.Enterprise","Microsoft.VisualStudio.Workload.NativeDesktop","Microsoft.VisualStudio.Component.VC.TestAdapterForGoogleTest","Microsoft.VisualStudio.VC.Ide.TestAdapterForGoogleTest","Microsoft.VisualStudio.Component.VC.TestAdapterForBoostTest","Microsoft.VisualStudio.VC.Ide.TestAdapterForBoostTest","Microsoft.VisualStudio.Component.VC.ATL","Microsoft.VisualStudio.VC.Ide.ATL","Microsoft.VisualStudio.VC.Ide.ATL.Resources","Microsoft.VisualCpp.ATL.X86","Microsoft.VisualCpp.ATL.X64","Microsoft.VisualCpp.ATL.Source","Microsoft.VisualCpp.ATL.Headers","Microsoft.VisualStudio.Component.VC.CMake.Project","Microsoft.VisualStudio.VC.CMake","Microsoft.VisualStudio.VC.CMake.Project","Microsoft.VisualStudio.VC.ExternalBuildFramework","Microsoft.VisualStudio.Component.VC.DiagnosticTools","Microsoft.VisualStudio.Component.Graphics.Tools","Microsoft.VisualStudio.Graphics.Viewers","Microsoft.VisualStudio.Graphics.Viewers.Resources","Microsoft.VisualStudio.Graphics.EnableTools","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.Graphics.Analyzer","Microsoft.VisualStudio.Graphics.Analyzer.Targeted","Microsoft.VisualStudio.Graphics.Analyzer.Resources","Microsoft.VisualStudio.Graphics.Appid","Microsoft.VisualStudio.Graphics.Appid.Resources","Microsoft.VisualStudio.Component.Windows10SDK.17763","Win10SDK_10.0.17763","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Reso
urces","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core","Microsoft.VisualStudio.PackageGroup.TestTools.Native","Microsoft.VisualStudio.ComponentGroup.ArchitectureTools.Native","Microsoft.VisualStudio.Component.ClassDesigner","Microsoft.VisualStudio.ClassDesigner","Microsoft.VisualStudio.ClassDesigner.Resources","Microsoft.VisualStudio.Component.VC.Redist.14.Latest","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualStudio.VC.Templates.UnitTest","Microsoft.VisualStudio.VC.UnitTest.Desktop.Build.Core","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CPP","Microsoft.VisualStudio.VC.Templates.UnitTest.Resources","Microsoft.VisualStudio.VC.Templates.Desktop","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.VC.MSBuild.X64.v142","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM.v142","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.VC.MSBuild.x86.v142","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.VCPkgDatabase","Microsoft.VisualStudio.VC.Ide.Progression.Enterprise","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.VisualStudio.Component.CodeMap","Microsoft.VisualStudio.Component.GraphDocument","Microsoft.VisualStudio.Vmp","Microsoft.VisualStudio.GraphDocument","Microsoft.VisualStudio.GraphDocument.Resources","Microsoft.VisualStudio.CodeMap","Microsoft.VisualStudio.Component.SQL.LocalDB.Runtime","Microsoft.VisualStudio.Component.SQL.NCLI","sqllocaldb","sqlncli","Microsoft.VisualStudio.ComponentGroup.WebToolsExtensions","Microsoft.VisualStudio.WebToolsExtensions","Microsoft.VisualStudio.WebTools","Microsoft.VisualStudio.WebTools.Resources","Microsoft.VisualStudio.WebTools.MSBuild","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Micros
oft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.Component.NuGet","Microsoft.CredentialProvider","Component.Microsoft.VisualStudio.LiveShare","Microsoft.VisualStudio.LiveShare","Microsoft.VisualStudio.Component.Debugger.JustInTime","Microsoft.VisualStudio.Debugger.ImmersiveActivateHelper.Msi","Microsoft.VisualStudio.Debugger.JustInTime","Microsoft.VisualStudio.Debugger.JustInTime.Msi","Microsoft.VisualStudio.Component.IntelliTrace.FrontEnd","Microsoft.IntelliTrace.DiagnosticsHubAgent.Targeted","Microsoft.IntelliTrace.Debugger","Microsoft.IntelliTrace.Debugger.Targeted","Microsoft.IntelliTrace.FrontEnd","Microsoft.DiagnosticsHub.Instrumentation","Microsoft.DiagnosticsHub.CpuSampling","Microsoft.DiagnosticsHub.CpuSampling.Targeted","Microsoft.PackageGroup.DiagnosticsHub.Platform","Microsoft.DiagnosticsHub.Collection.StopService.Uninstall","Microsoft.DiagnosticsHub.Runtime","Microsoft.DiagnosticsHub.Runtime.Targeted","Microsoft.DiagnosticsHub.Runtime.Resources","Microsoft.DiagnosticsHub.Collection","Microsoft.DiagnosticsHub.Collection.Service","Microsoft.DiagnosticsHub.Collection.StopService.Install","Microsoft.VisualStudio.Dsl.GraphObject","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.NuGet.Licenses","Microsoft.WebTools.Shared","Microsoft.VisualStudio.WebToolsExtensions.DotNet.Core.ItemTemplates","Microsoft.VisualStudio.Component.TextTemplating","Microsoft.VisualStudio.TextTemplating.MSBuild","Microsoft.VisualStudio.TextTemplating.Integration","Microsoft.VisualStudio.TextTemplating.Core","Microsoft.VisualStudio.TextTemplating.Integration.Resources","Microsoft.VisualStudio.ProTools","sqlsysclrtypes","sqlsysclrtypes","SQLCommon","Microsoft.VisualStudio.ProTools.Resources","Microsoft.VisualStudio.NuGet.Core","Microsoft.VisualStudio.PackageGroup.TestTools.CodeCoverage","Microsoft.VisualStudio.PackageGroup.IntelliTrace.Core","Microsoft.IntelliTrace.Core","Microsoft.IntelliTrace.Core.Concord","Microsoft.IntelliTrace.Core.Targeted","Microsoft.IntelliTrace.ProfilerProxy.Msi.x64","Microsoft.IntelliTrace.ProfilerProxy.Msi","Microsoft.VisualStudio.TestTools.DynamicCodeCoverage","Microsoft.VisualStudio.TestTools.CodeCoverage.Msi","Microsoft.VisualStudio.TestTools.CodeCoverage","Microsoft.Icecap.Analysis","Microsoft.Icecap.Analysis.Targeted","Microsoft.Icecap.Analysis.Resources","Microsoft.Icecap.Analysis.Resources.Targeted","Microsoft.Icecap.Collection.Msi","Microsoft.Icecap.Collection.Msi.Targeted","Microsoft.Icecap.Collection.Msi.Resources","Microsoft.Icecap.Collection.Msi.Resources.Targeted","Microsoft.VisualStudio.PackageGroup.TestTools.Core","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.Pex.Common","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.Legacy","Microsoft.VisualStudio.PackageGroup.MinShell.Interop","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Msi","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Common","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.TestTools","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Remote","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Professional","Micros
oft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Premium","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Common","Microsoft.VisualStudio.TestTools.TP.Legacy.Common.Res","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Agent","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.TestTools.TestWIExtension","Microsoft.VisualStudio.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.PackageGroup.TestTools.DataCollectors","Microsoft.VisualStudio.TestTools.NE.Msi.Targeted","Microsoft.VisualStudio.TestTools.NetworkEmulation","Microsoft.VisualStudio.TestTools.DataCollectors","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.WebTools.WSP.FSA","Microsoft.VisualStudio.WebTools.WSP.FSA.Resources","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualStudio.PackageGroup.Community","Microsoft.VisualStudio.Community.Extra.Resources","Microsoft.VisualStudio.Community.Extra","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.CodeSense","Microsoft.VisualStudio.CodeSense.Community","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.PackageGroup.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Agent","Microsoft.VisualStudio.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Concord","Microsoft.VisualStudio.VC.Ide.Debugger.Concord.Resources","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.VisualStudio.Debugger.Concord.Managed","Microsoft.VisualStudio.Debugger.Concord.Managed.Resources","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.Debugger.Package.DiagHub.Client.VSx86","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.Debugger.Concord","Microsoft.VisualStudio.Debugger.Concord.Resources","Microsoft.VisualStudio.Debugger.Resources","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.A
ppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.ProfessionalCore","Microsoft.VisualStudio.Professional","Microsoft.VisualStudio.Professional.Msi","Microsoft.VisualStudio.PackageGroup.EnterpriseCore","Microsoft.VisualStudio.Enterprise.Msi","Microsoft.VisualStudio.Enterprise","Microsoft.ShDocVw","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Platform.CallHierarchy","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.Devenv.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.Editors","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.Component.MSBuild","Microsoft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers","Microsoft.VisualStudio.Workload.CoreEditor","Microsoft.VisualStudio.Component.CoreEditor","Microsoft.VisualStudio.PackageGroup.CoreEditor","Microsoft.VisualCpp.Tools.Common.UtilsPrereq","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.DiaSymReader","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Build","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer.Common","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.SLNX.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.Devenv","Microsoft.VisualStudio.Devenv.Resources","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Platform.NavigateTo","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.Platform.Search","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.Platform.Editor","Microsoft.VisualStudio.MinShell.x86","Microsoft.VisualStudio.NativeImageSupport","Microsoft.VisualStudio.Min
Shell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.LanguageServer","Microsoft.VisualStudio.Devenv.Config","Microsoft.VisualStudio.MinShell.Resources","Microsoft.Net.PackageGroup.4.7.2.Redist","Microsoft.VisualStudio.Branding.Enterprise"]}] diff --git a/node_modules/node-gyp/test/fixtures/ca.crt b/node_modules/node-gyp/test/fixtures/ca.crt index 9d2755a74f6cd..aaf97575b18b4 100644 --- a/node_modules/node-gyp/test/fixtures/ca.crt +++ b/node_modules/node-gyp/test/fixtures/ca.crt @@ -1,21 +1,21 @@ -----BEGIN CERTIFICATE----- -MIIDbzCCAlcCAmm6MA0GCSqGSIb3DQEBCwUAMH0xCzAJBgNVBAYTAlVTMQswCQYD -VQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZMBcGA1UECgwQU3Ryb25n -TG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRowGAYDVQQDDBFjYS5zdHJv -bmdsb29wLmNvbTAeFw0xNTEyMDgyMzM1MzNaFw00MzA0MjQyMzM1MzNaMH0xCzAJ -BgNVBAYTAlVTMQswCQYDVQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZ -MBcGA1UECgwQU3Ryb25nTG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRow -GAYDVQQDDBFjYS5zdHJvbmdsb29wLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBANfj86jkvvYDjHBgiqWhk9Cj+bqiMq3MqnV0CBO4iuK33Fo6XssE -H+yVdXlIBFbFe6t655MdBVOR2Sfj7WqNh96vhu6PyDHiwcQlTaiLU6nhIed1J4Wv -lvnJHFmp8Wbtx5AgLT4UYu03ftvXEl2DLi3vhSL2tRM1ebXHB/KPbRWkb25DPX0P -foOHot3f2dgNe2x6kponf7E/QDmAu3s7Nlkfh+ryDhgGU7wocXEhXbprNqRqOGNo -xbXgUI+/9XDxYT/7Gn5LF/fPjtN+aB0SKMnTsDhprVlZie83mlqJ46fOOrR+vrsQ -mi/1m/TadrARtZoIExC/cQRdVM05EK4tUa8CAwEAATANBgkqhkiG9w0BAQsFAAOC -AQEAQ7k5WhyhDTIGYCNzRnrMHWSzGqa1y4tJMW06wafJNRqTm1cthq1ibc6Hfq5a -K10K0qMcgauRTfQ1MWrVCTW/KnJ1vkhiTOH+RvxapGn84gSaRmV6KZen0+gMsgae -KEGe/3Hn+PmDVV+PTamHgPACfpTww38WHIe/7Ce9gHfG7MZ8cKHNZhDy0IAYPln+ -YRwMLd7JNQffHAbWb2CE1mcea4H/12U8JZW5tHCF6y9V+7IuDzqwIrLKcW3lG17n -VUG6ODF/Ryqn3V5X+TL91YyXi6c34y34IpC7MQDV/67U7+5Bp5CfeDPWW2wVSrW+ -uGZtfEvhbNm6m2i4UNmpCXxUZQ== +MIIDZDCCAkwCCQCAzfCLqrJvuTANBgkqhkiG9w0BAQsFADB0MQswCQYDVQQGEwJV +UzELMAkGA1UECAwCQ0ExEDAOBgNVBAoMB05vZGUuanMxETAPBgNVBAsMCG5vZGUt +Z3lwMRIwEAYDVQQDDAlsb2NhbGhvc3QxHzAdBgkqhkiG9w0BCQEWEGJ1aWxkQG5v +ZGVqcy5vcmcwHhcNMTkwNjIyMDYyMjMzWhcNMjIwNDExMDYyMjMzWjB0MQswCQYD +VQQGEwJVUzELMAkGA1UECAwCQ0ExEDAOBgNVBAoMB05vZGUuanMxETAPBgNVBAsM +CG5vZGUtZ3lwMRIwEAYDVQQDDAlsb2NhbGhvc3QxHzAdBgkqhkiG9w0BCQEWEGJ1 +aWxkQG5vZGVqcy5vcmcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDS +CHjvtVW4HdbbUwZ/ZV9s6U4x0KSoyNQrsCZjB8kRpFPe50DS5mfmu2SNBGYKRgzk +4QEEwFB9N2o8YTWsCefSRl6ti4ToPZqulU4hhRKYrEGtMJcRzi3IN7s200JaO3UH +01Su8ruO0NESb5zEU1Ykfh8Lub8TGEAINmgI61d/5d5Aq3kDjUHQJt1Ekw03Ylnu +juQyCGZxLxnngu0mIvwzyL/UeeUgsfQLzvppUk6In7tC1zzMjSPWo0c8qu6KvrW4 +bKYnkZkzdQifzbpO5ERMEsh5HWq0uHa6+dgcVHFvlhdqF4Uat87ygNplVf0txsZB +MNVqbz1k6xkZYMnzDoydAgMBAAEwDQYJKoZIhvcNAQELBQADggEBADspZGtKpWxy +J1W3FA1aeQhMvequQTcMRz4avkm4K4HfTdV1iVD4CbvdezBphouBlyLVLDFJP7RZ +m7dBJVgBwnxufoFLne8cR2MGqDRoySbFT1AtDJdxabE6Fg+QGUpgOQfeBJ6ANlSB ++qJ+HG4QA+Ouh5hxz9mgYwkIsMUABHiwENdZ/kT8Edw4xKgd3uH0YP4iiePMD66c +rzW3uXH5J1jnKgBlpxtog4P6dHCcoq+PZJ17W5bdXNyqC1LPzQqniZ2BNcEZ4ix3 +slAZAOWD1zLLGJhBPMV1fa0sHNBWc6oicr3YK/IDb0cp9kiLvnUu1pHy+LWQGqtC +rceJuGsnJEQ= -----END CERTIFICATE----- diff --git a/node_modules/node-gyp/test/fixtures/server.crt b/node_modules/node-gyp/test/fixtures/server.crt index fe13bb96c599c..5d0c440e0788a 100644 --- a/node_modules/node-gyp/test/fixtures/server.crt +++ b/node_modules/node-gyp/test/fixtures/server.crt @@ -1,19 +1,21 @@ -----BEGIN CERTIFICATE----- -MIIDJjCCAg4CAhnOMA0GCSqGSIb3DQEBBQUAMH0xCzAJBgNVBAYTAlVTMQswCQYD -VQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZMBcGA1UECgwQU3Ryb25n -TG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRowGAYDVQQDDBFjYS5zdHJv 
-bmdsb29wLmNvbTAeFw0xNTEyMDgyMzM1MzNaFw00MzA0MjQyMzM1MzNaMBkxFzAV -BgNVBAMMDnN0cm9uZ2xvb3AuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB -CgKCAQEAwOYI7OZ2FX/YjRgLZoDQlbPc5UZXU/j0e1wwiJNPtPEax9Y5Uoza0Pnt -Ikzkc2SfvQ+IJrhXo385tI0W5juuqbHnE7UrjUuPjUX6NHevkxcs/flmjan5wnZM -cPsGhH71WDuUEEflvZihf2Se2x+xgZtMhc5XGmVmRuZFYKvkgUhA2/w8/QrK+jPT -n9QRJxZjWNh2RBdC1B7u4jffSmOSUljYFH1I2eTeY+Rdi6YUIYSU9gEoZxsv3Tia -SomfMF5jt2Mouo6MzA+IhLvvFjcrcph1Qxgi9RkfdCMMd+Ipm9YWELkyG1bDRpQy -0iyHD4gvVsAqz1Y2KdRSdc3Kt+nTqwIDAQABoxkwFzAVBgNVHREEDjAMhwQAAAAA -hwR/AAABMA0GCSqGSIb3DQEBBQUAA4IBAQAhy4J0hML3NgmDRHdL5/iTucBe22Mf -jJjg2aifD1S187dHm+Il4qZNO2plWwAhN0h704f+8wpsaALxUvBIu6nvlvcMP5PH -jGN5JLe2Km3UaPvYOQU2SgacLilu+uBcIo2JSHLV6O7ziqUj5Gior6YxDLCtEZie -Ea8aX5/YjuACtEMJ1JjRqjgkM66XAoUe0E8onOK3FgTIO3tGoTJwRp0zS50pFuP0 -PsZtT04ck6mmXEXXknNoAyBCvPypfms9OHqcUIW9fiQnrGbS/Ri4QSQYj0DtFk/1 -na4fY1gf3zTHxH8259b/TOOaPfTnCEsOQtjUrWNR4xhmVZ+HJy4yytUW +MIIDYjCCAkoCCQCSlmGR7KzZGTANBgkqhkiG9w0BAQsFADB0MQswCQYDVQQGEwJV +UzELMAkGA1UECAwCQ0ExEDAOBgNVBAoMB05vZGUuanMxETAPBgNVBAsMCG5vZGUt +Z3lwMRIwEAYDVQQDDAlsb2NhbGhvc3QxHzAdBgkqhkiG9w0BCQEWEGJ1aWxkQG5v +ZGVqcy5vcmcwHhcNMTkwNjIyMDYyNTU1WhcNMjkwNjE5MDYyNTU1WjByMQswCQYD +VQQGEwJVUzELMAkGA1UECAwCQ0ExEDAOBgNVBAoMB05vZGUuanMxETAPBgNVBAsM +CG5vZGUtZ3lwMRIwEAYDVQQDDAlsb2NhbGhvc3QxHTAbBgkqhkiG9w0BCQEWDmJ1 +aWxkQGlvanMub3JnMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA6S1E +2WchgmbJYqCnpN7310ZgHjIOqeJe6MpSue2u6z6mTNd5izgvQNaANmn3xLFCS5zs +uZaTvdPYPkcmSQzb1YcZSUYnAxZifjYARc6kb5GSBl3q+O70ELyFrimXfZ4JI+bd +IG9KiHY17DlvZZZj/csGYVWWg0mkeH3O5LPX6/DXQVh/9+gZ02/cdIBCAtZHQwqx +7tF/qZA/kD4GZNFpU1DYHzf9H6g9htoCqmNHQWrV2T9yFybt6mbZp9kglBmyKYCc +7hmQnb7N/mHn1yIuwhBsirCJTfKH86gN81u8M3+SVHA2VUHDllcNhpDWlmInXA+I +tHdGZHCp95ohqpCPgQIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQCdvYj6CD0ZLwT2 +3t1r+deC3TJuHlNVSeKeT7wIfFnh2FW5riGV0q/w6eXPLTHjuiS6YmpAAbdNUgX/ +sq64FqI2RLpX6pgY5yB0SKopMcJxMLKqmF4zHpIHxtYN5EmN3PR0vehneBR/nZ2T +3ikvWD5JeXlm7Dfw+tjijdxM/sEoDWErGup4mMKMd1s5s830p+ITJUa50d0DLFdH +mqPSbUZF8mMPwGJd+nu1Ht3gTLtK7+gYJgGtXMJmGC0Qg77EJHDB2NbotgDGNmSU +1H9BpAeFHHIcbh2Rr7kkTvnh/c03vFe+CsDZmezcmRpRzW1fKj3YbfqBxU4XwJrL +a5T/N9xU -----END CERTIFICATE----- diff --git a/node_modules/node-gyp/test/fixtures/server.key b/node_modules/node-gyp/test/fixtures/server.key index f8227f4c0c2d4..a8447391e02e2 100644 --- a/node_modules/node-gyp/test/fixtures/server.key +++ b/node_modules/node-gyp/test/fixtures/server.key @@ -1,28 +1,27 @@ ------BEGIN PRIVATE KEY----- -MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDA5gjs5nYVf9iN -GAtmgNCVs9zlRldT+PR7XDCIk0+08RrH1jlSjNrQ+e0iTORzZJ+9D4gmuFejfzm0 -jRbmO66psecTtSuNS4+NRfo0d6+TFyz9+WaNqfnCdkxw+waEfvVYO5QQR+W9mKF/ -ZJ7bH7GBm0yFzlcaZWZG5kVgq+SBSEDb/Dz9Csr6M9Of1BEnFmNY2HZEF0LUHu7i -N99KY5JSWNgUfUjZ5N5j5F2LphQhhJT2AShnGy/dOJpKiZ8wXmO3Yyi6jozMD4iE -u+8WNytymHVDGCL1GR90Iwx34imb1hYQuTIbVsNGlDLSLIcPiC9WwCrPVjYp1FJ1 -zcq36dOrAgMBAAECggEACg60Xm2xsHNG/ixHw+NpfLSxCr89JGKxlJD88tIDcOK1 -S8AOoxA3BHhTddteeenALmJV7fbkkuC6SICmtgBcnfppmuxyRd6vsGT6o6ut2tR1 -gxRy1WYMYKg8WhOshlH8RspscODeyKDhorvDUJd5cNGBDuTwQ68PwxiUe3La6iac -EVQoKohg9EmRIhMF1i8I00zXE8p3XENrlTc491ipc+gLPIP5vtqHyQztEUkZHkWd -dXbs+n1hGCr+4FxrphGYEW80HINzmume7dGChr8nvF4ZZcuWW13DJuNim6pQno1i -hM8VdXm8XphLh0XEGI5OCfu/CetkBILZRXKltZk6AQKBgQDoBqJzRlp7regYNU4q -usfS+43tPNaJ0o4DIzcLawqpmK/B/cZStzHl14Sm62BVkKV6cnWAJPeLkENPMFoV -7Q7wLZBJxpPzqXkpeiDkKN4Wovca891Rffne5Sz6IDB5mOxMjfKIEPd5RkmB5Lkp -qQLwm3YJ2AJcLagG/Gi1DFDRAQKBgQDU1G9T43Mjke6TXG0u7gCSb+VwyDRsrvJA -u2vy6+MANRc1EEF31YLmTKOU5XxUmhtIu7TUbgPoNi0HuRFXx4Zul3BPlAosLMJv 
-kNQbA/9d0YQAfSgTsploN5CX65dLZ4ejIzVgDZREzpIBWTze6YZTA2DT5iOIet84 -DD5DujY4qwKBgG0PuUo/9oYOD3tZiv1wwD5+uY6auykbTF9TLStzzBY9y9d+hrsY -mx6zOAoRtz1g+TdeF7b9KVJzo//T9XQ68nuYnyreaWrt7SK+4jj8sK+pOEd1+0Cz -20CXLpX/jWmKpP+y9R5aA0kA7cpdjV90rwoTuN8Vpr5XQ5TNDhaTzGUBAoGABYig -fGXlkH8y3NICZL37ddNC+/O4qTrDQbudyusnM9ItkEuj6CG9DY/gkPaGjQyUuQdo -ZD2YDGmcMh81vDqL3ERDv03yFcP0KkJxwWIRObdA32JhsGFsa7FGKS0O+f7vH+bC -dITl3gQg97gCRSl9PJtR4TCSq/HF7Acld01YK5ECgYEAwLFB5JIuxrowJe74cCMP -n5Rwuc8vWdOsg+ytvQTv0/hVCdzcaLet6YvagnWTWaU7PUwTFxZs/mLQ9CAWVutK -IRzs/GWxGFjH5xotDaJdDDzSdQye4tUqvUVxv7zzzsVycCPBYFkyRQ8Tmr5FLtUJ -Cl48TZ6J8Rx5avjdtOw3QC8= ------END PRIVATE KEY----- +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEA6S1E2WchgmbJYqCnpN7310ZgHjIOqeJe6MpSue2u6z6mTNd5 +izgvQNaANmn3xLFCS5zsuZaTvdPYPkcmSQzb1YcZSUYnAxZifjYARc6kb5GSBl3q ++O70ELyFrimXfZ4JI+bdIG9KiHY17DlvZZZj/csGYVWWg0mkeH3O5LPX6/DXQVh/ +9+gZ02/cdIBCAtZHQwqx7tF/qZA/kD4GZNFpU1DYHzf9H6g9htoCqmNHQWrV2T9y +Fybt6mbZp9kglBmyKYCc7hmQnb7N/mHn1yIuwhBsirCJTfKH86gN81u8M3+SVHA2 +VUHDllcNhpDWlmInXA+ItHdGZHCp95ohqpCPgQIDAQABAoIBABW8R4ewalo6dJlB ++n6O3jFt+PW3mtBRLqGqgm2cb0q0a1IMX+MPWLBFjmwEErl+AH0F4rcmBx2Ryr17 +amEy1qcf0caXyHksNAApznqzWXag7iizxnxv4cZRnHBwphNqkNWM5p3oYd04j6w2 +amDg1O9KZozaKo6QZclpiMiezwjKG+PVZLT8p7afswjv+yDWPDByhlcGiye9QD1T +VuZ0QCoXp6N/8JxW0gdkLp9NqFvGeGFzJ5h6L+d7A6BWw8akXrBRHHcKkyvVYBfd +myhSzSK4FPFMaxaEY/65FlVSyAO6ezGm3Umx4g7mkFjLdwKWaIOjkBkPeFgl3Pp4 +7Lo5X3UCgYEA/FrrIwmEU5ayulBVScEMKeavu5eNY4r0Sqbpov2oyTdYe8G49Pzy +ryMXfunY43moLKpajGwgTKRGvdqFtK08AAkaCssiAPkP3rZuZvMTF4sLo/vlWrjP +3er+tUqj22BzXi5XV0BAvH8Y3TL8KQ3he/8JxDvkC811/DQ9Y/Da3U8CgYEA7Itw +UM37URma08Bj9VTMoL9ZCyURewX+ZLDb2+O8sXGXJs28i1RkE6PTBlnRmedn+Jjk +byzQ5Cs5wA5uMbhYTA7kgXOs1bvgQqmlLmyL6FfHkucoMhr2Di7VeGf4OxE26JZ8 +JdY4+1MOyI3A2rR8WU+GmHxy0ay4K2xe6W0vsi8CgYBoGLEKIPDe8jkDtgOYivOT +jT9MaLXALB+dc8DIpU4swpHTaxP6qyUIrbcReTEolJSU6Ci16BxiwRkVU8D3yMYJ +VbfSX/zE3fh37FUaToa/nXHN0SjJBZdpeXhcHE//PIgaf48zxKNvnhYJmPB/luQ+ +m/PRaMsnOzfCM2JniYEe7QKBgGwjnxhB4tgDtaWCue/pcZc3gzS2IJS2e8N6mzie +l6Ajhu+FdOHZldrotUuc+la61OxwsVYmDeWR4VftAPGYDj3PPSX1RRl9R5wSRGLB +2wBASQvew6CMdNqtDIh8N56BUzHnwh/mHKzBHuwO6hDSHFsUITtLAY7bwGKRq55Z +fUmfAoGBANOYFyoJoDLcl+Jd750lyqfCifcGtkRdmZMtrPXaYnD8ZGme9vz1vsK/ +4iUkV3mi7Z9s1LXMa/tPPfKdVhCM1PXost3/si0+u1Bz5yKqEPXlyy2ltpIVyGu8 +yiy7y75asp8Iii/1cgtwyp9+VeSif8wJ+MHQoGdGxvAQP80R3EjF +-----END RSA PRIVATE KEY----- diff --git a/node_modules/node-gyp/test/fixtures/test-charmap.py b/node_modules/node-gyp/test/fixtures/test-charmap.py index d9fa6fb25e6c6..b338f915bc38f 100644 --- a/node_modules/node-gyp/test/fixtures/test-charmap.py +++ b/node_modules/node-gyp/test/fixtures/test-charmap.py @@ -1,22 +1,30 @@ +from __future__ import print_function import sys import locale -reload(sys) +try: + reload(sys) +except NameError: # Python 3 + pass def main(): encoding = locale.getdefaultlocale()[1] if not encoding: return False - sys.setdefaultencoding(encoding) + try: + sys.setdefaultencoding(encoding) + except AttributeError: # Python 3 + pass + textmap = { 'cp936': u'\u4e2d\u6587', 'cp1252': u'Lat\u012Bna', 'cp932': u'\u306b\u307b\u3093\u3054' } - if textmap.has_key(encoding): - print textmap[encoding] + if encoding in textmap: + print(textmap[encoding]) return True if __name__ == '__main__': - print main() + print(main()) diff --git a/node_modules/node-gyp/test/process-exec-sync.js b/node_modules/node-gyp/test/process-exec-sync.js index 859cbc1f6fe2b..21763bc26de10 100644 --- a/node_modules/node-gyp/test/process-exec-sync.js +++ b/node_modules/node-gyp/test/process-exec-sync.js @@ -1,16 +1,18 @@ 'use 
strict' -var fs = require('graceful-fs') -var child_process = require('child_process') +const fs = require('graceful-fs') +const childProcess = require('child_process') -if (!String.prototype.startsWith) { - String.prototype.startsWith = function(search, pos) { - return this.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search +function startsWith (str, search, pos) { + if (String.prototype.startsWith) { + return str.startsWith(search, pos) } + + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search } -function processExecSync(file, args, options) { - var child, error, timeout, tmpdir, command, quote +function processExecSync (file, args, options) { + var child, error, timeout, tmpdir, command command = makeCommand(file, args) /* @@ -22,19 +24,19 @@ function processExecSync(file, args, options) { // init timeout timeout = Date.now() + options.timeout // init tmpdir - var os_temp_base = '/tmp' - var os = determine_os() - os_temp_base = '/tmp' + var osTempBase = '/tmp' + var os = determineOS() + osTempBase = '/tmp' if (process.env.TMP) { - os_temp_base = process.env.TMP + osTempBase = process.env.TMP } - if (os_temp_base[os_temp_base.length - 1] !== '/') { - os_temp_base += '/' + if (osTempBase[osTempBase.length - 1] !== '/') { + osTempBase += '/' } - tmpdir = os_temp_base + 'processExecSync.' + Date.now() + Math.random() + tmpdir = osTempBase + 'processExecSync.' + Date.now() + Math.random() fs.mkdirSync(tmpdir) // init command @@ -47,7 +49,7 @@ function processExecSync(file, args, options) { } // init child - child = child_process.exec(command, options) + child = childProcess.exec(command, options) var maxTry = 100000 // increases the test time by 6 seconds on win-2016-node-0.10 var tryCount = 0 @@ -84,23 +86,23 @@ function processExecSync(file, args, options) { return child.stdout } -function makeCommand(file, args) { +function makeCommand (file, args) { var command, quote command = file if (args.length > 0) { - for(var i in args) { + for (var i in args) { command = command + ' ' if (args[i][0] === '-') { command = command + args[i] } else { if (!quote) { - command = command + '\"' + command = command + '"' quote = true } command = command + args[i] if (quote) { if (args.length === (parseInt(i) + 1)) { - command = command + '\"' + command = command + '"' } } } @@ -109,29 +111,29 @@ function makeCommand(file, args) { return command } -function determine_os() { +function determineOS () { var os = '' var tmpVar = '' if (process.env.OSTYPE) { tmpVar = process.env.OSTYPE - } else if (process.env.OS) { + } else if (process.env.OS) { tmpVar = process.env.OS } else { - //default is linux + // default is linux tmpVar = 'linux' } - if (tmpVar.startsWith('linux')) { + if (startsWith(tmpVar, 'linux')) { os = 'linux' } - if (tmpVar.startsWith('win')) { + if (startsWith(tmpVar, 'win')) { os = 'win' } return os } -function unlinkFile(file) { +function unlinkFile (file) { fs.unlinkSync(file) } diff --git a/node_modules/node-gyp/test/simple-proxy.js b/node_modules/node-gyp/test/simple-proxy.js index e55330c445bf6..cb0dfcfec7edc 100644 --- a/node_modules/node-gyp/test/simple-proxy.js +++ b/node_modules/node-gyp/test/simple-proxy.js @@ -1,23 +1,26 @@ -var http = require('http') - , https = require('https') - , server = http.createServer(handler) - , port = +process.argv[2] - , prefix = process.argv[3] - , upstream = process.argv[4] - , calls = 0 +'use strict' + +const http = require('http') +const https = require('https') +const server = http.createServer(handler) +const port = 
+process.argv[2] +const prefix = process.argv[3] +const upstream = process.argv[4] +var calls = 0 server.listen(port) function handler (req, res) { - if (req.url.indexOf(prefix) != 0) + if (req.url.indexOf(prefix) !== 0) { throw new Error('request url [' + req.url + '] does not start with [' + prefix + ']') + } var upstreamUrl = upstream + req.url.substring(prefix.length) - console.log(req.url + ' -> ' + upstreamUrl) https.get(upstreamUrl, function (ures) { ures.on('end', function () { - if (++calls == 2) + if (++calls === 2) { server.close() + } }) ures.pipe(res) }) diff --git a/node_modules/node-gyp/test/test-addon.js b/node_modules/node-gyp/test/test-addon.js index 89350effc46b4..f79eff73c169e 100644 --- a/node_modules/node-gyp/test/test-addon.js +++ b/node_modules/node-gyp/test/test-addon.js @@ -1,29 +1,33 @@ 'use strict' -var test = require('tape') -var path = require('path') -var fs = require('graceful-fs') -var child_process = require('child_process') -var addonPath = path.resolve(__dirname, 'node_modules', 'hello_world') -var nodeGyp = path.resolve(__dirname, '..', 'bin', 'node-gyp.js') -var execFileSync = child_process.execFileSync || require('./process-exec-sync') -var execFile = child_process.execFile - -function runHello() { +const test = require('tap').test +const path = require('path') +const fs = require('graceful-fs') +const childProcess = require('child_process') +const os = require('os') +const addonPath = path.resolve(__dirname, 'node_modules', 'hello_world') +const nodeGyp = path.resolve(__dirname, '..', 'bin', 'node-gyp.js') +const execFileSync = childProcess.execFileSync || require('./process-exec-sync') +const execFile = childProcess.execFile + +function runHello (hostProcess) { + if (!hostProcess) { + hostProcess = process.execPath + } var testCode = "console.log(require('hello_world').hello())" - return execFileSync(process.execPath, ['-e', testCode], { cwd: __dirname }).toString() + return execFileSync(hostProcess, ['-e', testCode], { cwd: __dirname }).toString() } -function getEncoding() { - var code = 'import locale;print locale.getdefaultlocale()[1]' +function getEncoding () { + var code = 'import locale;print(locale.getdefaultlocale()[1])' return execFileSync('python', ['-c', code]).toString().trim() } -function checkCharmapValid() { +function checkCharmapValid () { var data try { data = execFileSync('python', ['fixtures/test-charmap.py'], - { cwd: __dirname }) + { cwd: __dirname }) } catch (err) { return false } @@ -38,7 +42,7 @@ test('build simple addon', function (t) { var cmd = [nodeGyp, 'rebuild', '-C', addonPath, '--loglevel=verbose'] var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { var logLines = stderr.toString().trim().split(/\r?\n/) - var lastLine = logLines[logLines.length-1] + var lastLine = logLines[logLines.length - 1] t.strictEqual(err, null) t.strictEqual(lastLine, 'gyp info ok', 'should end in ok') t.strictEqual(runHello().trim(), 'world') @@ -55,9 +59,9 @@ test('build simple addon in path with non-ascii characters', function (t) { } var testDirNames = { - 'cp936': '文件夹', - 'cp1252': 'Latīna', - 'cp932': 'フォルダ' + cp936: '文件夹', + cp1252: 'Latīna', + cp932: 'フォルダ' } // Select non-ascii characters by current encoding var testDirName = testDirNames[getEncoding()] @@ -68,14 +72,15 @@ test('build simple addon in path with non-ascii characters', function (t) { t.plan(3) - var data, configPath = path.join(addonPath, 'build', 'config.gypi') + var data + var configPath = path.join(addonPath, 'build', 'config.gypi') try { data 
= fs.readFileSync(configPath, 'utf8') } catch (err) { t.error(err) return } - var config = JSON.parse(data.replace(/\#.+\n/, '')) + var config = JSON.parse(data.replace(/#.+\n/, '')) var nodeDir = config.variables.nodedir var testNodeDir = path.join(addonPath, testDirName) // Create symbol link to path with non-ascii characters @@ -93,8 +98,14 @@ test('build simple addon in path with non-ascii characters', function (t) { } } - var cmd = [nodeGyp, 'rebuild', '-C', addonPath, - '--loglevel=verbose', '-nodedir=' + testNodeDir] + var cmd = [ + nodeGyp, + 'rebuild', + '-C', + addonPath, + '--loglevel=verbose', + '-nodedir=' + testNodeDir + ] var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { try { fs.unlink(testNodeDir) @@ -103,7 +114,7 @@ test('build simple addon in path with non-ascii characters', function (t) { } var logLines = stderr.toString().trim().split(/\r?\n/) - var lastLine = logLines[logLines.length-1] + var lastLine = logLines[logLines.length - 1] t.strictEqual(err, null) t.strictEqual(lastLine, 'gyp info ok', 'should end in ok') t.strictEqual(runHello().trim(), 'world') @@ -111,3 +122,29 @@ test('build simple addon in path with non-ascii characters', function (t) { proc.stdout.setEncoding('utf-8') proc.stderr.setEncoding('utf-8') }) + +test('addon works with renamed host executable', function (t) { + // No `fs.copyFileSync` before node8. + if (process.version.substr(1).split('.')[0] < 8) { + t.skip('skipping test for old node version') + t.end() + return + } + + t.plan(3) + + var notNodePath = path.join(os.tmpdir(), 'notnode' + path.extname(process.execPath)) + fs.copyFileSync(process.execPath, notNodePath) + + var cmd = [nodeGyp, 'rebuild', '-C', addonPath, '--loglevel=verbose'] + var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { + var logLines = stderr.toString().trim().split(/\r?\n/) + var lastLine = logLines[logLines.length - 1] + t.strictEqual(err, null) + t.strictEqual(lastLine, 'gyp info ok', 'should end in ok') + t.strictEqual(runHello(notNodePath).trim(), 'world') + fs.unlinkSync(notNodePath) + }) + proc.stdout.setEncoding('utf-8') + proc.stderr.setEncoding('utf-8') +}) diff --git a/node_modules/node-gyp/test/test-configure-python.js b/node_modules/node-gyp/test/test-configure-python.js index f235bdbba1c9f..d08f9e5ed38ef 100644 --- a/node_modules/node-gyp/test/test-configure-python.js +++ b/node_modules/node-gyp/test/test-configure-python.js @@ -1,21 +1,24 @@ 'use strict' -var test = require('tape') -var path = require('path') -var gyp = require('../lib/node-gyp') -var requireInject = require('require-inject') -var configure = requireInject('../lib/configure', { +const test = require('tap').test +const path = require('path') +const devDir = require('./common').devDir() +const gyp = require('../lib/node-gyp') +const requireInject = require('require-inject') +const configure = requireInject('../lib/configure', { 'graceful-fs': { - 'openSync': function (file, mode) { return 0; }, - 'closeSync': function (fd) { }, - 'writeFile': function (file, data, cb) { cb() }, - 'stat': function (file, cb) { cb(null, {}) } + openSync: function () { return 0 }, + closeSync: function () { }, + writeFile: function (file, data, cb) { cb() }, + stat: function (file, cb) { cb(null, {}) } } }) -var EXPECTED_PYPATH = path.join(__dirname, '..', 'gyp', 'pylib') -var SEPARATOR = process.platform == 'win32' ? 
';' : ':' -var SPAWN_RESULT = { on: function () { } } +const EXPECTED_PYPATH = path.join(__dirname, '..', 'gyp', 'pylib') +const SEPARATOR = process.platform === 'win32' ? ';' : ':' +const SPAWN_RESULT = { on: function () { } } + +require('npmlog').level = 'warn' test('configure PYTHONPATH with no existing env', function (t) { t.plan(1) @@ -28,6 +31,7 @@ test('configure PYTHONPATH with no existing env', function (t) { t.equal(process.env.PYTHONPATH, EXPECTED_PYPATH) return SPAWN_RESULT } + prog.devDir = devDir configure(prog, [], t.fail) }) @@ -40,7 +44,6 @@ test('configure PYTHONPATH with existing env of one dir', function (t) { var prog = gyp() prog.parseArgv([]) prog.spawn = function () { - t.equal(process.env.PYTHONPATH, [EXPECTED_PYPATH, existingPath].join(SEPARATOR)) var dirs = process.env.PYTHONPATH.split(SEPARATOR) @@ -48,6 +51,7 @@ test('configure PYTHONPATH with existing env of one dir', function (t) { return SPAWN_RESULT } + prog.devDir = devDir configure(prog, [], t.fail) }) @@ -62,7 +66,6 @@ test('configure PYTHONPATH with existing env of multiple dirs', function (t) { var prog = gyp() prog.parseArgv([]) prog.spawn = function () { - t.equal(process.env.PYTHONPATH, [EXPECTED_PYPATH, existingPath].join(SEPARATOR)) var dirs = process.env.PYTHONPATH.split(SEPARATOR) @@ -70,5 +73,6 @@ test('configure PYTHONPATH with existing env of multiple dirs', function (t) { return SPAWN_RESULT } + prog.devDir = devDir configure(prog, [], t.fail) }) diff --git a/node_modules/node-gyp/test/test-download.js b/node_modules/node-gyp/test/test-download.js index 6e6f64f058c2e..738a43f276b09 100644 --- a/node_modules/node-gyp/test/test-download.js +++ b/node_modules/node-gyp/test/test-download.js @@ -1,37 +1,45 @@ 'use strict' -var fs = require('fs') -var http = require('http') -var https = require('https') -var test = require('tape') -var install = require('../lib/install') +const test = require('tap').test +const fs = require('fs') +const path = require('path') +const http = require('http') +const https = require('https') +const install = require('../lib/install') +const semver = require('semver') +const devDir = require('./common').devDir() +const rimraf = require('rimraf') +const gyp = require('../lib/node-gyp') +const log = require('npmlog') + +log.level = 'warn' test('download over http', function (t) { t.plan(2) var server = http.createServer(function (req, res) { t.strictEqual(req.headers['user-agent'], - 'node-gyp v42 (node ' + process.version + ')') + 'node-gyp v42 (node ' + process.version + ')') res.end('ok') server.close() }) - var host = '127.0.0.1' + var host = 'localhost' server.listen(0, host, function () { var port = this.address().port var gyp = { opts: {}, - version: '42', + version: '42' } var url = 'http://' + host + ':' + port var req = install.test.download(gyp, {}, url) req.on('response', function (res) { var body = '' res.setEncoding('utf8') - res.on('data', function(data) { + res.on('data', function (data) { body += data }) - res.on('end', function() { + res.on('end', function () { t.strictEqual(body, 'ok') }) }) @@ -41,17 +49,17 @@ test('download over http', function (t) { test('download over https with custom ca', function (t) { t.plan(3) - var cert = fs.readFileSync(__dirname + '/fixtures/server.crt', 'utf8') - var key = fs.readFileSync(__dirname + '/fixtures/server.key', 'utf8') + var cert = fs.readFileSync(path.join(__dirname, 'fixtures/server.crt'), 'utf8') + var key = fs.readFileSync(path.join(__dirname, 'fixtures/server.key'), 'utf8') - var cafile = __dirname + 
'/fixtures/ca.crt' + var cafile = path.join(__dirname, '/fixtures/ca.crt') var ca = install.test.readCAFile(cafile) t.strictEqual(ca.length, 1) var options = { ca: ca, cert: cert, key: key } var server = https.createServer(options, function (req, res) { t.strictEqual(req.headers['user-agent'], - 'node-gyp v42 (node ' + process.version + ')') + 'node-gyp v42 (node ' + process.version + ')') res.end('ok') server.close() }) @@ -60,22 +68,22 @@ test('download over https with custom ca', function (t) { throw err }) - var host = '127.0.0.1' + var host = 'localhost' server.listen(8000, host, function () { var port = this.address().port var gyp = { opts: { cafile: cafile }, - version: '42', + version: '42' } var url = 'https://' + host + ':' + port var req = install.test.download(gyp, {}, url) req.on('response', function (res) { var body = '' res.setEncoding('utf8') - res.on('data', function(data) { + res.on('data', function (data) { body += data }) - res.on('end', function() { + res.on('end', function () { t.strictEqual(body, 'ok') }) }) @@ -85,7 +93,7 @@ test('download with missing cafile', function (t) { t.plan(1) var gyp = { - opts: { cafile: 'no.such.file' }, + opts: { cafile: 'no.such.file' } } try { install.test.download(gyp, {}, 'http://bad/') @@ -95,8 +103,71 @@ }) test('check certificate splitting', function (t) { - var cas = install.test.readCAFile(__dirname + '/fixtures/ca-bundle.crt') + var cas = install.test.readCAFile(path.join(__dirname, 'fixtures/ca-bundle.crt')) t.plan(2) t.strictEqual(cas.length, 2) t.notStrictEqual(cas[0], cas[1]) }) + +// only run this test if we are running a version of Node with predictable version path behavior + +test('download headers (actual)', function (t) { + if (process.env.FAST_TEST || + process.release.name !== 'node' || + semver.prerelease(process.version) !== null || + semver.satisfies(process.version, '<10')) { + return t.skip('Skipping actual download of headers due to test environment configuration') + } + + t.plan(17) + + const expectedDir = path.join(devDir, process.version.replace(/^v/, '')) + rimraf(expectedDir, (err) => { + t.ifError(err) + + const prog = gyp() + prog.parseArgv([]) + prog.devDir = devDir + log.level = 'warn' + install(prog, [], (err) => { + t.ifError(err) + + fs.readFile(path.join(expectedDir, 'installVersion'), 'utf8', (err, data) => { + t.ifError(err) + t.strictEqual(data, '9\n', 'correct installVersion') + }) + + fs.readdir(path.join(expectedDir, 'include/node'), (err, list) => { + t.ifError(err) + + t.ok(list.includes('common.gypi')) + t.ok(list.includes('config.gypi')) + t.ok(list.includes('node.h')) + t.ok(list.includes('node_version.h')) + t.ok(list.includes('openssl')) + t.ok(list.includes('uv')) + t.ok(list.includes('uv.h')) + t.ok(list.includes('v8-platform.h')) + t.ok(list.includes('v8.h')) + t.ok(list.includes('zlib.h')) + }) + + fs.readFile(path.join(expectedDir, 'include/node/node_version.h'), 'utf8', (err, contents) => { + t.ifError(err) + + const lines = contents.split('\n') + + // extract the 3 version parts from the defines to build a valid version string + // and check them against our current env version + const version = ['major', 'minor', 'patch'].reduce((version, type) => { + const re = new RegExp(`^#define\\sNODE_${type.toUpperCase()}_VERSION`) + const line = lines.find((l) => re.test(l)) + const i = line ? 
parseInt(line.replace(/^[^0-9]+([0-9]+).*$/, '$1'), 10) : 'ERROR' + return `${version}${type !== 'major' ? '.' : 'v'}${i}` + }, '') + + t.strictEqual(version, process.version) + }) + }) + }) +}) diff --git a/node_modules/node-gyp/test/test-find-accessible-sync.js b/node_modules/node-gyp/test/test-find-accessible-sync.js index d336243dd0d7b..0a2e584c4fb33 100644 --- a/node_modules/node-gyp/test/test-find-accessible-sync.js +++ b/node_modules/node-gyp/test/test-find-accessible-sync.js @@ -1,13 +1,13 @@ 'use strict' -var test = require('tape') -var path = require('path') -var requireInject = require('require-inject') -var configure = requireInject('../lib/configure', { +const test = require('tap').test +const path = require('path') +const requireInject = require('require-inject') +const configure = requireInject('../lib/configure', { 'graceful-fs': { - 'closeSync': function (fd) { return undefined }, - 'openSync': function (path) { - if (readableFiles.some(function (f) { return f === path} )) { + closeSync: function () { return undefined }, + openSync: function (path) { + if (readableFiles.some(function (f) { return f === path })) { return 0 } else { var error = new Error('ENOENT - not found') @@ -17,11 +17,11 @@ var configure = requireInject('../lib/configure', { } }) -var dir = path.sep + 'testdir' -var readableFile = 'readable_file' -var anotherReadableFile = 'another_readable_file' -var readableFileInDir = 'somedir' + path.sep + readableFile -var readableFiles = [ +const dir = path.sep + 'testdir' +const readableFile = 'readable_file' +const anotherReadableFile = 'another_readable_file' +const readableFileInDir = 'somedir' + path.sep + readableFile +const readableFiles = [ path.resolve(dir, readableFile), path.resolve(dir, anotherReadableFile), path.resolve(dir, readableFileInDir) @@ -38,7 +38,7 @@ test('find accessible - empty array', function (t) { test('find accessible - single item array, readable', function (t) { t.plan(1) - var candidates = [ readableFile ] + var candidates = [readableFile] var found = configure.test.findAccessibleSync('test', dir, candidates) t.strictEqual(found, path.resolve(dir, readableFile)) }) @@ -46,7 +46,7 @@ test('find accessible - single item array, readable', function (t) { test('find accessible - single item array, readable in subdir', function (t) { t.plan(1) - var candidates = [ readableFileInDir ] + var candidates = [readableFileInDir] var found = configure.test.findAccessibleSync('test', dir, candidates) t.strictEqual(found, path.resolve(dir, readableFileInDir)) }) @@ -54,25 +54,23 @@ test('find accessible - single item array, readable in subdir', function (t) { test('find accessible - single item array, unreadable', function (t) { t.plan(1) - var candidates = [ 'unreadable_file' ] + var candidates = ['unreadable_file'] var found = configure.test.findAccessibleSync('test', dir, candidates) t.strictEqual(found, undefined) }) - test('find accessible - multi item array, no matches', function (t) { t.plan(1) - var candidates = [ 'non_existent_file', 'unreadable_file' ] + var candidates = ['non_existent_file', 'unreadable_file'] var found = configure.test.findAccessibleSync('test', dir, candidates) t.strictEqual(found, undefined) }) - test('find accessible - multi item array, single match', function (t) { t.plan(1) - var candidates = [ 'non_existent_file', readableFile ] + var candidates = ['non_existent_file', readableFile] var found = configure.test.findAccessibleSync('test', dir, candidates) t.strictEqual(found, path.resolve(dir, readableFile)) }) @@ 
-80,7 +78,7 @@ test('find accessible - multi item array, single match', function (t) { test('find accessible - multi item array, return first match', function (t) { t.plan(1) - var candidates = [ 'non_existent_file', anotherReadableFile, readableFile ] + var candidates = ['non_existent_file', anotherReadableFile, readableFile] var found = configure.test.findAccessibleSync('test', dir, candidates) t.strictEqual(found, path.resolve(dir, anotherReadableFile)) }) diff --git a/node_modules/node-gyp/test/test-find-node-directory.js b/node_modules/node-gyp/test/test-find-node-directory.js index 46659d0cfe8fa..f1380d162ae7c 100644 --- a/node_modules/node-gyp/test/test-find-node-directory.js +++ b/node_modules/node-gyp/test/test-find-node-directory.js @@ -1,8 +1,10 @@ -var test = require('tape') -var path = require('path') -var findNodeDirectory = require('../lib/find-node-directory') +'use strict' -var platforms = ['darwin', 'freebsd', 'linux', 'sunos', 'win32', 'aix'] +const test = require('tap').test +const path = require('path') +const findNodeDirectory = require('../lib/find-node-directory') + +const platforms = ['darwin', 'freebsd', 'linux', 'sunos', 'win32', 'aix'] // we should find the directory based on the directory // the script is running in and it should match the layout @@ -11,10 +13,10 @@ var platforms = ['darwin', 'freebsd', 'linux', 'sunos', 'win32', 'aix'] test('test find-node-directory - node install', function (t) { t.plan(platforms.length) for (var next = 0; next < platforms.length; next++) { - var processObj = {execPath: '/x/y/bin/node', platform: platforms[next]} + var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } t.equal( findNodeDirectory('/x/deps/npm/node_modules/node-gyp/lib', processObj), - path.join('/x')) + path.join('/x')) } }) @@ -26,15 +28,15 @@ test('test find-node-directory - node install', function (t) { test('test find-node-directory - node build', function (t) { t.plan(platforms.length) for (var next = 0; next < platforms.length; next++) { - var processObj = {execPath: '/x/y/bin/node', platform: platforms[next]} + var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } if (platforms[next] === 'win32') { t.equal( findNodeDirectory('/y/node_modules/npm/node_modules/node-gyp/lib', - processObj), path.join('/y')) + processObj), path.join('/y')) } else { t.equal( findNodeDirectory('/y/lib/node_modules/npm/node_modules/node-gyp/lib', - processObj), path.join('/y')) + processObj), path.join('/y')) } } }) @@ -44,7 +46,7 @@ test('test find-node-directory - node build', function (t) { test('test find-node-directory - node in bin directory', function (t) { t.plan(platforms.length) for (var next = 0; next < platforms.length; next++) { - var processObj = {execPath: '/x/y/bin/node', platform: platforms[next]} + var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } t.equal( findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), path.join('/x/y')) @@ -58,15 +60,17 @@ test('test find-node-directory - node in build release dir', function (t) { for (var next = 0; next < platforms.length; next++) { var processObj if (platforms[next] === 'win32') { - processObj = {execPath: '/x/y/Release/node', platform: platforms[next]} + processObj = { execPath: '/x/y/Release/node', platform: platforms[next] } } else { - processObj = {execPath: '/x/y/out/Release/node', - platform: platforms[next]} + processObj = { + execPath: '/x/y/out/Release/node', + platform: platforms[next] + } } t.equal( 
findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), - path.join('/x/y')) + path.join('/x/y')) } }) @@ -77,14 +81,14 @@ test('test find-node-directory - node in Debug release dir', function (t) { for (var next = 0; next < platforms.length; next++) { var processObj if (platforms[next] === 'win32') { - processObj = {execPath: '/a/b/Debug/node', platform: platforms[next]} + processObj = { execPath: '/a/b/Debug/node', platform: platforms[next] } } else { - processObj = {execPath: '/a/b/out/Debug/node', platform: platforms[next]} + processObj = { execPath: '/a/b/out/Debug/node', platform: platforms[next] } } t.equal( findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), - path.join('/a/b')) + path.join('/a/b')) } }) @@ -93,7 +97,7 @@ test('test find-node-directory - node in Debug release dir', function (t) { test('test find-node-directory - not found', function (t) { t.plan(platforms.length) for (var next = 0; next < platforms.length; next++) { - var processObj = {execPath: '/x/y/z/y', platform:next} + var processObj = { execPath: '/x/y/z/y', platform: next } t.equal(findNodeDirectory('/a/b/c/d', processObj), '') } }) @@ -107,9 +111,9 @@ test('test find-node-directory - not found', function (t) { test('test find-node-directory - node install', function (t) { t.plan(platforms.length) for (var next = 0; next < platforms.length; next++) { - var processObj = {execPath: '/x/y/bin/node', platform: platforms[next]} + var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } t.equal( findNodeDirectory('/x/y/z/a/b/c/deps/npm/node_modules/node-gyp/lib', - processObj), path.join('/x/y/z/a/b/c')) + processObj), path.join('/x/y/z/a/b/c')) } }) diff --git a/node_modules/node-gyp/test/test-find-python.js b/node_modules/node-gyp/test/test-find-python.js index 570eb180de7c7..1c86f45b73b19 100644 --- a/node_modules/node-gyp/test/test-find-python.js +++ b/node_modules/node-gyp/test/test-find-python.js @@ -1,339 +1,232 @@ 'use strict' -var test = require('tape') -var path = require('path') -var configure = require('../lib/configure') -var execFile = require('child_process').execFile -var PythonFinder = configure.test.PythonFinder +delete process.env.PYTHON + +const test = require('tap').test +const findPython = require('../lib/find-python') +const execFile = require('child_process').execFile +const PythonFinder = findPython.test.PythonFinder + +require('npmlog').level = 'warn' test('find python', function (t) { t.plan(4) - configure.test.findPython('python', function (err, found) { + findPython.test.findPython(null, function (err, found) { t.strictEqual(err, null) var proc = execFile(found, ['-V'], function (err, stdout, stderr) { t.strictEqual(err, null) - t.strictEqual(stdout, '') - t.ok(/Python 2/.test(stderr)) + if (/Python 2/.test(stderr)) { + t.strictEqual(stdout, '') + t.ok(/Python 2/.test(stderr)) + } else { + t.ok(/Python 3/.test(stdout)) + t.strictEqual(stderr, '') + } }) proc.stdout.setEncoding('utf-8') proc.stderr.setEncoding('utf-8') }) }) -function poison(object, property) { - function fail() { - throw new Error('Property ' + property + ' should not have been accessed.') +function poison (object, property) { + function fail () { + console.error(Error(`Property ${property} should not have been accessed.`)) + process.abort() } var descriptor = { - configurable: true, + configurable: false, enumerable: false, - writable: true, - getter: fail, - setter: fail, + get: fail, + set: fail } Object.defineProperty(object, property, descriptor) } -// Work 
around a v0.10.x CI issue where path.resolve() on UNIX systems prefixes -// Windows paths with the current working directory. v0.12 and up are free of -// this issue because they use path.win32.resolve() which does the right thing. -var resolve = path.win32 && path.win32.resolve || function() { - function rstrip(s) { return s.replace(/\\+$/, '') } - return [].slice.call(arguments).map(rstrip).join('\\') +function TestPythonFinder () { + PythonFinder.apply(this, arguments) } - -function TestPythonFinder() { PythonFinder.apply(this, arguments) } TestPythonFinder.prototype = Object.create(PythonFinder.prototype) -poison(TestPythonFinder.prototype, 'env') -poison(TestPythonFinder.prototype, 'execFile') -poison(TestPythonFinder.prototype, 'resolve') -poison(TestPythonFinder.prototype, 'stat') -poison(TestPythonFinder.prototype, 'which') -poison(TestPythonFinder.prototype, 'win') +// Silence npmlog - remove for debugging +TestPythonFinder.prototype.log = { + silly: () => {}, + verbose: () => {}, + info: () => {}, + warn: () => {}, + error: () => {} +} +delete TestPythonFinder.prototype.env.NODE_GYP_FORCE_PYTHON test('find python - python', function (t) { - t.plan(5) + t.plan(6) var f = new TestPythonFinder('python', done) - f.which = function(program, cb) { - t.strictEqual(program, 'python') - cb(null, program) - } - f.execFile = function(program, args, opts, cb) { - t.strictEqual(program, 'python') - t.ok(/import sys/.test(args[1])) - cb(null, '2.7.0') + f.execFile = function (program, args, opts, cb) { + f.execFile = function (program, args, opts, cb) { + poison(f, 'execFile') + t.strictEqual(program, '/path/python') + t.ok(/sys\.version_info/.test(args[1])) + cb(null, '2.7.15') + } + t.strictEqual(program, + process.platform === 'win32' ? '"python"' : 'python') + t.ok(/sys\.executable/.test(args[1])) + cb(null, '/path/python') } - f.checkPython() + f.findPython() - function done(err, python) { + function done (err, python) { t.strictEqual(err, null) - t.strictEqual(python, 'python') + t.strictEqual(python, '/path/python') } }) test('find python - python too old', function (t) { - t.plan(4) - - var f = new TestPythonFinder('python', done) - f.which = function(program, cb) { - t.strictEqual(program, 'python') - cb(null, program) - } - f.execFile = function(program, args, opts, cb) { - t.strictEqual(program, 'python') - t.ok(/import sys/.test(args[1])) - cb(null, '2.3.4') - } - f.checkPython() - - function done(err, python) { - t.ok(/is not supported by gyp/.test(err)) - } -}) - -test('find python - python too new', function (t) { - t.plan(4) + t.plan(2) - var f = new TestPythonFinder('python', done) - f.which = function(program, cb) { - t.strictEqual(program, 'python') - cb(null, program) - } - f.execFile = function(program, args, opts, cb) { - t.strictEqual(program, 'python') - t.ok(/import sys/.test(args[1])) - cb(null, '3.0.0') + var f = new TestPythonFinder(null, done) + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + cb(null, '/path/python') + } else if (/sys\.version_info/.test(args[args.length - 1])) { + cb(null, '2.3.4') + } else { + t.fail() + } } - f.checkPython() + f.findPython() - function done(err, python) { - t.ok(/is not supported by gyp/.test(err)) + function done (err) { + t.ok(/Could not find any Python/.test(err)) + t.ok(/not supported/i.test(f.errorLog)) } }) test('find python - no python', function (t) { t.plan(2) - var f = new TestPythonFinder('python', done) - f.which = function(program, cb) { - 
t.strictEqual(program, 'python') - cb(new Error('not found')) - } - f.checkPython() - - function done(err, python) { - t.ok(/Can't find Python executable/.test(err)) - } -}) - -test('find python - no python2', function (t) { - t.plan(6) - - var f = new TestPythonFinder('python2', done) - f.which = function(program, cb) { - f.which = function(program, cb) { - t.strictEqual(program, 'python') - cb(null, program) + var f = new TestPythonFinder(null, done) + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else if (/sys\.version_info/.test(args[args.length - 1])) { + cb(new Error('not a Python executable')) + } else { + t.fail() } - t.strictEqual(program, 'python2') - cb(new Error('not found')) } - f.execFile = function(program, args, opts, cb) { - t.strictEqual(program, 'python') - t.ok(/import sys/.test(args[1])) - cb(null, '2.7.0') - } - f.checkPython() + f.findPython() - function done(err, python) { - t.strictEqual(err, null) - t.strictEqual(python, 'python') + function done (err) { + t.ok(/Could not find any Python/.test(err)) + t.ok(/not in PATH/.test(f.errorLog)) } }) test('find python - no python2, no python, unix', function (t) { - t.plan(3) + t.plan(2) - var f = new TestPythonFinder('python2', done) - poison(f, 'checkPythonLauncher') + var f = new TestPythonFinder(null, done) + f.checkPyLauncher = t.fail f.win = false - f.which = function(program, cb) { - f.which = function(program, cb) { - t.strictEqual(program, 'python') + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { cb(new Error('not found')) + } else { + t.fail() } - t.strictEqual(program, 'python2') - cb(new Error('not found')) } - f.checkPython() + f.findPython() - function done(err, python) { - t.ok(/Can't find Python executable/.test(err)) + function done (err) { + t.ok(/Could not find any Python/.test(err)) + t.ok(/not in PATH/.test(f.errorLog)) } }) test('find python - no python, use python launcher', function (t) { - t.plan(8) - - var f = new TestPythonFinder('python', done) - f.env = {} - f.win = true - - f.which = function(program, cb) { - t.strictEqual(program, 'python') - cb(new Error('not found')) - } - f.execFile = function(program, args, opts, cb) { - f.execFile = function(program, args, opts, cb) { - t.strictEqual(program, 'Z:\\snake.exe') - t.ok(/import sys/.test(args[1])) - cb(null, '2.7.0') - } - t.strictEqual(program, 'py.exe') - t.notEqual(args.indexOf('-2'), -1) - t.notEqual(args.indexOf('-c'), -1) - cb(null, 'Z:\\snake.exe') - } - f.checkPython() - - function done(err, python) { - t.strictEqual(err, null) - t.strictEqual(python, 'Z:\\snake.exe') - } -}) - -test('find python - python 3, use python launcher', function (t) { - t.plan(10) + t.plan(4) - var f = new TestPythonFinder('python', done) - f.env = {} + var f = new TestPythonFinder(null, done) f.win = true - f.which = function(program, cb) { - t.strictEqual(program, 'python') - cb(null, program) - } - f.execFile = function(program, args, opts, cb) { - f.execFile = function(program, args, opts, cb) { - f.execFile = function(program, args, opts, cb) { - t.strictEqual(program, 'Z:\\snake.exe') - t.ok(/import sys/.test(args[1])) - cb(null, '2.7.0') - } - t.strictEqual(program, 'py.exe') + f.execFile = function (program, args, opts, cb) { + if (program === 'py.exe') { t.notEqual(args.indexOf('-2'), -1) t.notEqual(args.indexOf('-c'), -1) - cb(null, 'Z:\\snake.exe') + return cb(null, 'Z:\\snake.exe') + } + if 
(/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else if (/sys\.version_info/.test(args[args.length - 1])) { + if (program === 'Z:\\snake.exe') { + cb(null, '2.7.14') + } else { + t.fail() + } + } else { + t.fail() } - t.strictEqual(program, 'python') - t.ok(/import sys/.test(args[1])) - cb(null, '3.0.0') } - f.checkPython() + f.findPython() - function done(err, python) { + function done (err, python) { t.strictEqual(err, null) t.strictEqual(python, 'Z:\\snake.exe') } }) -test('find python - python 3, use python launcher, python 2 too old', - function (t) { - t.plan(9) +test('find python - no python, no python launcher, good guess', function (t) { + t.plan(4) - var f = new TestPythonFinder('python', done) - f.checkedPythonLauncher = false - f.env = {} + var re = /C:[\\/]Python27[\\/]python[.]exe/ + var f = new TestPythonFinder(null, done) f.win = true - f.which = function(program, cb) { - t.strictEqual(program, 'python') - cb(null, program) - } - f.execFile = function(program, args, opts, cb) { - f.execFile = function(program, args, opts, cb) { - f.execFile = function(program, args, opts, cb) { - t.strictEqual(program, 'Z:\\snake.exe') - t.ok(/import sys/.test(args[1])) - cb(null, '2.3.4') + f.execFile = function (program, args, opts, cb) { + if (program === 'py.exe') { + f.execFile = function (program, args, opts, cb) { + poison(f, 'execFile') + t.ok(re.test(program)) + t.ok(/sys\.version_info/.test(args[args.length - 1])) + cb(null, '2.7.14') } - t.strictEqual(program, 'py.exe') - t.notEqual(args.indexOf('-2'), -1) - t.notEqual(args.indexOf('-c'), -1) - cb(null, 'Z:\\snake.exe') + return cb(new Error('not found')) } - t.strictEqual(program, 'python') - t.ok(/import sys/.test(args[1])) - cb(null, '3.0.0') - } - f.checkPython() - - function done(err, python) { - t.ok(/is not supported by gyp/.test(err)) - } -}) - -test('find python - no python, no python launcher, good guess', function (t) { - t.plan(6) - - var re = /C:[\\\/]Python27[\\\/]python[.]exe/ - var f = new TestPythonFinder('python', done) - f.env = {} - f.win = true - - f.which = function(program, cb) { - t.strictEqual(program, 'python') - cb(new Error('not found')) - } - f.execFile = function(program, args, opts, cb) { - f.execFile = function(program, args, opts, cb) { - t.ok(re.test(program)) - t.ok(/import sys/.test(args[1])) - cb(null, '2.7.0') + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else { + t.fail() } - t.strictEqual(program, 'py.exe') - cb(new Error('not found')) - } - f.resolve = resolve - f.stat = function(path, cb) { - t.ok(re.test(path)) - cb(null, {}) } - f.checkPython() + f.findPython() - function done(err, python) { + function done (err, python) { + t.strictEqual(err, null) t.ok(re.test(python)) } }) test('find python - no python, no python launcher, bad guess', function (t) { - t.plan(4) + t.plan(2) - var f = new TestPythonFinder('python', done) - f.env = { SystemDrive: 'Z:\\' } + var f = new TestPythonFinder(null, done) f.win = true - f.which = function(program, cb) { - t.strictEqual(program, 'python') - cb(new Error('not found')) - } - f.execFile = function(program, args, opts, cb) { - t.strictEqual(program, 'py.exe') - cb(new Error('not found')) - } - f.resolve = resolve - f.stat = function(path, cb) { - t.ok(/Z:[\\\/]Python27[\\\/]python.exe/.test(path)) - var err = new Error('not found') - err.code = 'ENOENT' - cb(err) + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + 
cb(new Error('not found')) + } else if (/sys\.version_info/.test(args[args.length - 1])) { + cb(new Error('not a Python executable')) + } else { + t.fail() + } } - f.checkPython() + f.findPython() - function done(err, python) { - t.ok(/Can't find Python executable/.test(err)) + function done (err) { + t.ok(/Could not find any Python/.test(err)) + t.ok(/not in PATH/.test(f.errorLog)) } }) diff --git a/node_modules/node-gyp/test/test-find-visualstudio.js b/node_modules/node-gyp/test/test-find-visualstudio.js new file mode 100644 index 0000000000000..1327cf8841111 --- /dev/null +++ b/node_modules/node-gyp/test/test-find-visualstudio.js @@ -0,0 +1,676 @@ +'use strict' + +const test = require('tap').test +const fs = require('fs') +const path = require('path') +const findVisualStudio = require('../lib/find-visualstudio') +const VisualStudioFinder = findVisualStudio.test.VisualStudioFinder + +const semverV1 = { major: 1, minor: 0, patch: 0 } + +delete process.env.VCINSTALLDIR + +function poison (object, property) { + function fail () { + console.error(Error(`Property ${property} should not have been accessed.`)) + process.abort() + } + var descriptor = { + configurable: false, + enumerable: false, + get: fail, + set: fail + } + Object.defineProperty(object, property, descriptor) +} + +function TestVisualStudioFinder () { VisualStudioFinder.apply(this, arguments) } +TestVisualStudioFinder.prototype = Object.create(VisualStudioFinder.prototype) +// Silence npmlog - remove for debugging +TestVisualStudioFinder.prototype.log = { + silly: () => {}, + verbose: () => {}, + info: () => {}, + warn: () => {}, + error: () => {} +} + +test('VS2013', function (t) { + t.plan(4) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\MSBuild12\\MSBuild.exe', + path: 'C:\\VS2013', + sdk: null, + toolset: 'v120', + version: '12.0', + versionMajor: 12, + versionMinor: 0, + versionYear: 2013 + }) + }) + + finder.findVisualStudio2017OrNewer = (cb) => { + finder.parseData(new Error(), '', '', cb) + } + finder.regSearchKeys = (keys, value, addOpts, cb) => { + for (var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + continue + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': + t.pass(`expected search for registry value ${fullName}`) + return cb(null, 'C:\\VS2013\\VC\\') + case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\12.0\\MSBuildToolsPath': + t.pass(`expected search for registry value ${fullName}`) + return cb(null, 'C:\\MSBuild12\\') + default: + t.fail(`unexpected search for registry value ${fullName}`) + } + } + return cb(new Error()) + } + finder.findVisualStudio() +}) + +test('VS2013 should not be found on new node versions', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder({ + major: 10, + minor: 0, + patch: 0 + }, null, (err, info) => { + t.ok(/find .* Visual Studio/i.test(err), 'expect error') + t.false(info, 'no data') + }) + + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', 'VS_2017_Unusable.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.regSearchKeys = (keys, value, addOpts, cb) => { + for (var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { 
+ case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + continue + default: + t.fail(`unexpected search for registry value ${fullName}`) + } + } + return cb(new Error()) + } + finder.findVisualStudio() +}) + +test('VS2015', function (t) { + t.plan(4) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\MSBuild14\\MSBuild.exe', + path: 'C:\\VS2015', + sdk: null, + toolset: 'v140', + version: '14.0', + versionMajor: 14, + versionMinor: 0, + versionYear: 2015 + }) + }) + + finder.findVisualStudio2017OrNewer = (cb) => { + finder.parseData(new Error(), '', '', cb) + } + finder.regSearchKeys = (keys, value, addOpts, cb) => { + for (var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + t.pass(`expected search for registry value ${fullName}`) + return cb(null, 'C:\\VS2015\\VC\\') + case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\14.0\\MSBuildToolsPath': + t.pass(`expected search for registry value ${fullName}`) + return cb(null, 'C:\\MSBuild14\\') + default: + t.fail(`unexpected search for registry value ${fullName}`) + } + } + return cb(new Error()) + } + finder.findVisualStudio() +}) + +test('error from PowerShell', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(new Error(), '', '', (info) => { + t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('empty output from PowerShell', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(null, '', '', (info) => { + t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('output from PowerShell not JSON', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(null, 'AAAABBBB', '', (info) => { + t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('wrong JSON from PowerShell', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(null, '{}', '', (info) => { + t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('empty JSON from PowerShell', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(null, '[]', '', (info) => { + t.ok(/find .* Visual Studio/i.test(finder.errorLog[0]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('future version', function (t) { + t.plan(3) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(null, JSON.stringify([{ + packages: [ + 'Microsoft.VisualStudio.Component.VC.Tools.x86.x64', + 'Microsoft.VisualStudio.Component.Windows10SDK.17763', + 'Microsoft.VisualStudio.VC.MSBuild.Base' + ], + path: 'C:\\VS', + version: '9999.9999.9999.9999' + }]), '', (info) => { + t.ok(/unknown version/i.test(finder.errorLog[0]), 'expect error') + t.ok(/find .* Visual Studio/i.test(finder.errorLog[1]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('single unusable VS2017', function (t) { + t.plan(3) + + const finder = new TestVisualStudioFinder(semverV1, 
null, null) + + const file = path.join(__dirname, 'fixtures', 'VS_2017_Unusable.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', (info) => { + t.ok(/checking/i.test(finder.errorLog[0]), 'expect error') + t.ok(/find .* Visual Studio/i.test(finder.errorLog[2]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('minimal VS2017 Build Tools', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' + + 'BuildTools\\MSBuild\\15.0\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\BuildTools', + sdk: '10.0.17134.0', + toolset: 'v141', + version: '15.9.28307.665', + versionMajor: 15, + versionMinor: 9, + versionYear: 2017 + }) + }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2017_BuildTools_minimal.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() +}) + +test('VS2017 Community with C++ workload', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' + + 'Community\\MSBuild\\15.0\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community', + sdk: '10.0.17763.0', + toolset: 'v141', + version: '15.9.28307.665', + versionMajor: 15, + versionMinor: 9, + versionYear: 2017 + }) + }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2017_Community_workload.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() +}) + +test('VS2017 Express', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' + + 'WDExpress\\MSBuild\\15.0\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\WDExpress', + sdk: '10.0.17763.0', + toolset: 'v141', + version: '15.9.28307.858', + versionMajor: 15, + versionMinor: 9, + versionYear: 2017 + }) + }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', 'VS_2017_Express.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() +}) + +test('VS2019 Preview with C++ workload', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' + + 'Preview\\MSBuild\\Current\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Preview', + sdk: '10.0.17763.0', + toolset: 'v142', + version: '16.0.28608.199', + versionMajor: 16, + versionMinor: 0, + versionYear: 2019 + }) + }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2019_Preview.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + 
finder.findVisualStudio() +}) + +test('minimal VS2019 Build Tools', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' + + 'BuildTools\\MSBuild\\Current\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools', + sdk: '10.0.17134.0', + toolset: 'v142', + version: '16.1.28922.388', + versionMajor: 16, + versionMinor: 1, + versionYear: 2019 + }) + }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2019_BuildTools_minimal.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() +}) + +test('VS2019 Community with C++ workload', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' + + 'Community\\MSBuild\\Current\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community', + sdk: '10.0.17763.0', + toolset: 'v142', + version: '16.1.28922.388', + versionMajor: 16, + versionMinor: 1, + versionYear: 2019 + }) + }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2019_Community_workload.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() +}) + +function allVsVersions (t, finder) { + finder.findVisualStudio2017OrNewer = (cb) => { + const data0 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2017_Unusable.txt'))) + const data1 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2017_BuildTools_minimal.txt'))) + const data2 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2017_Community_workload.txt'))) + const data3 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2017_Express.txt'))) + const data4 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2019_Preview.txt'))) + const data5 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2019_BuildTools_minimal.txt'))) + const data6 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2019_Community_workload.txt'))) + const data = JSON.stringify(data0.concat(data1, data2, data3, data4, + data5, data6)) + finder.parseData(null, data, '', cb) + } + finder.regSearchKeys = (keys, value, addOpts, cb) => { + for (var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': + continue + case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': + return cb(null, 'C:\\VS2013\\VC\\') + case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\12.0\\MSBuildToolsPath': + return cb(null, 'C:\\MSBuild12\\') + case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + return cb(null, 'C:\\VS2015\\VC\\') + case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\14.0\\MSBuildToolsPath': + return cb(null, 'C:\\MSBuild14\\') + default: + t.fail(`unexpected search for registry value ${fullName}`) + } + } + return cb(new Error()) + } +} + 
+test('fail when looking for invalid path', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, 'AABB', (err, info) => { + t.ok(/find .* Visual Studio/i.test(err), 'expect error') + t.false(info, 'no data') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2013 by version number', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, '2013', (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.versionYear, 2013) + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2013 by installation path', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2013', + (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, 'C:\\VS2013') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2015 by version number', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, '2015', (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.versionYear, 2015) + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2015 by installation path', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015', + (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, 'C:\\VS2015') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2017 by version number', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, '2017', (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.versionYear, 2017) + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2017 by installation path', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community', + (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2019 by version number', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, '2019', (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.versionYear, 2019) + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2019 by installation path', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools', + (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('msvs_version match should be case insensitive', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, + 'c:\\program files (x86)\\microsoft visual studio\\2019\\BUILDTOOLS', + (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('latest version should be found by default', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.versionYear, 2019) + }) + + allVsVersions(t, finder) + 
finder.findVisualStudio() +}) + +test('run on a usable VS Command Prompt', function (t) { + t.plan(2) + + process.env.VCINSTALLDIR = 'C:\\VS2015\\VC' + // VSINSTALLDIR is not defined on Visual C++ Build Tools 2015 + delete process.env.VSINSTALLDIR + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, 'C:\\VS2015') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('VCINSTALLDIR match should be case insensitive', function (t) { + t.plan(2) + + process.env.VCINSTALLDIR = + 'c:\\program files (x86)\\microsoft visual studio\\2019\\BUILDTOOLS\\VC' + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('run on a unusable VS Command Prompt', function (t) { + t.plan(2) + + process.env.VCINSTALLDIR = + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildToolsUnusable\\VC' + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.ok(/find .* Visual Studio/i.test(err), 'expect error') + t.false(info, 'no data') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('run on a VS Command Prompt with matching msvs_version', function (t) { + t.plan(2) + + process.env.VCINSTALLDIR = 'C:\\VS2015\\VC' + + const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015', + (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, 'C:\\VS2015') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('run on a VS Command Prompt with mismatched msvs_version', function (t) { + t.plan(2) + + process.env.VCINSTALLDIR = + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools\\VC' + + const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015', + (err, info) => { + t.ok(/find .* Visual Studio/i.test(err), 'expect error') + t.false(info, 'no data') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) diff --git a/node_modules/node-gyp/test/test-install.js b/node_modules/node-gyp/test/test-install.js index f647326a7fc66..c3317155e0f3d 100644 --- a/node_modules/node-gyp/test/test-install.js +++ b/node_modules/node-gyp/test/test-install.js @@ -1,7 +1,9 @@ 'use strict' -var test = require('tape') -var install = require('../lib/install').test.install +const test = require('tap').test +const install = require('../lib/install').test.install + +require('npmlog').level = 'error' // we expect a warning test('EACCES retry once', function (t) { t.plan(3) @@ -11,10 +13,9 @@ test('EACCES retry once', function (t) { var err = new Error() err.code = 'EACCES' cb(err) - t.ok(true); + t.ok(true) } - var gyp = {} gyp.devDir = __dirname gyp.opts = {} diff --git a/node_modules/node-gyp/test/test-options.js b/node_modules/node-gyp/test/test-options.js index d097f81be62c8..252baa2035e0c 100644 --- a/node_modules/node-gyp/test/test-options.js +++ b/node_modules/node-gyp/test/test-options.js @@ -1,15 +1,15 @@ -'use strict'; +'use strict' -var test = require('tape') -var gyp = require('../lib/node-gyp') +const test = require('tap').test +const gyp = require('../lib/node-gyp') test('options in environment', function (t) { t.plan(1) // `npm test` dumps a ton of npm_config_* variables in the environment. 
Object.keys(process.env) - .filter(function(key) { return /^npm_config_/.test(key) }) - .forEach(function(key) { delete process.env[key] }) + .filter(function (key) { return /^npm_config_/.test(key) }) + .forEach(function (key) { delete process.env[key] }) // Zero-length keys should get filtered out. process.env.npm_config_ = '42' @@ -18,8 +18,8 @@ test('options in environment', function (t) { // Except loglevel. process.env.npm_config_loglevel = 'debug' - var g = gyp(); - g.parseArgv(['rebuild']) // Also sets opts.argv. + var g = gyp() + g.parseArgv(['rebuild']) // Also sets opts.argv. t.deepEqual(Object.keys(g.opts).sort(), ['argv', 'x']) }) diff --git a/node_modules/node-gyp/test/test-process-release.js b/node_modules/node-gyp/test/test-process-release.js index 48411ae0a7d5a..c3ee0703c532f 100644 --- a/node_modules/node-gyp/test/test-process-release.js +++ b/node_modules/node-gyp/test/test-process-release.js @@ -1,5 +1,7 @@ -var test = require('tape') -var processRelease = require('../lib/process-release') +'use strict' + +const test = require('tap').test +const processRelease = require('../lib/process-release') test('test process release - process.version = 0.8.20', function (t) { t.plan(2) @@ -16,10 +18,9 @@ test('test process release - process.version = 0.8.20', function (t) { tarballUrl: 'https://nodejs.org/dist/v0.8.20/node-v0.8.20.tar.gz', shasumsUrl: 'https://nodejs.org/dist/v0.8.20/SHASUMS256.txt', versionDir: '0.8.20', - libUrl32: 'https://nodejs.org/dist/v0.8.20/node.lib', - libUrl64: 'https://nodejs.org/dist/v0.8.20/x64/node.lib', - libPath32: 'node.lib', - libPath64: 'x64/node.lib' + ia32: { libUrl: 'https://nodejs.org/dist/v0.8.20/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.8.20/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.8.20/arm64/node.lib', libPath: 'arm64/node.lib' } }) }) @@ -38,10 +39,9 @@ test('test process release - process.version = 0.10.21', function (t) { tarballUrl: 'https://nodejs.org/dist/v0.10.21/node-v0.10.21.tar.gz', shasumsUrl: 'https://nodejs.org/dist/v0.10.21/SHASUMS256.txt', versionDir: '0.10.21', - libUrl32: 'https://nodejs.org/dist/v0.10.21/node.lib', - libUrl64: 'https://nodejs.org/dist/v0.10.21/x64/node.lib', - libPath32: 'node.lib', - libPath64: 'x64/node.lib' + ia32: { libUrl: 'https://nodejs.org/dist/v0.10.21/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.10.21/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.10.21/arm64/node.lib', libPath: 'arm64/node.lib' } }) }) @@ -61,10 +61,9 @@ test('test process release - process.version = 0.12.9', function (t) { tarballUrl: 'https://nodejs.org/dist/v0.12.9/node-v0.12.9.tar.gz', shasumsUrl: 'https://nodejs.org/dist/v0.12.9/SHASUMS256.txt', versionDir: '0.12.9', - libUrl32: 'https://nodejs.org/dist/v0.12.9/node.lib', - libUrl64: 'https://nodejs.org/dist/v0.12.9/x64/node.lib', - libPath32: 'node.lib', - libPath64: 'x64/node.lib' + ia32: { libUrl: 'https://nodejs.org/dist/v0.12.9/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.12.9/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.12.9/arm64/node.lib', libPath: 'arm64/node.lib' } }) }) @@ -84,10 +83,9 @@ test('test process release - process.version = 0.10.41', function (t) { tarballUrl: 'https://nodejs.org/dist/v0.10.41/node-v0.10.41.tar.gz', shasumsUrl: 'https://nodejs.org/dist/v0.10.41/SHASUMS256.txt', versionDir: '0.10.41', - libUrl32: 
'https://nodejs.org/dist/v0.10.41/node.lib', - libUrl64: 'https://nodejs.org/dist/v0.10.41/x64/node.lib', - libPath32: 'node.lib', - libPath64: 'x64/node.lib' + ia32: { libUrl: 'https://nodejs.org/dist/v0.10.41/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.10.41/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.10.41/arm64/node.lib', libPath: 'arm64/node.lib' } }) }) @@ -107,10 +105,9 @@ test('test process release - process.release ~ node@0.10.42', function (t) { tarballUrl: 'https://nodejs.org/dist/v0.10.42/node-v0.10.42-headers.tar.gz', shasumsUrl: 'https://nodejs.org/dist/v0.10.42/SHASUMS256.txt', versionDir: '0.10.42', - libUrl32: 'https://nodejs.org/dist/v0.10.42/node.lib', - libUrl64: 'https://nodejs.org/dist/v0.10.42/x64/node.lib', - libPath32: 'node.lib', - libPath64: 'x64/node.lib' + ia32: { libUrl: 'https://nodejs.org/dist/v0.10.42/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.10.42/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.10.42/arm64/node.lib', libPath: 'arm64/node.lib' } }) }) @@ -130,10 +127,9 @@ test('test process release - process.release ~ node@0.12.10', function (t) { tarballUrl: 'https://nodejs.org/dist/v0.12.10/node-v0.12.10-headers.tar.gz', shasumsUrl: 'https://nodejs.org/dist/v0.12.10/SHASUMS256.txt', versionDir: '0.12.10', - libUrl32: 'https://nodejs.org/dist/v0.12.10/node.lib', - libUrl64: 'https://nodejs.org/dist/v0.12.10/x64/node.lib', - libPath32: 'node.lib', - libPath64: 'x64/node.lib' + ia32: { libUrl: 'https://nodejs.org/dist/v0.12.10/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.12.10/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.12.10/arm64/node.lib', libPath: 'arm64/node.lib' } }) }) @@ -155,10 +151,9 @@ test('test process release - process.release ~ node@4.1.23', function (t) { tarballUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz', shasumsUrl: 'https://nodejs.org/dist/v4.1.23/SHASUMS256.txt', versionDir: '4.1.23', - libUrl32: 'https://nodejs.org/dist/v4.1.23/win-x86/node.lib', - libUrl64: 'https://nodejs.org/dist/v4.1.23/win-x64/node.lib', - libPath32: 'win-x86/node.lib', - libPath64: 'win-x64/node.lib' + ia32: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } }) }) @@ -180,135 +175,61 @@ test('test process release - process.release ~ node@4.1.23 / corp build', functi tarballUrl: 'https://some.custom.location/node-v4.1.23-headers.tar.gz', shasumsUrl: 'https://some.custom.location/SHASUMS256.txt', versionDir: '4.1.23', - libUrl32: 'https://some.custom.location/win-x86/node.lib', - libUrl64: 'https://some.custom.location/win-x64/node.lib', - libPath32: 'win-x86/node.lib', - libPath64: 'win-x64/node.lib' - }) -}) - -test('test process release - process.version = 1.8.4', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v1.8.4', null) - - t.equal(release.semver.version, '1.8.4') - delete release.semver - - t.deepEqual(release, { - version: '1.8.4', - name: 'iojs', - baseUrl: 'https://iojs.org/download/release/v1.8.4/', - tarballUrl: 'https://iojs.org/download/release/v1.8.4/iojs-v1.8.4.tar.gz', - shasumsUrl: 
'https://iojs.org/download/release/v1.8.4/SHASUMS256.txt', - versionDir: 'iojs-1.8.4', - libUrl32: 'https://iojs.org/download/release/v1.8.4/win-x86/iojs.lib', - libUrl64: 'https://iojs.org/download/release/v1.8.4/win-x64/iojs.lib', - libPath32: 'win-x86/iojs.lib', - libPath64: 'win-x64/iojs.lib' - }) -}) - -test('test process release - process.release ~ iojs@3.2.24', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v3.2.24', { - name: 'io.js', - headersUrl: 'https://iojs.org/download/release/v3.2.24/iojs-v3.2.24-headers.tar.gz' - }) - - t.equal(release.semver.version, '3.2.24') - delete release.semver - - t.deepEqual(release, { - version: '3.2.24', - name: 'iojs', - baseUrl: 'https://iojs.org/download/release/v3.2.24/', - tarballUrl: 'https://iojs.org/download/release/v3.2.24/iojs-v3.2.24-headers.tar.gz', - shasumsUrl: 'https://iojs.org/download/release/v3.2.24/SHASUMS256.txt', - versionDir: 'iojs-3.2.24', - libUrl32: 'https://iojs.org/download/release/v3.2.24/win-x86/iojs.lib', - libUrl64: 'https://iojs.org/download/release/v3.2.24/win-x64/iojs.lib', - libPath32: 'win-x86/iojs.lib', - libPath64: 'win-x64/iojs.lib' - }) -}) - -test('test process release - process.release ~ iojs@3.2.11 +libUrl32', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v3.2.11', { - name: 'io.js', - headersUrl: 'https://iojs.org/download/release/v3.2.11/iojs-v3.2.11-headers.tar.gz', - libUrl: 'https://iojs.org/download/release/v3.2.11/win-x86/iojs.lib' // custom - }) - - t.equal(release.semver.version, '3.2.11') - delete release.semver - - t.deepEqual(release, { - version: '3.2.11', - name: 'iojs', - baseUrl: 'https://iojs.org/download/release/v3.2.11/', - tarballUrl: 'https://iojs.org/download/release/v3.2.11/iojs-v3.2.11-headers.tar.gz', - shasumsUrl: 'https://iojs.org/download/release/v3.2.11/SHASUMS256.txt', - versionDir: 'iojs-3.2.11', - libUrl32: 'https://iojs.org/download/release/v3.2.11/win-x86/iojs.lib', - libUrl64: 'https://iojs.org/download/release/v3.2.11/win-x64/iojs.lib', - libPath32: 'win-x86/iojs.lib', - libPath64: 'win-x64/iojs.lib' + ia32: { libUrl: 'https://some.custom.location/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://some.custom.location/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://some.custom.location/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } }) }) -test('test process release - process.release ~ iojs@3.2.101 +libUrl64', function (t) { +test('test process release - process.release ~ node@12.8.0 Windows', function (t) { t.plan(2) - var release = processRelease([], { opts: {} }, 'v3.2.101', { - name: 'io.js', - headersUrl: 'https://iojs.org/download/release/v3.2.101/iojs-v3.2.101-headers.tar.gz', - libUrl: 'https://iojs.org/download/release/v3.2.101/win-x64/iojs.lib' // custom + var release = processRelease([], { opts: {} }, 'v12.8.0', { + name: 'node', + sourceUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0.tar.gz', + headersUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', + libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x64/node.lib' }) - t.equal(release.semver.version, '3.2.101') + t.equal(release.semver.version, '12.8.0') delete release.semver t.deepEqual(release, { - version: '3.2.101', - name: 'iojs', - baseUrl: 'https://iojs.org/download/release/v3.2.101/', - tarballUrl: 'https://iojs.org/download/release/v3.2.101/iojs-v3.2.101-headers.tar.gz', - shasumsUrl: 
'https://iojs.org/download/release/v3.2.101/SHASUMS256.txt', - versionDir: 'iojs-3.2.101', - libUrl32: 'https://iojs.org/download/release/v3.2.101/win-x86/iojs.lib', - libUrl64: 'https://iojs.org/download/release/v3.2.101/win-x64/iojs.lib', - libPath32: 'win-x86/iojs.lib', - libPath64: 'win-x64/iojs.lib' + version: '12.8.0', + name: 'node', + baseUrl: 'https://nodejs.org/download/release/v12.8.0/', + tarballUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/download/release/v12.8.0/SHASUMS256.txt', + versionDir: '12.8.0', + ia32: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } }) }) -test('test process release - process.release ~ iojs@3.3.0 - borked win-ia32', function (t) { +test('test process release - process.release ~ node@12.8.0 Windows ARM64', function (t) { t.plan(2) - var release = processRelease([], { opts: {} }, 'v3.2.101', { - name: 'io.js', - headersUrl: 'https://iojs.org/download/release/v3.2.101/iojs-v3.2.101-headers.tar.gz', - libUrl: 'https://iojs.org/download/release/v3.2.101/win-ia32/iojs.lib' // custom + var release = processRelease([], { opts: {} }, 'v12.8.0', { + name: 'node', + sourceUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0.tar.gz', + headersUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', + libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-arm64/node.lib' }) - t.equal(release.semver.version, '3.2.101') + t.equal(release.semver.version, '12.8.0') delete release.semver t.deepEqual(release, { - version: '3.2.101', - name: 'iojs', - baseUrl: 'https://iojs.org/download/release/v3.2.101/', - tarballUrl: 'https://iojs.org/download/release/v3.2.101/iojs-v3.2.101-headers.tar.gz', - shasumsUrl: 'https://iojs.org/download/release/v3.2.101/SHASUMS256.txt', - versionDir: 'iojs-3.2.101', - libUrl32: 'https://iojs.org/download/release/v3.2.101/win-x86/iojs.lib', - libUrl64: 'https://iojs.org/download/release/v3.2.101/win-x64/iojs.lib', - libPath32: 'win-x86/iojs.lib', - libPath64: 'win-x64/iojs.lib' + version: '12.8.0', + name: 'node', + baseUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/', + tarballUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', + shasumsUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/SHASUMS256.txt', + versionDir: '12.8.0', + ia32: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } }) }) @@ -330,35 +251,9 @@ test('test process release - process.release ~ node@4.1.23 --target=0.10.40', fu tarballUrl: 'https://nodejs.org/dist/v0.10.40/node-v0.10.40.tar.gz', shasumsUrl: 'https://nodejs.org/dist/v0.10.40/SHASUMS256.txt', versionDir: '0.10.40', - libUrl32: 'https://nodejs.org/dist/v0.10.40/node.lib', - libUrl64: 'https://nodejs.org/dist/v0.10.40/x64/node.lib', - libPath32: 'node.lib', - 
libPath64: 'x64/node.lib' - }) -}) - -test('test process release - process.release ~ node@4.1.23 --target=1.8.4', function (t) { - t.plan(2) - - var release = processRelease([], { opts: { target: '1.8.4' } }, 'v4.1.23', { - name: 'node', - headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' - }) - - t.equal(release.semver.version, '1.8.4') - delete release.semver - - t.deepEqual(release, { - version: '1.8.4', - name: 'iojs', - baseUrl: 'https://iojs.org/download/release/v1.8.4/', - tarballUrl: 'https://iojs.org/download/release/v1.8.4/iojs-v1.8.4.tar.gz', - shasumsUrl: 'https://iojs.org/download/release/v1.8.4/SHASUMS256.txt', - versionDir: 'iojs-1.8.4', - libUrl32: 'https://iojs.org/download/release/v1.8.4/win-x86/iojs.lib', - libUrl64: 'https://iojs.org/download/release/v1.8.4/win-x64/iojs.lib', - libPath32: 'win-x86/iojs.lib', - libPath64: 'win-x64/iojs.lib' + ia32: { libUrl: 'https://nodejs.org/dist/v0.10.40/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.10.40/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.10.40/arm64/node.lib', libPath: 'arm64/node.lib' } }) }) @@ -380,10 +275,9 @@ test('test process release - process.release ~ node@4.1.23 --dist-url=https://fo tarballUrl: 'https://foo.bar/baz/v4.1.23/node-v4.1.23-headers.tar.gz', shasumsUrl: 'https://foo.bar/baz/v4.1.23/SHASUMS256.txt', versionDir: '4.1.23', - libUrl32: 'https://foo.bar/baz/v4.1.23/win-x86/node.lib', - libUrl64: 'https://foo.bar/baz/v4.1.23/win-x64/node.lib', - libPath32: 'win-x86/node.lib', - libPath64: 'win-x64/node.lib' + ia32: { libUrl: 'https://foo.bar/baz/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://foo.bar/baz/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://foo.bar/baz/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } }) }) @@ -405,14 +299,12 @@ test('test process release - process.release ~ frankenstein@4.1.23', function (t tarballUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23-headers.tar.gz', shasumsUrl: 'https://frankensteinjs.org/dist/v4.1.23/SHASUMS256.txt', versionDir: 'frankenstein-4.1.23', - libUrl32: 'https://frankensteinjs.org/dist/v4.1.23/win-x86/frankenstein.lib', - libUrl64: 'https://frankensteinjs.org/dist/v4.1.23/win-x64/frankenstein.lib', - libPath32: 'win-x86/frankenstein.lib', - libPath64: 'win-x64/frankenstein.lib' + ia32: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-x86/frankenstein.lib', libPath: 'win-x86/frankenstein.lib' }, + x64: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-x64/frankenstein.lib', libPath: 'win-x64/frankenstein.lib' }, + arm64: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-arm64/frankenstein.lib', libPath: 'win-arm64/frankenstein.lib' } }) }) - test('test process release - process.release ~ frankenstein@4.1.23 --dist-url=http://foo.bar/baz/', function (t) { t.plan(2) @@ -431,10 +323,9 @@ test('test process release - process.release ~ frankenstein@4.1.23 --dist-url=ht tarballUrl: 'http://foo.bar/baz/v4.1.23/frankenstein-v4.1.23-headers.tar.gz', shasumsUrl: 'http://foo.bar/baz/v4.1.23/SHASUMS256.txt', versionDir: 'frankenstein-4.1.23', - libUrl32: 'http://foo.bar/baz/v4.1.23/win-x86/frankenstein.lib', - libUrl64: 'http://foo.bar/baz/v4.1.23/win-x64/frankenstein.lib', - libPath32: 'win-x86/frankenstein.lib', - libPath64: 'win-x64/frankenstein.lib' + ia32: { libUrl: 'http://foo.bar/baz/v4.1.23/win-x86/frankenstein.lib', libPath: 
'win-x86/frankenstein.lib' }, + x64: { libUrl: 'http://foo.bar/baz/v4.1.23/win-x64/frankenstein.lib', libPath: 'win-x64/frankenstein.lib' }, + arm64: { libUrl: 'http://foo.bar/baz/v4.1.23/win-arm64/frankenstein.lib', libPath: 'win-arm64/frankenstein.lib' } }) }) @@ -456,20 +347,18 @@ test('test process release - process.release ~ node@4.0.0-rc.4', function (t) { tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz', shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt', versionDir: '4.0.0-rc.4', - libUrl32: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', - libUrl64: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', - libPath32: 'win-x86/node.lib', - libPath64: 'win-x64/node.lib' + ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } }) }) - test('test process release - process.release ~ node@4.0.0-rc.4 passed as argv[0]', function (t) { t.plan(2) // note the missing 'v' on the arg, it should normalise when checking // whether we're on the default or not - var release = processRelease([ '4.0.0-rc.4' ], { opts: {} }, 'v4.0.0-rc.4', { + var release = processRelease(['4.0.0-rc.4'], { opts: {} }, 'v4.0.0-rc.4', { name: 'node', headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz' }) @@ -484,20 +373,18 @@ test('test process release - process.release ~ node@4.0.0-rc.4 passed as argv[0] tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz', shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt', versionDir: '4.0.0-rc.4', - libUrl32: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', - libUrl64: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', - libPath32: 'win-x86/node.lib', - libPath64: 'win-x64/node.lib' + ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } }) }) - test('test process release - process.release ~ node@4.0.0-rc.4 - bogus string passed as argv[0]', function (t) { t.plan(2) // additional arguments can be passed in on the commandline that should be ignored if they // are not specifying a valid version @ position 0 - var release = processRelease([ 'this is no version!' 
], { opts: {} }, 'v4.0.0-rc.4', { + var release = processRelease(['this is no version!'], { opts: {} }, 'v4.0.0-rc.4', { name: 'node', headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz' }) @@ -512,10 +399,9 @@ test('test process release - process.release ~ node@4.0.0-rc.4 - bogus string pa tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz', shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt', versionDir: '4.0.0-rc.4', - libUrl32: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', - libUrl64: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', - libPath32: 'win-x86/node.lib', - libPath64: 'win-x64/node.lib' + ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } }) }) @@ -539,99 +425,10 @@ test('test process release - NODEJS_ORG_MIRROR', function (t) { tarballUrl: 'http://foo.bar/v4.1.23/node-v4.1.23-headers.tar.gz', shasumsUrl: 'http://foo.bar/v4.1.23/SHASUMS256.txt', versionDir: '4.1.23', - libUrl32: 'http://foo.bar/v4.1.23/win-x86/node.lib', - libUrl64: 'http://foo.bar/v4.1.23/win-x64/node.lib', - libPath32: 'win-x86/node.lib', - libPath64: 'win-x64/node.lib' + ia32: { libUrl: 'http://foo.bar/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'http://foo.bar/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'http://foo.bar/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } }) delete process.env.NODEJS_ORG_MIRROR }) - -test('test process release - NVM_NODEJS_ORG_MIRROR', function (t) { - t.plan(2) - - process.env.NVM_NODEJS_ORG_MIRROR = 'http://foo.bar' - - var release = processRelease([], { opts: {} }, 'v4.1.23', { - name: 'node', - headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' - }) - - t.equal(release.semver.version, '4.1.23') - delete release.semver - - t.deepEqual(release, { - version: '4.1.23', - name: 'node', - baseUrl: 'http://foo.bar/v4.1.23/', - tarballUrl: 'http://foo.bar/v4.1.23/node-v4.1.23-headers.tar.gz', - shasumsUrl: 'http://foo.bar/v4.1.23/SHASUMS256.txt', - versionDir: '4.1.23', - libUrl32: 'http://foo.bar/v4.1.23/win-x86/node.lib', - libUrl64: 'http://foo.bar/v4.1.23/win-x64/node.lib', - libPath32: 'win-x86/node.lib', - libPath64: 'win-x64/node.lib' - }) - - delete process.env.NVM_NODEJS_ORG_MIRROR -}) - -test('test process release - IOJS_ORG_MIRROR', function (t) { - t.plan(2) - - process.env.IOJS_ORG_MIRROR = 'http://foo.bar' - - var release = processRelease([], { opts: {} }, 'v3.2.24', { - name: 'io.js', - headersUrl: 'https://iojs.org/download/release/v3.2.24/iojs-v3.2.24-headers.tar.gz' - }) - - t.equal(release.semver.version, '3.2.24') - delete release.semver - - t.deepEqual(release, { - version: '3.2.24', - name: 'iojs', - baseUrl: 'http://foo.bar/v3.2.24/', - tarballUrl: 'http://foo.bar/v3.2.24/iojs-v3.2.24-headers.tar.gz', - shasumsUrl: 'http://foo.bar/v3.2.24/SHASUMS256.txt', - versionDir: 'iojs-3.2.24', - libUrl32: 'http://foo.bar/v3.2.24/win-x86/iojs.lib', - libUrl64: 'http://foo.bar/v3.2.24/win-x64/iojs.lib', - libPath32: 'win-x86/iojs.lib', - libPath64: 'win-x64/iojs.lib' - }) - - delete process.env.IOJS_ORG_MIRROR -}) - - -test('test process release - NVM_IOJS_ORG_MIRROR', 
function (t) { - t.plan(2) - - process.env.NVM_IOJS_ORG_MIRROR = 'http://foo.bar' - - var release = processRelease([], { opts: {} }, 'v3.2.24', { - name: 'io.js', - headersUrl: 'https://iojs.org/download/release/v3.2.24/iojs-v3.2.24-headers.tar.gz' - }) - - t.equal(release.semver.version, '3.2.24') - delete release.semver - - t.deepEqual(release, { - version: '3.2.24', - name: 'iojs', - baseUrl: 'http://foo.bar/v3.2.24/', - tarballUrl: 'http://foo.bar/v3.2.24/iojs-v3.2.24-headers.tar.gz', - shasumsUrl: 'http://foo.bar/v3.2.24/SHASUMS256.txt', - versionDir: 'iojs-3.2.24', - libUrl32: 'http://foo.bar/v3.2.24/win-x86/iojs.lib', - libUrl64: 'http://foo.bar/v3.2.24/win-x64/iojs.lib', - libPath32: 'win-x86/iojs.lib', - libPath64: 'win-x64/iojs.lib' - }) - - delete process.env.NVM_IOJS_ORG_MIRROR -}) diff --git a/node_modules/normalize-package-data/README.md b/node_modules/normalize-package-data/README.md index 3772777973905..d2bd7bc7ff606 100644 --- a/node_modules/normalize-package-data/README.md +++ b/node_modules/normalize-package-data/README.md @@ -94,7 +94,7 @@ If `version` field is given, the value of the version field must be a valid *sem ### Rules for license field -The `license` field should be a valid *SPDX license expression* or one of the special values allowed by [validate-npm-package-license](https://npmjs.com/packages/validate-npm-package-license). See [documentation for the license field in package.json](https://docs.npmjs.com/files/package.json#license). +The `license` field should be a valid *SPDX license expression* or one of the special values allowed by [validate-npm-package-license](https://npmjs.com/package/validate-npm-package-license). See [documentation for the license field in package.json](https://docs.npmjs.com/files/package.json#license). 
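For illustration only, a minimal, hypothetical `package.json` (not part of this changeset) showing an accepted value — either an SPDX license expression as below, or one of the special values (`"UNLICENSED"`, or `"SEE LICENSE IN <filename>"`):

```json
{
  "name": "example-pkg",
  "version": "1.0.0",
  "license": "(MIT OR Apache-2.0)"
}
```

A non-string, empty, or whitespace-only `license` value instead falls through to the `invalidLicense` warning — the exact case tightened in the `lib/fixer.js` hunk below.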
## Credits diff --git a/node_modules/normalize-package-data/lib/fixer.js b/node_modules/normalize-package-data/lib/fixer.js index f070119674d2d..27682e9611afd 100644 --- a/node_modules/normalize-package-data/lib/fixer.js +++ b/node_modules/normalize-package-data/lib/fixer.js @@ -1,7 +1,7 @@ var semver = require("semver") var validateLicense = require('validate-npm-package-license'); var hostedGitInfo = require("hosted-git-info") -var isBuiltinModule = require("is-builtin-module") +var isBuiltinModule = require("resolve").isCore var depTypes = ["dependencies","devDependencies","optionalDependencies"] var extractDescription = require("./extract_description") var url = require("url") @@ -299,7 +299,8 @@ var fixer = module.exports = { } else{ if ( typeof(data.license) !== 'string' || - data.license.length < 1 + data.license.length < 1 || + data.license.trim() === '' ) { this.warn("invalidLicense") } else { diff --git a/node_modules/normalize-package-data/node_modules/resolve/.editorconfig b/node_modules/normalize-package-data/node_modules/resolve/.editorconfig new file mode 100644 index 0000000000000..bc228f8269443 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/.editorconfig @@ -0,0 +1,20 @@ +root = true + +[*] +indent_style = tab +indent_size = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true +max_line_length = 150 + +[CHANGELOG.md] +indent_style = space +indent_size = 2 + +[*.json] +max_line_length = off + +[Makefile] +max_line_length = off diff --git a/node_modules/node-gyp/node_modules/nopt/.npmignore b/node_modules/normalize-package-data/node_modules/resolve/.eslintignore similarity index 100% rename from node_modules/node-gyp/node_modules/nopt/.npmignore rename to node_modules/normalize-package-data/node_modules/resolve/.eslintignore diff --git a/node_modules/normalize-package-data/node_modules/resolve/.travis.yml b/node_modules/normalize-package-data/node_modules/resolve/.travis.yml new file mode 100644 index 0000000000000..768129d572eeb --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/.travis.yml @@ -0,0 +1,269 @@ +language: node_js +os: + - linux +node_js: + - "11.6" + - "10.15" + - "9.11" + - "8.15" + - "7.10" + - "6.16" + - "5.12" + - "4.9" + - "iojs-v3.3" + - "iojs-v2.5" + - "iojs-v1.8" + - "0.12" + - "0.10" + - "0.8" + - "0.6" +before_install: + - 'case "${TRAVIS_NODE_VERSION}" in 0.*) export NPM_CONFIG_STRICT_SSL=false ;; esac' + - 'nvm install-latest-npm' +install: + - 'if [ "${TRAVIS_NODE_VERSION}" = "0.6" ] || [ "${TRAVIS_NODE_VERSION}" = "0.9" ]; then nvm install --latest-npm 0.8 && npm install && nvm use "${TRAVIS_NODE_VERSION}"; else npm install; fi;' +script: + - 'if [ -n "${PRETEST-}" ]; then npm run pretest ; fi' + - 'if [ -n "${POSTTEST-}" ]; then npm run posttest ; fi' + - 'if [ -n "${COVERAGE-}" ]; then npm run coverage ; fi' + - 'if [ -n "${TEST-}" ]; then npm run tests-only ; fi' +sudo: false +env: + - TEST=true +matrix: + fast_finish: true + include: + - node_js: "lts/*" + env: PRETEST=true + - node_js: "lts/*" + env: POSTTEST=true + - node_js: "11.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "11.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "11.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "11.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "11.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "11.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.14" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.13" + env: TEST=true 
ALLOW_FAILURE=true + - node_js: "10.12" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "10.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "9.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.14" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.13" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.12" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.15" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.14" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.13" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.12" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.1" + env: TEST=true 
ALLOW_FAILURE=true + - node_js: "6.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.4" + env: TEST=true ALLOW_FAILURE=true + allow_failures: + - os: osx + - env: TEST=true ALLOW_FAILURE=true + - node_js: "0.6" diff --git a/node_modules/normalize-package-data/node_modules/resolve/CHANGELOG.md b/node_modules/normalize-package-data/node_modules/resolve/CHANGELOG.md new file mode 100644 index 0000000000000..832ee0272043e --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/CHANGELOG.md @@ -0,0 +1,629 @@ +### Changelog + +All notable changes to this project will be documented in this file. Dates are displayed in UTC. 
+ +#### [Unreleased](https://github.com/browserify/resolve/compare/v1.9.0...HEAD) + +- [Fix] `sync`/`async`: when package.json `main` is not a string, throw an error ([`#178`][]) +- [Tests] up to `v11.6`, `v10.15`, `v8.15`, `v6.16` (([`083e78c`][]) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape` (([`29a4994`][]) +- [Tests] add an additional test (([`2c67936`][]) + +[`083e78c`]: https://github.com/browserify/resolve/commit/083e78c1ae5c1708b7d41c9ad7c608caffeddcbf +[`29a4994`]: https://github.com/browserify/resolve/commit/29a499418d54b5befe9deef1bc7c38a9174cfbd8 +[`2c67936`]: https://github.com/browserify/resolve/commit/2c679363e852f7a0d570593527ea7038f0cd2c19 + +#### [v1.9.0](https://github.com/browserify/resolve/compare/v1.8.1...v1.9.0) - 17 December 2018 + +- [Fix] `sync`/`async`: fix `preserveSymlinks` option ([`#177`][]) +- [Fix] `sync`/`async`: when package.json `main` is not a string, throw an error ([`#178`][]) +- [Refactor] `node-modules-paths`: Change `paths` function option to receive a thunk for node modules resolution paths (([`d652f01`][]) +- [Tests] up to `node` `v11.4`, `v10.14`, `v8.14`, `v6.15` (([`2b4f3a8`][]) +- [New] `async`/`sync`/`node-modules-paths`: Adds support for “paths” being a function (([`7112873`][]) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `object-keys`, `safe-publish-latest`, `tape` (([`5542700`][]) +- [New] Implements a "normalize-options" pseudo-hook (([`f3961df`][]) +- [Tests] better failure messages (([`f839d20`][]) +- [Deps] update `path-parse` (([`1018c0e`][]) + +[`d652f01`]: https://github.com/browserify/resolve/commit/d652f018b2561f4863ffcd0f3ecdb0dfe65ee223 +[`2b4f3a8`]: https://github.com/browserify/resolve/commit/2b4f3a898a3943e45cdff539b542c4ebee2b608a +[`7112873`]: https://github.com/browserify/resolve/commit/711287339aad544788a4b8b5335221cea645572c +[`5542700`]: https://github.com/browserify/resolve/commit/554270035e1997ae34865500c629888249baa304 +[`f3961df`]: https://github.com/browserify/resolve/commit/f3961dfcb7b2993d935c255e65309e7028a88b8d +[`f839d20`]: https://github.com/browserify/resolve/commit/f839d20ab16ef814214d80183452d02379cbbf15 +[`1018c0e`]: https://github.com/browserify/resolve/commit/1018c0e49851bfb62176d8adbc94125ae85cd158 + +#### [v1.8.1](https://github.com/browserify/resolve/compare/v1.8.0...v1.8.1) - 17 June 2018 + +- [Docs] clean up readme code (([`f5394d8`][]) +- [Fix] resolution when `filename` option is passed (([`9c370c9`][]) +- [Tests] up to `node` `v10.4` (([`3a64219`][]) +- [Tests] improve output of symlink tests that fail on Mac (([`6f771b2`][]) + +[`f5394d8`]: https://github.com/browserify/resolve/commit/f5394d801350ff32be08dfc5ca37bcb677b4c08b +[`9c370c9`]: https://github.com/browserify/resolve/commit/9c370c9848eaecb36fb8e0b004930e2dd49e1e71 +[`3a64219`]: https://github.com/browserify/resolve/commit/3a64219a7385d5d51f3d4ff7b3de0ce749d6cf09 +[`6f771b2`]: https://github.com/browserify/resolve/commit/6f771b215b4f40b0ba0009ef564bde85212e79eb + +#### [v1.8.0](https://github.com/browserify/resolve/compare/v1.7.1...v1.8.0) - 15 June 2018 + +- [New] include filename in error message ([`#162`][]) +- [Tests] up to `node` `v10.1`, `v9.11`, `v8.11`, `v6.14`, `4.9` (([`ad16af2`][]) +- Fix eslint problems and update count of tests (([`def5931`][]) +- [New] add fs/promises to the list of core modules (([`756419a`][]) +- [New] core: add `trace_events`, `v8/tools/arguments` (([`bae0338`][]) +- [Fix] core: `_tls_legacy` is removed in node 10 (([`4225ac5`][]) + +[`#162`]: 
https://github.com/browserify/resolve/pull/162 +[`ad16af2`]: https://github.com/browserify/resolve/commit/ad16af2f4f6eb1dc964f5b119f6d94bd64b2607a +[`def5931`]: https://github.com/browserify/resolve/commit/def59317704d787adcddc9695b923e65c6bf5232 +[`756419a`]: https://github.com/browserify/resolve/commit/756419a94432fd753a62f5a58b797776efb543f9 +[`bae0338`]: https://github.com/browserify/resolve/commit/bae033824c82153ccb4f32abdd0e70ca677968bc +[`4225ac5`]: https://github.com/browserify/resolve/commit/4225ac5f4b90d26db664ed32f5b08416fea69b86 + +#### [v1.7.1](https://github.com/browserify/resolve/compare/v1.7.0...v1.7.1) - 12 April 2018 + +- [Fix] revert proper but unintended breaking change in sync packageFilter ([`#157`][]) + +#### [v1.7.0](https://github.com/browserify/resolve/compare/v1.6.0...v1.7.0) - 7 April 2018 + +- [Fix] Make loadAsFileSync() work the same as async loadAsFile() ([`#146`][]) +- [Tests] add more pathfilter tests (([`c3621a3`][]) +- [Tests] add some tests for browser field (([`13fb572`][]) +- [Refactor] cache default isFile functions at module level (([`fa6e6f5`][]) +- [Docs] fix default “isFile” implementations (([`0f29c93`][]) +- [Tests] add some tests for a non-directory basedir (([`0c18e40`][]) +- [Refactor] use "basedir" instead of "y", because meaningful variable names (([`876b0b0`][]) +- [Docs] fix options formatting (([`23df5f5`][]) +- Minor cleanup (([`c449d48`][]) +- [Fix] support `opts.package` in non-relative lookups (([`c8a2052`][]) +- [Tests] work around npm SSL issue (([`04cb0bb`][]) +- [Tests] add node 8 and 9 to appveyor (([`7cbd17a`][]) +- [Tests] work around npm SSL issue (([`4b10996`][]) + +[`#146`]: https://github.com/browserify/resolve/pull/146 +[`c3621a3`]: https://github.com/browserify/resolve/commit/c3621a35675b275b2b241dd367459ed7afe1c22a +[`13fb572`]: https://github.com/browserify/resolve/commit/13fb572337623622d06450696af6c15b68be26c3 +[`fa6e6f5`]: https://github.com/browserify/resolve/commit/fa6e6f5a2d34377f6973701733177a280adf0511 +[`0f29c93`]: https://github.com/browserify/resolve/commit/0f29c93f0c74fc4e52ec6ed6678ce0fec6347e2d +[`0c18e40`]: https://github.com/browserify/resolve/commit/0c18e40e4929ba2c9426a77079c153c43e50a025 +[`876b0b0`]: https://github.com/browserify/resolve/commit/876b0b08da9fe44d81681d0c815900485536be9e +[`23df5f5`]: https://github.com/browserify/resolve/commit/23df5f526823e27e33b01333016b7f58b4f63b6f +[`c449d48`]: https://github.com/browserify/resolve/commit/c449d4809cf8461a3d54e458780902b95119a969 +[`c8a2052`]: https://github.com/browserify/resolve/commit/c8a20524c7d08671c22903e70b952575b0502f7b +[`04cb0bb`]: https://github.com/browserify/resolve/commit/04cb0bb94628e560bfa4163e73637d3803591714 +[`7cbd17a`]: https://github.com/browserify/resolve/commit/7cbd17ae270f9ec24ef05779c3a5e9da3e75c598 +[`4b10996`]: https://github.com/browserify/resolve/commit/4b1099668477e28117c34f9db3509ff096a49190 + +#### [v1.6.0](https://github.com/browserify/resolve/compare/v1.5.0...v1.6.0) - 20 March 2018 + +- [New] add `async_hooks` core module, added in node 8 ([`#144`][]) +- [New] add many missing core modules. (([`88c0778`][]) +- Made loadAsFileSync() work the same as async loadAsFile(). 
(([`dc23387`][]) +- [Tests] up to `v9.8`, `v8.10`, `v6.13` (([`315d729`][]) +- [Tests] up to `node` `v9.3`, `v8.8`, `v6.12`; pin included builds to LTS (([`5091aa2`][]) +- [Tests] add a failing test (([`90b1192`][]) +- [Dev Deps] update `eslint`, `tape` (([`2acf953`][]) +- [Tests] restore node 0.6 (([`2764758`][]) +- [Dev Deps] update `eslint` (([`699a54e`][]) +- [Dev Deps] update `eslint` (([`2674fad`][]) + +[`88c0778`]: https://github.com/browserify/resolve/commit/88c0778be359caaeb4ca74b24a7b5f7903bc39e8 +[`dc23387`]: https://github.com/browserify/resolve/commit/dc23387adb93f497d67def7ee99fae48e5958fb3 +[`315d729`]: https://github.com/browserify/resolve/commit/315d729afe7074ffae5d6ca6509a73d747985d45 +[`5091aa2`]: https://github.com/browserify/resolve/commit/5091aa2c076b67ff762937401e81da66ef7988ca +[`90b1192`]: https://github.com/browserify/resolve/commit/90b11921181c2783209e9aa31f1e20d98c11ed17 +[`2acf953`]: https://github.com/browserify/resolve/commit/2acf953ce2a94b38528372b5f8848ac95a2aabe5 +[`2764758`]: https://github.com/browserify/resolve/commit/2764758aae576aef98f41af5d46f76ada3523012 +[`699a54e`]: https://github.com/browserify/resolve/commit/699a54e91222dc8b3e1f0af8e9859c734d99d50a +[`2674fad`]: https://github.com/browserify/resolve/commit/2674fadcfcf2b253fdcf5e9d8564fd2b23b0b57c + +#### [v1.5.0](https://github.com/browserify/resolve/compare/v1.4.0...v1.5.0) - 24 October 2017 + +- [New] node v8.8+ supports `http2` ([`#139`][]) +- [Fix] fix broken core tests; change core.json to be an object instead of an array; fix results (([`b826f30`][]) +- [Tests] up to `v8.4`; node 0.6 is failing due to travis-ci changes; allow it to fail for now. (([`e9d3a24`][]) +- [Tests] up to `node` `8.7`; use `nvm install-latest-npm` so new npm doesn’t break old node (([`d0de222`][]) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape` (([`76f28a3`][]) +- [Tests] on `node` `v8.8` (([`e0c5d51`][]) +- [Docs] update repo URL (([`3412f98`][]) +- [New] add `perf_hooks`, added in node v8.5 (([`e66117d`][]) +- [Dev Deps] update `eslint` (([`5bfb072`][]) + +[`b826f30`]: https://github.com/browserify/resolve/commit/b826f3007dc8903b95e39984f93c68bb5e4c85b9 +[`e9d3a24`]: https://github.com/browserify/resolve/commit/e9d3a24ae0a4d8e3eefc6431c918c23f7c8fc6d3 +[`d0de222`]: https://github.com/browserify/resolve/commit/d0de222e4b55b67224ddec0421ee66ce8cb5ee8d +[`76f28a3`]: https://github.com/browserify/resolve/commit/76f28a3d275a63b0511449d28900ab5749f27fa5 +[`e0c5d51`]: https://github.com/browserify/resolve/commit/e0c5d518abfaadc4107ca8f3f8c30caf46490444 +[`3412f98`]: https://github.com/browserify/resolve/commit/3412f984a03a345b9a5ef1f0642a0308d676a2c2 +[`e66117d`]: https://github.com/browserify/resolve/commit/e66117df49d9f967b46fde633770307c9d5a7066 +[`5bfb072`]: https://github.com/browserify/resolve/commit/5bfb072f152c77c8247f4c06c1efa9246bbdddb0 + +#### [v1.4.0](https://github.com/browserify/resolve/compare/v1.3.3...v1.4.0) - 26 July 2017 + +- [New]: add `preserveSymlinks` option ([`#130`][]) +- [Fix] `sync`: fix when package.json main = ‘.’ or main = ‘./‘ ([`#125`][]) +- [Tests] up to `node` `v8.2`, `v7.10`, `v6.11`; npm 4.6+ breaks on node < 4 (([`41a3604`][]) +- [Tests] fix 0.6 and linting (([`703517b`][]) +- Only apps should have lockfiles (([`11fb3d8`][]) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape` (([`bc2f7bf`][]) + +[`41a3604`]: https://github.com/browserify/resolve/commit/41a3604f6408dbe9693febf895251db924c87a8f +[`703517b`]: 
https://github.com/browserify/resolve/commit/703517b78e7e0f8093a79c0a7a413a708ac82d06 +[`11fb3d8`]: https://github.com/browserify/resolve/commit/11fb3d85bb107a24476bd8d764ba25b3c60c184a +[`bc2f7bf`]: https://github.com/browserify/resolve/commit/bc2f7bf29d172fa54d66cf909fb47a858f7765aa + +#### [v1.3.3](https://github.com/browserify/resolve/compare/v1.3.2...v1.3.3) - 20 April 2017 + +- [Fix] error code MODULE_NOT_FOUND instead of ENOTDIR ([`#121`][]) +- [Tests] [eslint] add `npm run lint` (([`3677928`][]) +- [Tests] up to `node` `v7.7`, `v6.10`, `v4.8`; comment out OSX builds since they block linux builds. (([`1d3883c`][]) +- [Fix] correctly resolve dir paths when file with the same name exists (([`a983d38`][]) +- [Tests] up to `node` `v7.9` (([`0da055c`][]) +- [Tests] improve failure scenarios. (([`1de578f`][]) +- [Fix] `sync`: ensure that the path is a string, explicitly. (([`b7ba83d`][]) +- [Dev Deps] update `eslint` (([`452fdf9`][]) +- [Tests] node 0.6 can’t support an npm that understands scoped packages (([`26369cf`][]) + +[`3677928`]: https://github.com/browserify/resolve/commit/36779282881ec4abce32b2c9b7f7b10bcd09d953 +[`1d3883c`]: https://github.com/browserify/resolve/commit/1d3883c40d55242d7dfeafa43fa782dc6f4ab4a6 +[`a983d38`]: https://github.com/browserify/resolve/commit/a983d38c47ea26e57e0824f22929985ecb24faca +[`0da055c`]: https://github.com/browserify/resolve/commit/0da055cc75bebd7e0044cd4184e7c5386a7bd7de +[`1de578f`]: https://github.com/browserify/resolve/commit/1de578f2879f83ba94789041420fd3d3b929127e +[`b7ba83d`]: https://github.com/browserify/resolve/commit/b7ba83d43519c3c77af823ef1badd7f452d8b8e3 +[`452fdf9`]: https://github.com/browserify/resolve/commit/452fdf981330f96d7fef88805b24e40ea24a89e1 +[`26369cf`]: https://github.com/browserify/resolve/commit/26369cfe6ce4eae7404f3c003c88618f098d6814 + +#### [v1.3.2](https://github.com/browserify/resolve/compare/v1.2.1...v1.3.2) - 26 February 2017 + +- Fix prepublish script. (([`1aa1d9d`][]) + +[`1aa1d9d`]: https://github.com/browserify/resolve/commit/1aa1d9d9adc60691431efbde8d915c143cd54916 + +#### [v1.3.1](https://github.com/browserify/resolve/compare/v1.3.0...v1.3.1) - 24 February 2017 + +- Revert "[New] add searched extensions to error messages" (([`68a081d`][]) + +[`68a081d`]: https://github.com/browserify/resolve/commit/68a081d1c7ff6e0fb58aeff4b6ac06aada7812c4 + +#### [v1.3.0](https://github.com/browserify/resolve/compare/v1.2.0...v1.3.0) - 24 February 2017 + +#### [v1.2.1](https://github.com/browserify/resolve/compare/v1.3.1...v1.2.1) - 26 February 2017 + +- [Fix] for browserify compat, do not assume `process.versions.node` exists. ([`#120`][]) +- [Fix] for browserify compat, do not assume `process.versions.node` exists. ([`#120`][]) + +#### [v1.2.0](https://github.com/browserify/resolve/compare/v1.1.7...v1.2.0) - 13 December 2016 + +- [Fix] `resolve.sync` should re-throw non `ENOENT errors. ([`#79`][]) +- [New] add missing core modules, and determine them dynamically by node version. ([`#100`][][`#110`][][`#111`][][`#112`][]) +- [Tests] test on every minor version of node. 
([`#109`][][`#75`][][`#74`][][`#70`][]) +- code style: tabs → spaces (([`0ab33b2`][]) +- [Dev Deps] add `safe-publish-latest` (([`83c25dd`][]) +- [Fix] Create error outside process.nextTick (([`3fa5f02`][]) +- readme: update API docs link for require.resolve() (([`7e98547`][]) +- [Dev Deps] update `tape` (([`764f3a2`][]) +- gitignore node_modules (([`3e8a8da`][]) + +[`0ab33b2`]: https://github.com/browserify/resolve/commit/0ab33b29b814e030021ff2df391e60a1c52dcc46 +[`83c25dd`]: https://github.com/browserify/resolve/commit/83c25dde8aa5a663bc3863d946fdc62fab5fd080 +[`3fa5f02`]: https://github.com/browserify/resolve/commit/3fa5f02f2ace0683fbd42196619c4e2bbd9eef60 +[`7e98547`]: https://github.com/browserify/resolve/commit/7e98547319f1dada4f26d7a24f3b92a08f85c56b +[`764f3a2`]: https://github.com/browserify/resolve/commit/764f3a231c26c370c4e6b94f0bb10166c20551b7 +[`3e8a8da`]: https://github.com/browserify/resolve/commit/3e8a8da3c9d545e00e15f5bed24623eb134b2221 + +#### [v1.1.7](https://github.com/browserify/resolve/compare/v1.1.6...v1.1.7) - 24 January 2016 + +- (typo) Change againt to against ([`#83`][]) +- Fix node_modules paths on Windows (([`35b2b64`][]) + +[`#83`]: https://github.com/browserify/resolve/pull/83 +[`35b2b64`]: https://github.com/browserify/resolve/commit/35b2b642d91e9b81e7cc26b6fd19912e18901d55 + +#### [v1.1.6](https://github.com/browserify/resolve/compare/v1.1.5...v1.1.6) - 15 March 2015 + +- Use path.dirname to walk up looking for a package.json ([`#76`][]) +- add back pkg assertions to pick up the root package (([`4c25e45`][]) + +[`4c25e45`]: https://github.com/browserify/resolve/commit/4c25e45625fea7980463fc107fc843aab7e0d993 + +#### [v1.1.5](https://github.com/browserify/resolve/compare/v1.1.4...v1.1.5) - 21 February 2015 + +- another test, not quite the failing case (([`612cac2`][]) +- fix for the failing case (([`503c746`][]) + +[`612cac2`]: https://github.com/browserify/resolve/commit/612cac2beac41fb13b7b12a9dfdb4207391260c1 +[`503c746`]: https://github.com/browserify/resolve/commit/503c746a6e64d50f2c9b18b4476ffcfed49947f2 + +#### [v1.1.4](https://github.com/browserify/resolve/compare/v1.1.3...v1.1.4) - 20 February 2015 + +- finally seems to fully handle browser field from outside foo/bar resolution (([`5b737d5`][]) +- flatter nodeModules function (([`5ebb39a`][]) + +[`5b737d5`]: https://github.com/browserify/resolve/commit/5b737d58b38ce891ef3f06d600d0562dbbc8539c +[`5ebb39a`]: https://github.com/browserify/resolve/commit/5ebb39a19b62c052ff6201600c3d2fffb3f5fdcb + +#### [v1.1.3](https://github.com/browserify/resolve/compare/v1.1.2...v1.1.3) - 17 February 2015 + +- re-implemented pathfilter feature nearly passes the test (([`60ff554`][]) +- another precedence test (([`98d22e0`][]) +- move pathfilter test to its own file (([`90826f5`][]) +- path logic fix that seems to handle all the cases across this package and browserify (([`70146a5`][]) +- tape everywhere (([`47bbfcd`][]) +- move pathfilter files into their own dir (([`7f0a3f1`][]) +- failing precedence test (([`73e958e`][]) +- nearly nearly working (([`e7bffbf`][]) +- packageFilter should have been giving the pkgfile as an argument, fixed (([`70b71e7`][]) +- this fixes the directory precedence problem (([`caca9f9`][]) +- disable faulty basedir test except on windows for now (([`3be4b79`][]) +- passes pathfilter test (([`644f814`][]) +- fix node_path test, was clearly wrong for some reason (([`9aa36e7`][]) + +[`60ff554`]: https://github.com/browserify/resolve/commit/60ff5545ec3cd15367c89c08cf3f139fa9c23796 
+[`98d22e0`]: https://github.com/browserify/resolve/commit/98d22e0e21dd57fe1ab8d9573c1f63903c2b7321 +[`90826f5`]: https://github.com/browserify/resolve/commit/90826f575fe37cb3852de17e764b62e3754484b2 +[`70146a5`]: https://github.com/browserify/resolve/commit/70146a5ebc4d96438383ada02785d4e722c6f5d9 +[`47bbfcd`]: https://github.com/browserify/resolve/commit/47bbfcd9d9c8a68ce97fa37e0563930cee67093d +[`7f0a3f1`]: https://github.com/browserify/resolve/commit/7f0a3f1545f4b53f1bdd099b67561f9516693325 +[`73e958e`]: https://github.com/browserify/resolve/commit/73e958e905eed000787f0596f81c212ca2cdb3b3 +[`e7bffbf`]: https://github.com/browserify/resolve/commit/e7bffbf1b39b6239732c0e7fb01eeb9dad605d15 +[`70b71e7`]: https://github.com/browserify/resolve/commit/70b71e7980b3235018a0f5ac0bd52b8393548beb +[`caca9f9`]: https://github.com/browserify/resolve/commit/caca9f9c3576c85d8972d25012ea5d12aeaa50f4 +[`3be4b79`]: https://github.com/browserify/resolve/commit/3be4b796f1a9aadfb293b36c0c7f781ca9169f09 +[`644f814`]: https://github.com/browserify/resolve/commit/644f81478c892874f9829aa6cca36ca72474db00 +[`9aa36e7`]: https://github.com/browserify/resolve/commit/9aa36e77eca50e177498984fdef5d564903d3964 + +#### [v1.1.2](https://github.com/browserify/resolve/compare/v1.1.0...v1.1.2) - 16 February 2015 + +- Adding pathFilter docs ([`#67`][]) +- adding pathFilter docs (([`44480ff`][]) + +[`#67`]: https://github.com/browserify/resolve/pull/67 +[`44480ff`]: https://github.com/browserify/resolve/commit/44480ff041f791f32b80d212302180be210901a1 + +#### [v1.1.0](https://github.com/browserify/resolve/compare/v1.0.0...v1.1.0) - 27 January 2015 + +- Update docs re: input and cb args. ([`#65`][]) +- Update main README--change word order for clarity ([`#55`][]) +- attempts to find package.json data for deep references https://github.com/substack/node-resolve/issues/62 (([`caff2ba`][]) +- formatting (([`b8d09e3`][]) +- Add failing test for parent filename in error msg. (([`96d38c6`][]) +- split before computing the pivot to prevent abcnode_modulesxyz from matching (([`10380e1`][]) +- Utilize opts.filename when available to ID parent. 
(([`f6edcd9`][]) + +[`#65`]: https://github.com/browserify/resolve/pull/65 +[`#55`]: https://github.com/browserify/resolve/pull/55 +[`caff2ba`]: https://github.com/browserify/resolve/commit/caff2ba60dc5d85eaded388dc6025afd05ba183b +[`b8d09e3`]: https://github.com/browserify/resolve/commit/b8d09e3a2d679f6b61515d49eca3f6d8d0d2ac7f +[`96d38c6`]: https://github.com/browserify/resolve/commit/96d38c6aaa575d12781c28b34243b4939359a335 +[`10380e1`]: https://github.com/browserify/resolve/commit/10380e16d3cf03f25941c3f1545ef73ed11bc1e1 +[`f6edcd9`]: https://github.com/browserify/resolve/commit/f6edcd95ad5d27bfbdee0fa51951aa3d45d77cba + +### [v1.0.0](https://github.com/browserify/resolve/compare/v0.7.4...v1.0.0) - 11 August 2014 + +- reformat package.json (([`695bbc1`][]) + +[`695bbc1`]: https://github.com/browserify/resolve/commit/695bbc1d9eeb35339b4a01e141c6f6e1dff3a6e3 + +#### [v0.7.4](https://github.com/browserify/resolve/compare/v0.7.3...v0.7.4) - 25 July 2014 + +- merged (([`5cae82f`][]) + +[`5cae82f`]: https://github.com/browserify/resolve/commit/5cae82fb22cb64d5b72f703c787dc0fd418ed412 + +#### [v0.7.3](https://github.com/browserify/resolve/compare/v0.7.2...v0.7.3) - 25 July 2014 + +- cb(err) for non-string args (([`965c70b`][]) + +[`965c70b`]: https://github.com/browserify/resolve/commit/965c70b27ff796fc0ac3dba186d95b61d82446df + +#### [v0.7.2](https://github.com/browserify/resolve/compare/v0.7.1...v0.7.2) - 25 July 2014 + +- failing dotdot test (([`3ee0f0e`][]) +- fixes for dotdot tests (([`a67f230`][]) +- failing sync dotdot test (([`55515e7`][]) + +[`3ee0f0e`]: https://github.com/browserify/resolve/commit/3ee0f0eb97971d246a4a3f183374f60938f1ca8a +[`a67f230`]: https://github.com/browserify/resolve/commit/a67f230133050568ca14a04c0d36aaf6bf14fa89 +[`55515e7`]: https://github.com/browserify/resolve/commit/55515e7f816571fb9d71fdd6d0f012185b2eeefb + +#### [v0.7.1](https://github.com/browserify/resolve/compare/v0.7.0...v0.7.1) - 9 June 2014 + +- [Fix] `node-modules-paths`: `opts` should be optional, and `opts.paths` should not be concatenated when omitted. ([`#96`][]) +- [Refactor] consistent spacing and quotes; run some basic linting manually. (([`f63faaf`][]) +- [Tests] use `path.join` more often to normalize paths across OS’s. (([`8280c53`][]) +- [Tests] use `path` methods to make tests pass on both linux and Windows. (([`af9a885`][]) +- [Tests] make matrix more efficient (([`7f0ce87`][]) +- [Tests] fix indentation, manual linting. (([`6984dcb`][]) +- [Tests] [Refactor] refactor `node-modules-paths` and add tests. (([`58b99a3`][]) +- [Tests] add `appveyor` (([`caffe35`][]) +- [new] Add err.code = 'MODULE_NOT_FOUND' (([`c622aef`][]) +- [New] add searched extensions to error messages (([`1260d9d`][]) +- node-modules-paths: absolutize the `start` path (([`9d6b7af`][]) +- [Refactor] `async`: remove unnecessary slashes, since `path.join` adds them. 
(([`dd50615`][]) +- [Tests] ensure node_path test is independent of the `tap` module’s “main” (([`ddca9ed`][]) + +[`f63faaf`]: https://github.com/browserify/resolve/commit/f63faaf9df5dbd8da388c674de0b14e3286e5e91 +[`8280c53`]: https://github.com/browserify/resolve/commit/8280c53eae6b612f586e133052ed2b2a56ae6649 +[`af9a885`]: https://github.com/browserify/resolve/commit/af9a8858a618ab64dd4bb311ef1be37822ade2b7 +[`7f0ce87`]: https://github.com/browserify/resolve/commit/7f0ce871b6d2b5cb2082b04cd72ddd4055cb7a05 +[`6984dcb`]: https://github.com/browserify/resolve/commit/6984dcb1407fec6af46f744ad2c63f502645bdd6 +[`58b99a3`]: https://github.com/browserify/resolve/commit/58b99a36f882d7ee65df725224f204abd27379db +[`caffe35`]: https://github.com/browserify/resolve/commit/caffe358566bb3c2f9b4cbd8c0f910debfb6df3b +[`c622aef`]: https://github.com/browserify/resolve/commit/c622aefeb286e479d536601e30bb828e69f86ec3 +[`1260d9d`]: https://github.com/browserify/resolve/commit/1260d9d1e2f55efb514540db9aa1b3d679f9db10 +[`9d6b7af`]: https://github.com/browserify/resolve/commit/9d6b7af28c054676d6ea8a5037353ed750ea13bb +[`dd50615`]: https://github.com/browserify/resolve/commit/dd506158089f7d071d2a9f61cd4385365d177219 +[`ddca9ed`]: https://github.com/browserify/resolve/commit/ddca9ed7e1d980d5ec561450875cb09463effd5a + +#### [v0.7.0](https://github.com/browserify/resolve/compare/v0.6.3...v0.7.0) - 17 May 2014 + +- array opts.moduleDirectory tests (([`0f6d088`][]) +- opts.moduleDirectory string tests (([`a15ffd6`][]) +- Support more than one directory in opts.moduleDirectory. (([`4183463`][]) +- formatting (([`b89f089`][]) +- Remove variable leftover from 325584a685 (([`12fa78c`][]) + +[`0f6d088`]: https://github.com/browserify/resolve/commit/0f6d08801db6bc2044df8767226421172a2d9461 +[`a15ffd6`]: https://github.com/browserify/resolve/commit/a15ffd6c20772831c41146189c117ab0a0650e0b +[`4183463`]: https://github.com/browserify/resolve/commit/41834633e84d76d86297968ba34c375f26fe4f08 +[`b89f089`]: https://github.com/browserify/resolve/commit/b89f08902e8551e07d66e81a3dc33840e24266c5 +[`12fa78c`]: https://github.com/browserify/resolve/commit/12fa78ce43c4363e1c9600b635d18cd295c6949f + +#### [v0.6.3](https://github.com/browserify/resolve/compare/v0.6.2...v0.6.3) - 16 April 2014 + +- Fixed the case when main is specified as "." or "./" causing the resolve to infinite loop as documented at https://github.com/substack/node-browserify/issues/732. 
(([`b11f273`][]) + +[`b11f273`]: https://github.com/browserify/resolve/commit/b11f2739ad8c9730e1076271eff54850755e2ee1 + +#### [v0.6.2](https://github.com/browserify/resolve/compare/v0.6.1...v0.6.2) - 21 March 2014 + +- passing tests for paths (([`4f56bb6`][]) +- faulty basedir does not always produce error properly in windows, because when the dirs are sliced down the final path has improper prefix, causing it to load relative to cwd (([`110168a`][]) + +[`4f56bb6`]: https://github.com/browserify/resolve/commit/4f56bb67fa45d35adfa6a0022cc77afbf8117234 +[`110168a`]: https://github.com/browserify/resolve/commit/110168adae1dfbedcb9a12086cacf0ce68cc67f6 + +#### [v0.6.1](https://github.com/browserify/resolve/compare/v0.6.0...v0.6.1) - 27 November 2013 + +- merged the context error patches (([`8408e6e`][]) + +[`8408e6e`]: https://github.com/browserify/resolve/commit/8408e6e8902b4bec8c859d606f53366e42058378 + +#### [v0.6.0](https://github.com/browserify/resolve/compare/v0.5.1...v0.6.0) - 26 November 2013 + +- fixes #25: resolve modules with the same name as node stdlib modules ([`#25`][]) + +#### [v0.5.1](https://github.com/browserify/resolve/compare/v0.5.0...v0.5.1) - 22 September 2013 + +- Separate duplicated nodeModulesPaths function (([`325584a`][]) +- Fix prefix for windows azure (([`b5ba043`][]) + +[`325584a`]: https://github.com/browserify/resolve/commit/325584a685db8f42aae3d4876ffbe64069233601 +[`b5ba043`]: https://github.com/browserify/resolve/commit/b5ba0430b012d93367a4f87c304f1d4c8c22941c + +#### [v0.5.0](https://github.com/browserify/resolve/compare/v0.4.3...v0.5.0) - 2 September 2013 + +- opts.modules => opts.moduleDirectory, documented (([`c46593d`][]) +- modules folder name is configurable (([`d65a422`][]) + +[`c46593d`]: https://github.com/browserify/resolve/commit/c46593de74b256196d7ea12c85422698652cff10 +[`d65a422`]: https://github.com/browserify/resolve/commit/d65a42238101ea284ddafb788debdad0e5a59504 + +#### [v0.4.3](https://github.com/browserify/resolve/compare/v0.4.2...v0.4.3) - 7 August 2013 + +- Fix default basedir calculation (([`cd7169b`][]) +- use getCaller() in both async and sync versions (([`20f8945`][]) + +[`cd7169b`]: https://github.com/browserify/resolve/commit/cd7169b204b9f474b6a924adf47564f33a469f07 +[`20f8945`]: https://github.com/browserify/resolve/commit/20f89456f7fc1d8e51b95ec1ab38b1ac154d9fc4 + +#### [v0.4.2](https://github.com/browserify/resolve/compare/v0.4.1...v0.4.2) - 3 August 2013 + +- Failing test case for pkg.main pointing to a directory. (([`b57a75a`][]) +- Fix for failing test case where pkg.main points to directory. (([`8c4078c`][]) + +[`b57a75a`]: https://github.com/browserify/resolve/commit/b57a75aefc394ead20d54ed107741f1f7151b90f +[`8c4078c`]: https://github.com/browserify/resolve/commit/8c4078c9dd45c6a92f1f409d70aaccc95be3bfc6 + +#### [v0.4.1](https://github.com/browserify/resolve/compare/v0.4.0...v0.4.1) - 30 July 2013 + +- adding tests to reproduce the problem (([`ad3a477`][]) +- async resolve now falls back to 'index.js' if main field in package.json is incorrect (([`62a5726`][]) + +[`ad3a477`]: https://github.com/browserify/resolve/commit/ad3a4772ddd7187ff38cb56e00635b37a491e1fa +[`62a5726`]: https://github.com/browserify/resolve/commit/62a572635f21bf1c28360ea5c2238be62736429b + +#### [v0.4.0](https://github.com/browserify/resolve/compare/v0.3.1...v0.4.0) - 9 June 2013 + +- Implement async support for returning package a module was resolved from. (([`b7b2806`][]) +- Document package option. 
(([`7f84028`][]) + +[`b7b2806`]: https://github.com/browserify/resolve/commit/b7b28069acb7c749a2053dbb0c8d606515954694 +[`7f84028`]: https://github.com/browserify/resolve/commit/7f8402881b725938cfaf1d4835ec2fb6cee4862d + +#### [v0.3.1](https://github.com/browserify/resolve/compare/v0.3.0...v0.3.1) - 29 March 2013 + +- use isFIFO() instead to more narrowly target <() usage (([`790cdf5`][]) +- check !isDirectory() instead of isFile() so that <(echo "beep") inline bash fds work (([`c396065`][]) + +[`790cdf5`]: https://github.com/browserify/resolve/commit/790cdf5ab7c92bb146e8ace05ba0b26c5f51ffb3 +[`c396065`]: https://github.com/browserify/resolve/commit/c3960650f1a1417e52238011e08a6da2b0d9fee4 + +#### [v0.3.0](https://github.com/browserify/resolve/compare/v0.2.8...v0.3.0) - 19 February 2013 + +- failing translated async test with parameterized readFile on account of 3-arg form (([`7033bbb`][]) +- factor out .sync into lib/sync.js (([`ba7038a`][]) +- updated the docs for async (([`34a958e`][]) +- first async test passes (([`e427ca8`][]) +- sync parity with async tests (([`d1191a9`][]) +- stub out async (([`f4b02e3`][]) +- factor out core into lib/ (([`a800954`][]) +- synchronous example (([`3534992`][]) +- adapted async test (([`c9111d2`][]) +- async example (([`e1a9809`][]) +- fix for async parameterized readFile (([`2d4e80e`][]) +- drop 0.4, add 0.8 in travis (([`8a1ba59`][]) + +[`7033bbb`]: https://github.com/browserify/resolve/commit/7033bbb6e21ecfd13476ca8de247580aa2f97e7c +[`ba7038a`]: https://github.com/browserify/resolve/commit/ba7038a56d78212329b64287dfaf895b1a85cf2c +[`34a958e`]: https://github.com/browserify/resolve/commit/34a958e84b7fc4cdccd7b71f9a116027a6f3a123 +[`e427ca8`]: https://github.com/browserify/resolve/commit/e427ca85b7e3b1d01b05f94783b76516b8594a03 +[`d1191a9`]: https://github.com/browserify/resolve/commit/d1191a9958581a040f4f18b3aecdd50714bffc7a +[`f4b02e3`]: https://github.com/browserify/resolve/commit/f4b02e3bbf0c3b09f83cfb2b22b12b0f55afdf92 +[`a800954`]: https://github.com/browserify/resolve/commit/a80095482ef2d16425e6e12759c9735d89f7f50b +[`3534992`]: https://github.com/browserify/resolve/commit/3534992946294811d20aaf9857ee453078cbe828 +[`c9111d2`]: https://github.com/browserify/resolve/commit/c9111d293ab35fb611d9c65ea2f88ae8cf853f8e +[`e1a9809`]: https://github.com/browserify/resolve/commit/e1a98093094cded0a251ef36f4f2eb0adb280acb +[`2d4e80e`]: https://github.com/browserify/resolve/commit/2d4e80e139d01176bf70132bc80caed946cd6682 +[`8a1ba59`]: https://github.com/browserify/resolve/commit/8a1ba593ab924995a45099e164cc7b769c44e9a0 + +#### [v0.2.8](https://github.com/browserify/resolve/compare/v0.2.7...v0.2.8) - 18 February 2013 + +- add the domain module to .core (([`2979cde`][]) + +[`2979cde`]: https://github.com/browserify/resolve/commit/2979cdea615fe724de62d88cb221c1d1824d0f10 + +#### [v0.2.7](https://github.com/browserify/resolve/compare/v0.2.6...v0.2.7) - 18 February 2013 + +#### [v0.2.6](https://github.com/browserify/resolve/compare/v0.2.5...v0.2.6) - 18 February 2013 + +#### [v0.2.5](https://github.com/browserify/resolve/compare/v0.2.4...v0.2.5) - 18 February 2013 + +#### [v0.2.4](https://github.com/browserify/resolve/compare/v0.2.3...v0.2.4) - 18 February 2013 + +- resolve '../baz' correct (([`46fe923`][]) + +[`46fe923`]: https://github.com/browserify/resolve/commit/46fe923c20feeceac783e67cfa84d07222bc17fa + +#### [v0.2.3](https://github.com/browserify/resolve/compare/v0.2.2...v0.2.3) - 12 August 2012 + +- license file (([`a964396`][]) +- existsSync 
(([`d1c1012`][]) +- pass dir to packageFilter (([`3bea5b6`][]) +- pkg.main may be a directory (([`3521c2f`][]) +- Prioritize parent tree in nodeModulesPathsSync before fallback options.paths/ NODE_PATH equivalent, in accordance with http://nodejs.org/docs/latest/api/all.html#all_loading_from_the_global_folders (([`27fa227`][]) + +[`a964396`]: https://github.com/browserify/resolve/commit/a9643965438eb4fcb068a5876b317f516199879a +[`d1c1012`]: https://github.com/browserify/resolve/commit/d1c1012f14c50212ea49a9a1255c902f5ad6cb37 +[`3bea5b6`]: https://github.com/browserify/resolve/commit/3bea5b6475b39e7f4974d29c6fa1e8eb8b1589af +[`3521c2f`]: https://github.com/browserify/resolve/commit/3521c2f2b93234e5a50dc47598554a76589d6d8c +[`27fa227`]: https://github.com/browserify/resolve/commit/27fa22707e87738ddde61cb4ad90508cfe0d7755 + +#### [v0.2.2](https://github.com/browserify/resolve/compare/v0.2.1...v0.2.2) - 30 April 2012 + +- fix indentation (([`98fc4a5`][]) +- Updated to work with windows, tested on Windows 7 64-bit and OS X 10.6 (([`a6646cc`][]) +- bump for windows fixes (([`d67d595`][]) + +[`98fc4a5`]: https://github.com/browserify/resolve/commit/98fc4a50b68456d497a862b9c4e4e0a79570c770 +[`a6646cc`]: https://github.com/browserify/resolve/commit/a6646ccceb1a6c411d5b9dfdc97106c80d8a0a09 +[`d67d595`]: https://github.com/browserify/resolve/commit/d67d5959e1be31eb67d5b62e7050bff318572373 + +#### [v0.2.1](https://github.com/browserify/resolve/compare/v0.2.0...v0.2.1) - 12 April 2012 + +- now using tap (([`b625169`][]) +- using travis (([`30cc7b3`][]) +- split on multiple slashes (([`ebeafab`][]) +- fix splitting of paths to support windows as well (([`5e7e24b`][]) + +[`b625169`]: https://github.com/browserify/resolve/commit/b62516922eaaafe533806cd385017109ea057baa +[`30cc7b3`]: https://github.com/browserify/resolve/commit/30cc7b3af9299a0e08f34c314015a1395ef16ea3 +[`ebeafab`]: https://github.com/browserify/resolve/commit/ebeafab4a43c6ac4df7a8a7ee578629f81b7b9e7 +[`5e7e24b`]: https://github.com/browserify/resolve/commit/5e7e24bf11c48f14385886d7dd3661f786cc109b + +#### [v0.2.0](https://github.com/browserify/resolve/compare/v0.1.3...v0.2.0) - 25 February 2012 + +- updated the core list for 0.6.11 (([`12d4c16`][]) + +[`12d4c16`]: https://github.com/browserify/resolve/commit/12d4c164ef99bd35c13b0f566feaa70bc3560082 + +#### [v0.1.3](https://github.com/browserify/resolve/compare/v0.1.2...v0.1.3) - 14 December 2011 + +- bump (([`2dffd07`][]) +- Added readline to core modules (([`4ab55a2`][]) + +[`2dffd07`]: https://github.com/browserify/resolve/commit/2dffd072ce65b4aae4974e934ca5b58ec741f598 +[`4ab55a2`]: https://github.com/browserify/resolve/commit/4ab55a2d4eb95be2399fe94fd5d33879271b5a9f + +#### [v0.1.2](https://github.com/browserify/resolve/compare/v0.1.1...v0.1.2) - 31 October 2011 + +- Add opts.paths to list of node_modules directories (([`7bb6ef4`][]) +- bump (([`5e3fcc6`][]) + +[`7bb6ef4`]: https://github.com/browserify/resolve/commit/7bb6ef4a1805523169f30b6ea38776796a714c3a +[`5e3fcc6`]: https://github.com/browserify/resolve/commit/5e3fcc63cfec322779be5435820d3236e6d13dba + +#### [v0.1.1](https://github.com/browserify/resolve/compare/v0.1.0...v0.1.1) - 18 October 2011 + +- bump for windows paths (([`3fb86d0`][]) +- Added support for Windows-style paths. 
(([`638951e`][]) + +[`3fb86d0`]: https://github.com/browserify/resolve/commit/3fb86d07c77b09a7d6fa6d2a8b89432a070a6aa0 +[`638951e`]: https://github.com/browserify/resolve/commit/638951ed92fa4435d9752df30c3bcb9eb49573cd + +#### [v0.1.0](https://github.com/browserify/resolve/compare/v0.0.4...v0.1.0) - 3 October 2011 + +- passing mock test (([`030f0d3`][]) +- passing mock test with package.json (([`d2b19c8`][]) +- isFile and readFileSync as parameters (([`d30c22d`][]) +- doc updates and a minor bump for custom isFile and readFileSync params (([`b0af4c3`][]) + +[`030f0d3`]: https://github.com/browserify/resolve/commit/030f0d391e02558574bc673077fb1b4057f8358d +[`d2b19c8`]: https://github.com/browserify/resolve/commit/d2b19c893b7f8c63154c5b5ff2c419ffdc8baa0c +[`d30c22d`]: https://github.com/browserify/resolve/commit/d30c22d1e13b000016f2592d6d6f3489a2d29988 +[`b0af4c3`]: https://github.com/browserify/resolve/commit/b0af4c3ac1a51acf9995cb4e078bf5619f257952 + +#### [v0.0.4](https://github.com/browserify/resolve/compare/v0.0.3...v0.0.4) - 21 June 2011 + +- bump for packageFilter and a note in the docs (([`9fbb632`][]) +- new packageFilter option (([`c92c883`][]) + +[`9fbb632`]: https://github.com/browserify/resolve/commit/9fbb632a5c0c38641ed7c10399306a56651e0789 +[`c92c883`]: https://github.com/browserify/resolve/commit/c92c883bed3e50dd8ed9a2e1d4b9fefc9f3ced64 + +#### [v0.0.3](https://github.com/browserify/resolve/compare/v0.0.2...v0.0.3) - 20 June 2011 + +- custom extensions now work (([`502b6e9`][]) +- failing test for extensions (([`ce56f56`][]) +- bump and a note in the docs for extensions (([`2ad8287`][]) +- passing normalize test (([`055c7ce`][]) + +[`502b6e9`]: https://github.com/browserify/resolve/commit/502b6e9c8b9f258e5c943954467016e9c048fa0f +[`ce56f56`]: https://github.com/browserify/resolve/commit/ce56f56b4e1a5c1df495a7bf061fb0242103b4d8 +[`2ad8287`]: https://github.com/browserify/resolve/commit/2ad8287bc8b34929c2074a739410d08955ccdea7 +[`055c7ce`]: https://github.com/browserify/resolve/commit/055c7cea391ff0ce9cd8c585e8244f553b62f6e7 + +#### [v0.0.2](https://github.com/browserify/resolve/compare/v0.0.1...v0.0.2) - 19 June 2011 + +- failing biz test for going up and down the path directory (([`cf4f5a5`][]) +- don't stop on the first node_modules since that's going away in node anyhow, all tests pass again (([`9049abf`][]) + +[`cf4f5a5`]: https://github.com/browserify/resolve/commit/cf4f5a58d092124c517c55dd180559f5a444eb06 +[`9049abf`]: https://github.com/browserify/resolve/commit/9049abfb60cac49bb547b8ca02cc2617d406ff1a + +#### v0.0.1 + +- implementation seems to work but no tests yet (([`5218f01`][]) +- a package.json all up in this (([`4084043`][]) +- new resolve.{core,isCore} with tests and documentation, bump to 0.0.1 (([`a9ef081`][]) +- failing foo test (([`463b108`][]) +- readme before any code (([`7885443`][]) +- opts.path => opts.basedir, more descriptive I think (([`78010b1`][]) +- failing bar test (([`c40c5c1`][]) +- passing baz test to check package.json resolution (([`410635e`][]) +- a path.resolve() fixed the relative loads (([`dfef4b6`][]) +- passing the bar test after taking out the dirname() around y (([`eda2247`][]) +- trailing comma in the package.json (([`2032753`][]) + +[`5218f01`]: https://github.com/browserify/resolve/commit/5218f0106e78edce4cfb905d0ea4492ed3fd38af +[`4084043`]: https://github.com/browserify/resolve/commit/40840435a621120db78126c1792df7fdd0570703 +[`a9ef081`]: https://github.com/browserify/resolve/commit/a9ef081a4897e9882bf6bc6b31457c53b8d0fc0d 
+[`463b108`]: https://github.com/browserify/resolve/commit/463b108dd6e750196cba150348bd68397522c908 +[`7885443`]: https://github.com/browserify/resolve/commit/7885443d8a3dba7223b1bfca2d62cafc08a46436 +[`78010b1`]: https://github.com/browserify/resolve/commit/78010b1f91251447d1e74c6ac9cd0baebc6ddf60 +[`c40c5c1`]: https://github.com/browserify/resolve/commit/c40c5c14038acbe8bec91cf979d12382c2e6ddfe +[`410635e`]: https://github.com/browserify/resolve/commit/410635ef6226c030f74c4475e73724a01a102896 +[`dfef4b6`]: https://github.com/browserify/resolve/commit/dfef4b6185d02259c119a10c8a938e1ab148b140 +[`eda2247`]: https://github.com/browserify/resolve/commit/eda22479bd47c5d0b2e8a88851d9ffabbea2329c +[`2032753`]: https://github.com/browserify/resolve/commit/20327532053284676a269ec2441a87f16456fbf3 diff --git a/node_modules/normalize-package-data/node_modules/resolve/LICENSE b/node_modules/normalize-package-data/node_modules/resolve/LICENSE new file mode 100644 index 0000000000000..ee27ba4b4412b --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/normalize-package-data/node_modules/resolve/appveyor.yml b/node_modules/normalize-package-data/node_modules/resolve/appveyor.yml new file mode 100644 index 0000000000000..cc73cc18f084b --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/appveyor.yml @@ -0,0 +1,47 @@ +version: 1.0.{build} +skip_branch_with_pr: true +build: off + +environment: + matrix: + - nodejs_version: "9" + - nodejs_version: "8" + - nodejs_version: "7" + - nodejs_version: "6" + - nodejs_version: "5" + - nodejs_version: "4" + - nodejs_version: "3" + - nodejs_version: "2" + - nodejs_version: "1" + - nodejs_version: "0.12" + - nodejs_version: "0.10" + - nodejs_version: "0.8" + - nodejs_version: "0.6" +matrix: + # fast_finish: true + allow_failures: + - nodejs_version: "0.8" + - nodejs_version: "0.6" + +platform: + - x86 + - x64 + +# Install scripts. 
(runs after repo cloning) +install: + # Get the latest stable version of Node.js or io.js + - ps: Install-Product node $env:nodejs_version $env:platform + - IF %nodejs_version% EQU 0.6 npm config set strict-ssl false && npm -g install npm@1.3 + - IF %nodejs_version% EQU 0.8 npm config set strict-ssl false && npm -g install npm@1.4.28 && npm install -g npm@4.5 + - set PATH=%APPDATA%\npm;%PATH% + #- IF %nodejs_version% NEQ 0.6 AND %nodejs_version% NEQ 0.8 npm -g install npm + # install modules + - npm install + +# Post-install test scripts. +test_script: + # Output useful info for debugging. + - node --version + - npm --version + # run tests + - npm run tests-only diff --git a/node_modules/normalize-package-data/node_modules/resolve/changelog.hbs b/node_modules/normalize-package-data/node_modules/resolve/changelog.hbs new file mode 100644 index 0000000000000..addd7816b3fdb --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/changelog.hbs @@ -0,0 +1,36 @@ +### Changelog + +All notable changes to this project will be documented in this file. Dates are displayed in UTC. +{{#each releases}} + + {{#if href}} + ###{{#unless major}}#{{/unless}} [{{title}}]({{href}}){{#if tag}} - {{niceDate}}{{/if}} + {{else}} + #### {{title}} + {{/if}} + + {{#if summary}} + Summary: + {{summary}} + {{/if}} + + {{#each fixes}} + - {{{commit.subject}}} ({{#each fixes}}{{#if href}}[`#{{id}}`][]{{/if}}{{/each}}) + {{/each}} + {{#each merges}} + - {{{message}}}{{#if href}} ([`#{{id}}`][]){{/if}} + {{/each}} + {{#each commits}} + - {{#if breaking}}**Breaking change:** {{/if}}{{{subject}}}{{#if href}} (([`{{shorthash}}`][]){{/if}} + {{/each}} + + {{#each fixes}} +{{#if href}}[`#{{id}}`]: {{href}}{{/if}} + {{/each}} + {{#each merges}} +{{#if href}}[`#{{id}}`]: {{href}}{{/if}} + {{/each}} + {{#each commits}} +{{#if href}}[`{{shorthash}}`]: {{href}}{{/if}} + {{/each}} +{{/each}} diff --git a/node_modules/normalize-package-data/node_modules/resolve/example/async.js b/node_modules/normalize-package-data/node_modules/resolve/example/async.js new file mode 100644 index 0000000000000..20e65dc281dba --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/example/async.js @@ -0,0 +1,5 @@ +var resolve = require('../'); +resolve('tap', { basedir: __dirname }, function (err, res) { + if (err) console.error(err); + else console.log(res); +}); diff --git a/node_modules/normalize-package-data/node_modules/resolve/example/sync.js b/node_modules/normalize-package-data/node_modules/resolve/example/sync.js new file mode 100644 index 0000000000000..54b2cc1004223 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/example/sync.js @@ -0,0 +1,3 @@ +var resolve = require('../'); +var res = resolve.sync('tap', { basedir: __dirname }); +console.log(res); diff --git a/node_modules/normalize-package-data/node_modules/resolve/index.js b/node_modules/normalize-package-data/node_modules/resolve/index.js new file mode 100644 index 0000000000000..eb6ba89e6c210 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/index.js @@ -0,0 +1,8 @@ +var core = require('./lib/core'); +var async = require('./lib/async'); +async.core = core; +async.isCore = function isCore(x) { return core[x]; }; +async.sync = require('./lib/sync'); + +exports = async; +module.exports = async; diff --git a/node_modules/normalize-package-data/node_modules/resolve/lib/async.js b/node_modules/normalize-package-data/node_modules/resolve/lib/async.js new file mode 100644 index 
0000000000000..54211cc66e2d2 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/lib/async.js @@ -0,0 +1,229 @@ +var core = require('./core'); +var fs = require('fs'); +var path = require('path'); +var caller = require('./caller.js'); +var nodeModulesPaths = require('./node-modules-paths.js'); +var normalizeOptions = require('./normalize-options.js'); + +var defaultIsFile = function isFile(file, cb) { + fs.stat(file, function (err, stat) { + if (!err) { + return cb(null, stat.isFile() || stat.isFIFO()); + } + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') return cb(null, false); + return cb(err); + }); +}; + +module.exports = function resolve(x, options, callback) { + var cb = callback; + var opts = options; + if (typeof options === 'function') { + cb = opts; + opts = {}; + } + if (typeof x !== 'string') { + var err = new TypeError('Path must be a string.'); + return process.nextTick(function () { + cb(err); + }); + } + + opts = normalizeOptions(x, opts); + + var isFile = opts.isFile || defaultIsFile; + var readFile = opts.readFile || fs.readFile; + + var extensions = opts.extensions || ['.js']; + var basedir = opts.basedir || path.dirname(caller()); + var parent = opts.filename || basedir; + + opts.paths = opts.paths || []; + + // ensure that `basedir` is an absolute path at this point, resolving against the process' current working directory + var absoluteStart = path.resolve(basedir); + + if (opts.preserveSymlinks === false) { + fs.realpath(absoluteStart, function (realPathErr, realStart) { + if (realPathErr && realPathErr.code !== 'ENOENT') cb(err); + else init(realPathErr ? absoluteStart : realStart); + }); + } else { + init(absoluteStart); + } + + var res; + function init(basedir) { + if ((/^(?:\.\.?(?:\/|$)|\/|([A-Za-z]:)?[/\\])/).test(x)) { + res = path.resolve(basedir, x); + if (x === '..' 
|| x.slice(-1) === '/') res += '/'; + if ((/\/$/).test(x) && res === basedir) { + loadAsDirectory(res, opts.package, onfile); + } else loadAsFile(res, opts.package, onfile); + } else loadNodeModules(x, basedir, function (err, n, pkg) { + if (err) cb(err); + else if (n) cb(null, n, pkg); + else if (core[x]) return cb(null, x); + else { + var moduleError = new Error("Cannot find module '" + x + "' from '" + parent + "'"); + moduleError.code = 'MODULE_NOT_FOUND'; + cb(moduleError); + } + }); + } + + function onfile(err, m, pkg) { + if (err) cb(err); + else if (m) cb(null, m, pkg); + else loadAsDirectory(res, function (err, d, pkg) { + if (err) cb(err); + else if (d) cb(null, d, pkg); + else { + var moduleError = new Error("Cannot find module '" + x + "' from '" + parent + "'"); + moduleError.code = 'MODULE_NOT_FOUND'; + cb(moduleError); + } + }); + } + + function loadAsFile(x, thePackage, callback) { + var loadAsFilePackage = thePackage; + var cb = callback; + if (typeof loadAsFilePackage === 'function') { + cb = loadAsFilePackage; + loadAsFilePackage = undefined; + } + + var exts = [''].concat(extensions); + load(exts, x, loadAsFilePackage); + + function load(exts, x, loadPackage) { + if (exts.length === 0) return cb(null, undefined, loadPackage); + var file = x + exts[0]; + + var pkg = loadPackage; + if (pkg) onpkg(null, pkg); + else loadpkg(path.dirname(file), onpkg); + + function onpkg(err, pkg_, dir) { + pkg = pkg_; + if (err) return cb(err); + if (dir && pkg && opts.pathFilter) { + var rfile = path.relative(dir, file); + var rel = rfile.slice(0, rfile.length - exts[0].length); + var r = opts.pathFilter(pkg, x, rel); + if (r) return load( + [''].concat(extensions.slice()), + path.resolve(dir, r), + pkg + ); + } + isFile(file, onex); + } + function onex(err, ex) { + if (err) return cb(err); + if (ex) return cb(null, file, pkg); + load(exts.slice(1), x, pkg); + } + } + } + + function loadpkg(dir, cb) { + if (dir === '' || dir === '/') return cb(null); + if (process.platform === 'win32' && (/^\w:[/\\]*$/).test(dir)) { + return cb(null); + } + if ((/[/\\]node_modules[/\\]*$/).test(dir)) return cb(null); + + var pkgfile = path.join(dir, 'package.json'); + isFile(pkgfile, function (err, ex) { + // on err, ex is false + if (!ex) return loadpkg(path.dirname(dir), cb); + + readFile(pkgfile, function (err, body) { + if (err) cb(err); + try { var pkg = JSON.parse(body); } catch (jsonErr) {} + + if (pkg && opts.packageFilter) { + pkg = opts.packageFilter(pkg, pkgfile); + } + cb(null, pkg, dir); + }); + }); + } + + function loadAsDirectory(x, loadAsDirectoryPackage, callback) { + var cb = callback; + var fpkg = loadAsDirectoryPackage; + if (typeof fpkg === 'function') { + cb = fpkg; + fpkg = opts.package; + } + + var pkgfile = path.join(x, 'package.json'); + isFile(pkgfile, function (err, ex) { + if (err) return cb(err); + if (!ex) return loadAsFile(path.join(x, 'index'), fpkg, cb); + + readFile(pkgfile, function (err, body) { + if (err) return cb(err); + try { + var pkg = JSON.parse(body); + } catch (jsonErr) {} + + if (opts.packageFilter) { + pkg = opts.packageFilter(pkg, pkgfile); + } + + if (pkg.main) { + if (typeof pkg.main !== 'string') { + var mainError = new TypeError('package “' + pkg.name + '” `main` must be a string'); + mainError.code = 'INVALID_PACKAGE_MAIN'; + return cb(mainError); + } + if (pkg.main === '.' 
|| pkg.main === './') { + pkg.main = 'index'; + } + loadAsFile(path.resolve(x, pkg.main), pkg, function (err, m, pkg) { + if (err) return cb(err); + if (m) return cb(null, m, pkg); + if (!pkg) return loadAsFile(path.join(x, 'index'), pkg, cb); + + var dir = path.resolve(x, pkg.main); + loadAsDirectory(dir, pkg, function (err, n, pkg) { + if (err) return cb(err); + if (n) return cb(null, n, pkg); + loadAsFile(path.join(x, 'index'), pkg, cb); + }); + }); + return; + } + + loadAsFile(path.join(x, '/index'), pkg, cb); + }); + }); + } + + function processDirs(cb, dirs) { + if (dirs.length === 0) return cb(null, undefined); + var dir = dirs[0]; + + var file = path.join(dir, x); + loadAsFile(file, opts.package, onfile); + + function onfile(err, m, pkg) { + if (err) return cb(err); + if (m) return cb(null, m, pkg); + loadAsDirectory(path.join(dir, x), opts.package, ondir); + } + + function ondir(err, n, pkg) { + if (err) return cb(err); + if (n) return cb(null, n, pkg); + processDirs(cb, dirs.slice(1)); + } + } + function loadNodeModules(x, start, cb) { + processDirs(cb, nodeModulesPaths(start, opts, x)); + } +}; diff --git a/node_modules/normalize-package-data/node_modules/resolve/lib/caller.js b/node_modules/normalize-package-data/node_modules/resolve/lib/caller.js new file mode 100644 index 0000000000000..b14a2804ae828 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/lib/caller.js @@ -0,0 +1,8 @@ +module.exports = function () { + // see https://code.google.com/p/v8/wiki/JavaScriptStackTraceApi + var origPrepareStackTrace = Error.prepareStackTrace; + Error.prepareStackTrace = function (_, stack) { return stack; }; + var stack = (new Error()).stack; + Error.prepareStackTrace = origPrepareStackTrace; + return stack[2].getFileName(); +}; diff --git a/node_modules/normalize-package-data/node_modules/resolve/lib/core.js b/node_modules/normalize-package-data/node_modules/resolve/lib/core.js new file mode 100644 index 0000000000000..0877650ccad4e --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/lib/core.js @@ -0,0 +1,53 @@ +var current = (process.versions && process.versions.node && process.versions.node.split('.')) || []; + +function specifierIncluded(specifier) { + var parts = specifier.split(' '); + var op = parts.length > 1 ? parts[0] : '='; + var versionParts = (parts.length > 1 ? 
parts[1] : parts[0]).split('.'); + + for (var i = 0; i < 3; ++i) { + var cur = Number(current[i] || 0); + var ver = Number(versionParts[i] || 0); + if (cur === ver) { + continue; // eslint-disable-line no-restricted-syntax, no-continue + } + if (op === '<') { + return cur < ver; + } else if (op === '>=') { + return cur >= ver; + } else { + return false; + } + } + return op === '>='; +} + +function matchesRange(range) { + var specifiers = range.split(/ ?&& ?/); + if (specifiers.length === 0) { return false; } + for (var i = 0; i < specifiers.length; ++i) { + if (!specifierIncluded(specifiers[i])) { return false; } + } + return true; +} + +function versionIncluded(specifierValue) { + if (typeof specifierValue === 'boolean') { return specifierValue; } + if (specifierValue && typeof specifierValue === 'object') { + for (var i = 0; i < specifierValue.length; ++i) { + if (matchesRange(specifierValue[i])) { return true; } + } + return false; + } + return matchesRange(specifierValue); +} + +var data = require('./core.json'); + +var core = {}; +for (var mod in data) { // eslint-disable-line no-restricted-syntax + if (Object.prototype.hasOwnProperty.call(data, mod)) { + core[mod] = versionIncluded(data[mod]); + } +} +module.exports = core; diff --git a/node_modules/normalize-package-data/node_modules/resolve/lib/core.json b/node_modules/normalize-package-data/node_modules/resolve/lib/core.json new file mode 100644 index 0000000000000..1575db31b0eb2 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/lib/core.json @@ -0,0 +1,73 @@ +{ + "assert": true, + "async_hooks": ">= 8", + "buffer_ieee754": "< 0.9.7", + "buffer": true, + "child_process": true, + "cluster": true, + "console": true, + "constants": true, + "crypto": true, + "_debugger": "< 8", + "dgram": true, + "dns": true, + "domain": true, + "events": true, + "freelist": "< 6", + "fs": true, + "fs/promises": ">= 10 && < 10.1", + "_http_agent": ">= 0.11.1", + "_http_client": ">= 0.11.1", + "_http_common": ">= 0.11.1", + "_http_incoming": ">= 0.11.1", + "_http_outgoing": ">= 0.11.1", + "_http_server": ">= 0.11.1", + "http": true, + "http2": ">= 8.8", + "https": true, + "inspector": ">= 8.0.0", + "_linklist": "< 8", + "module": true, + "net": true, + "node-inspect/lib/_inspect": ">= 7.6.0", + "node-inspect/lib/internal/inspect_client": ">= 7.6.0", + "node-inspect/lib/internal/inspect_repl": ">= 7.6.0", + "os": true, + "path": true, + "perf_hooks": ">= 8.5", + "process": ">= 1", + "punycode": true, + "querystring": true, + "readline": true, + "repl": true, + "smalloc": ">= 0.11.5 && < 3", + "_stream_duplex": ">= 0.9.4", + "_stream_transform": ">= 0.9.4", + "_stream_wrap": ">= 1.4.1", + "_stream_passthrough": ">= 0.9.4", + "_stream_readable": ">= 0.9.4", + "_stream_writable": ">= 0.9.4", + "stream": true, + "string_decoder": true, + "sys": true, + "timers": true, + "_tls_common": ">= 0.11.13", + "_tls_legacy": ">= 0.11.3 && < 10", + "_tls_wrap": ">= 0.11.3", + "tls": true, + "trace_events": ">= 10", + "tty": true, + "url": true, + "util": true, + "v8/tools/arguments": ">= 10", + "v8/tools/codemap": [">= 4.4.0 && < 5", ">= 5.2.0"], + "v8/tools/consarray": [">= 4.4.0 && < 5", ">= 5.2.0"], + "v8/tools/csvparser": [">= 4.4.0 && < 5", ">= 5.2.0"], + "v8/tools/logreader": [">= 4.4.0 && < 5", ">= 5.2.0"], + "v8/tools/profile_view": [">= 4.4.0 && < 5", ">= 5.2.0"], + "v8/tools/splaytree": [">= 4.4.0 && < 5", ">= 5.2.0"], + "v8": ">= 1", + "vm": true, + "worker_threads": ">= 11.7", + "zlib": true +} diff --git 
a/node_modules/normalize-package-data/node_modules/resolve/lib/node-modules-paths.js b/node_modules/normalize-package-data/node_modules/resolve/lib/node-modules-paths.js new file mode 100644 index 0000000000000..df50f48baf430 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/lib/node-modules-paths.js @@ -0,0 +1,42 @@ +var path = require('path'); +var parse = path.parse || require('path-parse'); + +var getNodeModulesDirs = function getNodeModulesDirs(absoluteStart, modules) { + var prefix = '/'; + if ((/^([A-Za-z]:)/).test(absoluteStart)) { + prefix = ''; + } else if ((/^\\\\/).test(absoluteStart)) { + prefix = '\\\\'; + } + + var paths = [absoluteStart]; + var parsed = parse(absoluteStart); + while (parsed.dir !== paths[paths.length - 1]) { + paths.push(parsed.dir); + parsed = parse(parsed.dir); + } + + return paths.reduce(function (dirs, aPath) { + return dirs.concat(modules.map(function (moduleDir) { + return path.join(prefix, aPath, moduleDir); + })); + }, []); +}; + +module.exports = function nodeModulesPaths(start, opts, request) { + var modules = opts && opts.moduleDirectory + ? [].concat(opts.moduleDirectory) + : ['node_modules']; + + if (opts && typeof opts.paths === 'function') { + return opts.paths( + request, + start, + function () { return getNodeModulesDirs(start, modules); }, + opts + ); + } + + var dirs = getNodeModulesDirs(start, modules); + return opts && opts.paths ? dirs.concat(opts.paths) : dirs; +}; diff --git a/node_modules/normalize-package-data/node_modules/resolve/lib/normalize-options.js b/node_modules/normalize-package-data/node_modules/resolve/lib/normalize-options.js new file mode 100644 index 0000000000000..4b56904eaea72 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/lib/normalize-options.js @@ -0,0 +1,10 @@ +module.exports = function (x, opts) { + /** + * This file is purposefully a passthrough. It's expected that third-party + * environments will override it at runtime in order to inject special logic + * into `resolve` (by manipulating the options). One such example is the PnP + * code path in Yarn. 
+ */ + + return opts || {}; +}; diff --git a/node_modules/normalize-package-data/node_modules/resolve/lib/sync.js b/node_modules/normalize-package-data/node_modules/resolve/lib/sync.js new file mode 100644 index 0000000000000..33ad5da2f69ac --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/lib/sync.js @@ -0,0 +1,154 @@ +var core = require('./core'); +var fs = require('fs'); +var path = require('path'); +var caller = require('./caller.js'); +var nodeModulesPaths = require('./node-modules-paths.js'); +var normalizeOptions = require('./normalize-options.js'); + +var defaultIsFile = function isFile(file) { + try { + var stat = fs.statSync(file); + } catch (e) { + if (e && (e.code === 'ENOENT' || e.code === 'ENOTDIR')) return false; + throw e; + } + return stat.isFile() || stat.isFIFO(); +}; + +module.exports = function (x, options) { + if (typeof x !== 'string') { + throw new TypeError('Path must be a string.'); + } + var opts = normalizeOptions(x, options); + + var isFile = opts.isFile || defaultIsFile; + var readFileSync = opts.readFileSync || fs.readFileSync; + + var extensions = opts.extensions || ['.js']; + var basedir = opts.basedir || path.dirname(caller()); + var parent = opts.filename || basedir; + + opts.paths = opts.paths || []; + + // ensure that `basedir` is an absolute path at this point, resolving against the process' current working directory + var absoluteStart = path.resolve(basedir); + + if (opts.preserveSymlinks === false) { + try { + absoluteStart = fs.realpathSync(absoluteStart); + } catch (realPathErr) { + if (realPathErr.code !== 'ENOENT') { + throw realPathErr; + } + } + } + + if ((/^(?:\.\.?(?:\/|$)|\/|([A-Za-z]:)?[/\\])/).test(x)) { + var res = path.resolve(absoluteStart, x); + if (x === '..' || x.slice(-1) === '/') res += '/'; + var m = loadAsFileSync(res) || loadAsDirectorySync(res); + if (m) return m; + } else { + var n = loadNodeModulesSync(x, absoluteStart); + if (n) return n; + } + + if (core[x]) return x; + + var err = new Error("Cannot find module '" + x + "' from '" + parent + "'"); + err.code = 'MODULE_NOT_FOUND'; + throw err; + + function loadAsFileSync(x) { + var pkg = loadpkg(path.dirname(x)); + + if (pkg && pkg.dir && pkg.pkg && opts.pathFilter) { + var rfile = path.relative(pkg.dir, x); + var r = opts.pathFilter(pkg.pkg, x, rfile); + if (r) { + x = path.resolve(pkg.dir, r); // eslint-disable-line no-param-reassign + } + } + + if (isFile(x)) { + return x; + } + + for (var i = 0; i < extensions.length; i++) { + var file = x + extensions[i]; + if (isFile(file)) { + return file; + } + } + } + + function loadpkg(dir) { + if (dir === '' || dir === '/') return; + if (process.platform === 'win32' && (/^\w:[/\\]*$/).test(dir)) { + return; + } + if ((/[/\\]node_modules[/\\]*$/).test(dir)) return; + + var pkgfile = path.join(dir, 'package.json'); + + if (!isFile(pkgfile)) { + return loadpkg(path.dirname(dir)); + } + + var body = readFileSync(pkgfile); + + try { + var pkg = JSON.parse(body); + } catch (jsonErr) {} + + if (pkg && opts.packageFilter) { + pkg = opts.packageFilter(pkg, dir); + } + + return { pkg: pkg, dir: dir }; + } + + function loadAsDirectorySync(x) { + var pkgfile = path.join(x, '/package.json'); + if (isFile(pkgfile)) { + try { + var body = readFileSync(pkgfile, 'UTF8'); + var pkg = JSON.parse(body); + } catch (e) {} + + if (opts.packageFilter) { + pkg = opts.packageFilter(pkg, x); + } + + if (pkg.main) { + if (typeof pkg.main !== 'string') { + var mainError = new TypeError('package “' + pkg.name + '” `main` must be a 
string'); + mainError.code = 'INVALID_PACKAGE_MAIN'; + throw mainError; + } + if (pkg.main === '.' || pkg.main === './') { + pkg.main = 'index'; + } + try { + var m = loadAsFileSync(path.resolve(x, pkg.main)); + if (m) return m; + var n = loadAsDirectorySync(path.resolve(x, pkg.main)); + if (n) return n; + } catch (e) {} + } + } + + return loadAsFileSync(path.join(x, '/index')); + } + + function loadNodeModulesSync(x, start) { + var dirs = nodeModulesPaths(start, opts, x); + for (var i = 0; i < dirs.length; i++) { + var dir = dirs[i]; + var m = loadAsFileSync(path.join(dir, '/', x)); + if (m) return m; + var n = loadAsDirectorySync(path.join(dir, '/', x)); + if (n) return n; + } + } +}; diff --git a/node_modules/normalize-package-data/node_modules/resolve/package.json b/node_modules/normalize-package-data/node_modules/resolve/package.json new file mode 100644 index 0000000000000..a736a535f1989 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/package.json @@ -0,0 +1,71 @@ +{ + "_from": "resolve@^1.10.0", + "_id": "resolve@1.10.0", + "_inBundle": false, + "_integrity": "sha512-3sUr9aq5OfSg2S9pNtPA9hL1FVEAjvfOC4leW0SNf/mpnaakz2a9femSd6LqAww2RaFctwyf1lCqnTHuF1rxDg==", + "_location": "/normalize-package-data/resolve", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "resolve@^1.10.0", + "name": "resolve", + "escapedName": "resolve", + "rawSpec": "^1.10.0", + "saveSpec": null, + "fetchSpec": "^1.10.0" + }, + "_requiredBy": [ + "/normalize-package-data" + ], + "_resolved": "https://registry.npmjs.org/resolve/-/resolve-1.10.0.tgz", + "_shasum": "3bdaaeaf45cc07f375656dfd2e54ed0810b101ba", + "_spec": "resolve@^1.10.0", + "_where": "/Users/aeschright/code/npm-release/node_modules/normalize-package-data", + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "bugs": { + "url": "https://github.com/browserify/resolve/issues" + }, + "bundleDependencies": false, + "dependencies": { + "path-parse": "^1.0.6" + }, + "deprecated": false, + "description": "resolve like require.resolve() on behalf of files asynchronously and synchronously", + "devDependencies": { + "@ljharb/eslint-config": "^13.1.1", + "eslint": "^5.12.0", + "object-keys": "^1.0.12", + "safe-publish-latest": "^1.1.2", + "tap": "0.4.13", + "tape": "^4.9.2" + }, + "homepage": "https://github.com/browserify/resolve#readme", + "keywords": [ + "resolve", + "require", + "node", + "module" + ], + "license": "MIT", + "main": "index.js", + "name": "resolve", + "repository": { + "type": "git", + "url": "git://github.com/browserify/resolve.git" + }, + "scripts": { + "lint": "eslint .", + "posttest": "npm run test:multirepo", + "prepublish": "safe-publish-latest", + "pretest": "npm run lint", + "test": "npm run --silent tests-only", + "test:multirepo": "cd ./test/resolver/multirepo && npm install && npm test", + "tests-only": "tape test/*.js" + }, + "version": "1.10.0" +} diff --git a/node_modules/normalize-package-data/node_modules/resolve/readme.markdown b/node_modules/normalize-package-data/node_modules/resolve/readme.markdown new file mode 100644 index 0000000000000..95be0f97a0f85 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/readme.markdown @@ -0,0 +1,179 @@ +# resolve + +implements the [node `require.resolve()` +algorithm](https://nodejs.org/api/modules.html#modules_all_together) +such that you can `require.resolve()` on behalf of a file asynchronously and +synchronously + +[![build 
status](https://secure.travis-ci.org/browserify/node-resolve.png)](http://travis-ci.org/browserify/node-resolve) + +# example + +asynchronously resolve: + +```js +var resolve = require('resolve'); +resolve('tap', { basedir: __dirname }, function (err, res) { + if (err) console.error(err); + else console.log(res); +}); +``` + +``` +$ node example/async.js +/home/substack/projects/node-resolve/node_modules/tap/lib/main.js +``` + +synchronously resolve: + +```js +var resolve = require('resolve'); +var res = resolve.sync('tap', { basedir: __dirname }); +console.log(res); +``` + +``` +$ node example/sync.js +/home/substack/projects/node-resolve/node_modules/tap/lib/main.js +``` + +# methods + +```js +var resolve = require('resolve'); +``` + +## resolve(id, opts={}, cb) + +Asynchronously resolve the module path string `id` into `cb(err, res [, pkg])`, where `pkg` (if defined) is the data from `package.json`. + +options are: + +* opts.basedir - directory to begin resolving from + +* opts.package - `package.json` data applicable to the module being loaded + +* opts.extensions - array of file extensions to search in order + +* opts.readFile - how to read files asynchronously + +* opts.isFile - function to asynchronously test whether a file exists + +* `opts.packageFilter(pkg, pkgfile)` - transform the parsed package.json contents before looking at the "main" field + * pkg - package data + * pkgfile - path to package.json + +* `opts.pathFilter(pkg, path, relativePath)` - transform a path within a package + * pkg - package data + * path - the path being resolved + * relativePath - the path relative from the package.json location + * returns - a relative path that will be joined from the package.json location + +* opts.paths - require.paths array to use if nothing is found on the normal `node_modules` recursive walk (probably don't use this) + + For advanced users, `paths` can also be a `opts.paths(request, start, opts)` function + * request - the import specifier being resolved + * start - lookup path + * getNodeModulesDirs - a thunk (no-argument function) that returns the paths using standard `node_modules` resolution + * opts - the resolution options + +* opts.moduleDirectory - directory (or directories) in which to recursively look for modules. default: `"node_modules"` + +* opts.preserveSymlinks - if true, doesn't resolve `basedir` to real path before resolving. +This is the way Node resolves dependencies when executed with the [--preserve-symlinks](https://nodejs.org/api/all.html#cli_preserve_symlinks) flag. +**Note:** this property is currently `true` by default but it will be changed to +`false` in the next major version because *Node's resolution algorithm does not preserve symlinks by default*. + +default `opts` values: + +```js +{ + paths: [], + basedir: __dirname, + extensions: ['.js'], + readFile: fs.readFile, + isFile: function isFile(file, cb) { + fs.stat(file, function (err, stat) { + if (!err) { + return cb(null, stat.isFile() || stat.isFIFO()); + } + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') return cb(null, false); + return cb(err); + }); + }, + moduleDirectory: 'node_modules', + preserveSymlinks: true +} +``` + +## resolve.sync(id, opts) + +Synchronously resolve the module path string `id`, returning the result and +throwing an error when `id` can't be resolved. 
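As a minimal illustration of that behaviour (assuming no package named `no-such-module` is installed — the id is hypothetical), the thrown error carries `code: 'MODULE_NOT_FOUND'`:

```js
var resolve = require('resolve');

try {
    // throws because the id cannot be resolved from basedir
    resolve.sync('no-such-module', { basedir: __dirname });
} catch (err) {
    // resolve sets err.code to 'MODULE_NOT_FOUND' when resolution fails
    console.error(err.code, err.message);
}
```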
+ +options are: + +* opts.basedir - directory to begin resolving from + +* opts.extensions - array of file extensions to search in order + +* opts.readFile - how to read files synchronously + +* opts.isFile - function to synchronously test whether a file exists + +* `opts.packageFilter(pkg, dir)` - transform the parsed package.json contents before looking at the "main" field + * pkg - package data + * dir - directory for package.json (Note: the second argument will change to "pkgfile" in v2) + +* `opts.pathFilter(pkg, path, relativePath)` - transform a path within a package + * pkg - package data + * path - the path being resolved + * relativePath - the path relative from the package.json location + * returns - a relative path that will be joined from the package.json location + +* opts.paths - require.paths array to use if nothing is found on the normal `node_modules` recursive walk (probably don't use this) + +* opts.moduleDirectory - directory (or directories) in which to recursively look for modules. default: `"node_modules"` + +* opts.preserveSymlinks - if true, doesn't resolve `basedir` to real path before resolving. +This is the way Node resolves dependencies when executed with the [--preserve-symlinks](https://nodejs.org/api/all.html#cli_preserve_symlinks) flag. +**Note:** this property is currently `true` by default but it will be changed to +`false` in the next major version because *Node's resolution algorithm does not preserve symlinks by default*. + +default `opts` values: + +```js +{ + paths: [], + basedir: __dirname, + extensions: ['.js'], + readFileSync: fs.readFileSync, + isFile: function isFile(file) { + try { + var stat = fs.statSync(file); + } catch (e) { + if (e && (e.code === 'ENOENT' || e.code === 'ENOTDIR')) return false; + throw e; + } + return stat.isFile() || stat.isFIFO(); + }, + moduleDirectory: 'node_modules', + preserveSymlinks: true +} +``` + +## resolve.isCore(pkg) + +Return whether a package is in core. 
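For example, a short sketch mirroring the assertions in `test/core.js`:

```js
var resolve = require('resolve');

console.log(resolve.isCore('fs'));  // truthy: `fs` ships with node
console.log(resolve.isCore('seq')); // falsy: `seq` is a userland package
```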
+ +# install + +With [npm](https://npmjs.org) do: + +```sh +npm install resolve +``` + +# license + +MIT diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/core.js b/node_modules/normalize-package-data/node_modules/resolve/test/core.js new file mode 100644 index 0000000000000..33d9f329499c6 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/core.js @@ -0,0 +1,82 @@ +var test = require('tape'); +var keys = require('object-keys'); +var resolve = require('../'); + +test('core modules', function (t) { + t.test('isCore()', function (st) { + st.ok(resolve.isCore('fs')); + st.ok(resolve.isCore('net')); + st.ok(resolve.isCore('http')); + + st.ok(!resolve.isCore('seq')); + st.ok(!resolve.isCore('../')); + st.end(); + }); + + t.test('core list', function (st) { + var cores = keys(resolve.core); + st.plan(cores.length); + + for (var i = 0; i < cores.length; ++i) { + var mod = cores[i]; + if (resolve.core[mod]) { + st.doesNotThrow( + function () { require(mod); }, // eslint-disable-line no-loop-func + mod + ' supported; requiring does not throw' + ); + } else { + st.throws( + function () { require(mod); }, // eslint-disable-line no-loop-func + mod + ' not supported; requiring throws' + ); + } + } + + st.end(); + }); + + t.test('core via repl module', { skip: !resolve.core.repl }, function (st) { + var libs = require('repl')._builtinLibs; // eslint-disable-line no-underscore-dangle + if (!libs) { + st.skip('module.builtinModules does not exist'); + return st.end(); + } + for (var i = 0; i < libs.length; ++i) { + var mod = libs[i]; + st.ok(resolve.core[mod], mod + ' is a core module'); + st.doesNotThrow( + function () { require(mod); }, // eslint-disable-line no-loop-func + 'requiring ' + mod + ' does not throw' + ); + } + st.end(); + }); + + t.test('core via builtinModules list', { skip: !resolve.core.module }, function (st) { + var libs = require('module').builtinModules; + if (!libs) { + st.skip('module.builtinModules does not exist'); + return st.end(); + } + var blacklist = [ + '_debug_agent', + 'v8/tools/tickprocessor-driver', + 'v8/tools/SourceMap', + 'v8/tools/tickprocessor', + 'v8/tools/profile' + ]; + for (var i = 0; i < libs.length; ++i) { + var mod = libs[i]; + if (blacklist.indexOf(mod) === -1) { + st.ok(resolve.core[mod], mod + ' is a core module'); + st.doesNotThrow( + function () { require(mod); }, // eslint-disable-line no-loop-func + 'requiring ' + mod + ' does not throw' + ); + } + } + st.end(); + }); + + t.end(); +}); diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/dotdot.js b/node_modules/normalize-package-data/node_modules/resolve/test/dotdot.js new file mode 100644 index 0000000000000..30806659be2ef --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/dotdot.js @@ -0,0 +1,29 @@ +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +test('dotdot', function (t) { + t.plan(4); + var dir = path.join(__dirname, '/dotdot/abc'); + + resolve('..', { basedir: dir }, function (err, res, pkg) { + t.ifError(err); + t.equal(res, path.join(__dirname, 'dotdot/index.js')); + }); + + resolve('.', { basedir: dir }, function (err, res, pkg) { + t.ifError(err); + t.equal(res, path.join(dir, 'index.js')); + }); +}); + +test('dotdot sync', function (t) { + t.plan(2); + var dir = path.join(__dirname, '/dotdot/abc'); + + var a = resolve.sync('..', { basedir: dir }); + t.equal(a, path.join(__dirname, 'dotdot/index.js')); + + var b = resolve.sync('.', { 
basedir: dir }); + t.equal(b, path.join(dir, 'index.js')); +}); diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/dotdot/abc/index.js b/node_modules/normalize-package-data/node_modules/resolve/test/dotdot/abc/index.js new file mode 100644 index 0000000000000..67f2534ebf90d --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/dotdot/abc/index.js @@ -0,0 +1,2 @@ +var x = require('..'); +console.log(x); diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/dotdot/index.js b/node_modules/normalize-package-data/node_modules/resolve/test/dotdot/index.js new file mode 100644 index 0000000000000..643f9fcc6a6a7 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/dotdot/index.js @@ -0,0 +1 @@ +module.exports = 'whatever'; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/faulty_basedir.js b/node_modules/normalize-package-data/node_modules/resolve/test/faulty_basedir.js new file mode 100644 index 0000000000000..5f2141a67267b --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/faulty_basedir.js @@ -0,0 +1,29 @@ +var test = require('tape'); +var path = require('path'); +var resolve = require('../'); + +test('faulty basedir must produce error in windows', { skip: process.platform !== 'win32' }, function (t) { + t.plan(1); + + var resolverDir = 'C:\\a\\b\\c\\d'; + + resolve('tape/lib/test.js', { basedir: resolverDir }, function (err, res, pkg) { + t.equal(!!err, true); + }); +}); + +test('non-existent basedir should not throw when preserveSymlinks is false', function (t) { + t.plan(2); + + var opts = { + basedir: path.join(path.sep, 'unreal', 'path', 'that', 'does', 'not', 'exist'), + preserveSymlinks: false + }; + + var module = './dotdot/abc'; + + resolve(module, opts, function (err, res) { + t.equal(err.code, 'MODULE_NOT_FOUND'); + t.equal(res, undefined); + }); +}); diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/filter.js b/node_modules/normalize-package-data/node_modules/resolve/test/filter.js new file mode 100644 index 0000000000000..dfc622a874e16 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/filter.js @@ -0,0 +1,34 @@ +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +test('filter', function (t) { + t.plan(4); + var dir = path.join(__dirname, 'resolver'); + var packageFilterArgs; + resolve('./baz', { + basedir: dir, + packageFilter: function (pkg, pkgfile) { + pkg.main = 'doom'; + packageFilterArgs = [pkg, pkgfile]; + return pkg; + } + }, function (err, res, pkg) { + if (err) t.fail(err); + + t.equal(res, path.join(dir, 'baz/doom.js'), 'changing the package "main" works'); + + var packageData = packageFilterArgs[0]; + t.equal(pkg, packageData, 'first packageFilter argument is "pkg"'); + t.equal(packageData.main, 'doom', 'package "main" was altered'); + + var packageFile = packageFilterArgs[1]; + t.equal( + packageFile, + path.join(dir, 'baz/package.json'), + 'second packageFilter argument is "pkgfile"' + ); + + t.end(); + }); +}); diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/filter_sync.js b/node_modules/normalize-package-data/node_modules/resolve/test/filter_sync.js new file mode 100644 index 0000000000000..064052e160ff1 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/filter_sync.js @@ -0,0 +1,26 @@ +var path = require('path'); +var test = require('tape'); +var resolve = 
require('../'); + +test('filter', function (t) { + var dir = path.join(__dirname, 'resolver'); + var packageFilterArgs; + var res = resolve.sync('./baz', { + basedir: dir, + packageFilter: function (pkg, dir) { + pkg.main = 'doom'; + packageFilterArgs = [pkg, dir]; + return pkg; + } + }); + + t.equal(res, path.join(dir, 'baz/doom.js'), 'changing the package "main" works'); + + var packageData = packageFilterArgs[0]; + t.equal(packageData.main, 'doom', 'package "main" was altered'); + + var packageFile = packageFilterArgs[1]; + t.equal(packageFile, path.join(dir, 'baz'), 'second packageFilter argument is "dir"'); + + t.end(); +}); diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/mock.js b/node_modules/normalize-package-data/node_modules/resolve/test/mock.js new file mode 100644 index 0000000000000..a88059d453919 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/mock.js @@ -0,0 +1,143 @@ +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +test('mock', function (t) { + t.plan(8); + + var files = {}; + files[path.resolve('/foo/bar/baz.js')] = 'beep'; + + function opts(basedir) { + return { + basedir: path.resolve(basedir), + isFile: function (file, cb) { + cb(null, Object.prototype.hasOwnProperty.call(files, path.resolve(file))); + }, + readFile: function (file, cb) { + cb(null, files[path.resolve(file)]); + } + }; + } + + resolve('./baz', opts('/foo/bar'), function (err, res, pkg) { + if (err) return t.fail(err); + t.equal(res, path.resolve('/foo/bar/baz.js')); + t.equal(pkg, undefined); + }); + + resolve('./baz.js', opts('/foo/bar'), function (err, res, pkg) { + if (err) return t.fail(err); + t.equal(res, path.resolve('/foo/bar/baz.js')); + t.equal(pkg, undefined); + }); + + resolve('baz', opts('/foo/bar'), function (err, res) { + t.equal(err.message, "Cannot find module 'baz' from '" + path.resolve('/foo/bar') + "'"); + t.equal(err.code, 'MODULE_NOT_FOUND'); + }); + + resolve('../baz', opts('/foo/bar'), function (err, res) { + t.equal(err.message, "Cannot find module '../baz' from '" + path.resolve('/foo/bar') + "'"); + t.equal(err.code, 'MODULE_NOT_FOUND'); + }); +}); + +test('mock from package', function (t) { + t.plan(8); + + var files = {}; + files[path.resolve('/foo/bar/baz.js')] = 'beep'; + + function opts(basedir) { + return { + basedir: path.resolve(basedir), + isFile: function (file, cb) { + cb(null, Object.prototype.hasOwnProperty.call(files, file)); + }, + 'package': { main: 'bar' }, + readFile: function (file, cb) { + cb(null, files[file]); + } + }; + } + + resolve('./baz', opts('/foo/bar'), function (err, res, pkg) { + if (err) return t.fail(err); + t.equal(res, path.resolve('/foo/bar/baz.js')); + t.equal(pkg && pkg.main, 'bar'); + }); + + resolve('./baz.js', opts('/foo/bar'), function (err, res, pkg) { + if (err) return t.fail(err); + t.equal(res, path.resolve('/foo/bar/baz.js')); + t.equal(pkg && pkg.main, 'bar'); + }); + + resolve('baz', opts('/foo/bar'), function (err, res) { + t.equal(err.message, "Cannot find module 'baz' from '" + path.resolve('/foo/bar') + "'"); + t.equal(err.code, 'MODULE_NOT_FOUND'); + }); + + resolve('../baz', opts('/foo/bar'), function (err, res) { + t.equal(err.message, "Cannot find module '../baz' from '" + path.resolve('/foo/bar') + "'"); + t.equal(err.code, 'MODULE_NOT_FOUND'); + }); +}); + +test('mock package', function (t) { + t.plan(2); + + var files = {}; + files[path.resolve('/foo/node_modules/bar/baz.js')] = 'beep'; + 
files[path.resolve('/foo/node_modules/bar/package.json')] = JSON.stringify({ + main: './baz.js' + }); + + function opts(basedir) { + return { + basedir: path.resolve(basedir), + isFile: function (file, cb) { + cb(null, Object.prototype.hasOwnProperty.call(files, path.resolve(file))); + }, + readFile: function (file, cb) { + cb(null, files[path.resolve(file)]); + } + }; + } + + resolve('bar', opts('/foo'), function (err, res, pkg) { + if (err) return t.fail(err); + t.equal(res, path.resolve('/foo/node_modules/bar/baz.js')); + t.equal(pkg && pkg.main, './baz.js'); + }); +}); + +test('mock package from package', function (t) { + t.plan(2); + + var files = {}; + files[path.resolve('/foo/node_modules/bar/baz.js')] = 'beep'; + files[path.resolve('/foo/node_modules/bar/package.json')] = JSON.stringify({ + main: './baz.js' + }); + + function opts(basedir) { + return { + basedir: path.resolve(basedir), + isFile: function (file, cb) { + cb(null, Object.prototype.hasOwnProperty.call(files, path.resolve(file))); + }, + 'package': { main: 'bar' }, + readFile: function (file, cb) { + cb(null, files[path.resolve(file)]); + } + }; + } + + resolve('bar', opts('/foo'), function (err, res, pkg) { + if (err) return t.fail(err); + t.equal(res, path.resolve('/foo/node_modules/bar/baz.js')); + t.equal(pkg && pkg.main, './baz.js'); + }); +}); diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/mock_sync.js b/node_modules/normalize-package-data/node_modules/resolve/test/mock_sync.js new file mode 100644 index 0000000000000..43af10289fd22 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/mock_sync.js @@ -0,0 +1,67 @@ +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +test('mock', function (t) { + t.plan(4); + + var files = {}; + files[path.resolve('/foo/bar/baz.js')] = 'beep'; + + function opts(basedir) { + return { + basedir: path.resolve(basedir), + isFile: function (file) { + return Object.prototype.hasOwnProperty.call(files, file); + }, + readFileSync: function (file) { + return files[file]; + } + }; + } + + t.equal( + resolve.sync('./baz', opts('/foo/bar')), + path.resolve('/foo/bar/baz.js') + ); + + t.equal( + resolve.sync('./baz.js', opts('/foo/bar')), + path.resolve('/foo/bar/baz.js') + ); + + t.throws(function () { + resolve.sync('baz', opts('/foo/bar')); + }); + + t.throws(function () { + resolve.sync('../baz', opts('/foo/bar')); + }); +}); + +test('mock package', function (t) { + t.plan(1); + + var files = {}; + files[path.resolve('/foo/node_modules/bar/baz.js')] = 'beep'; + files[path.resolve('/foo/node_modules/bar/package.json')] = JSON.stringify({ + main: './baz.js' + }); + + function opts(basedir) { + return { + basedir: path.resolve(basedir), + isFile: function (file) { + return Object.prototype.hasOwnProperty.call(files, file); + }, + readFileSync: function (file) { + return files[file]; + } + }; + } + + t.equal( + resolve.sync('bar', opts('/foo')), + path.resolve('/foo/node_modules/bar/baz.js') + ); +}); diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/module_dir.js b/node_modules/normalize-package-data/node_modules/resolve/test/module_dir.js new file mode 100644 index 0000000000000..b50e5bb1751d6 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/module_dir.js @@ -0,0 +1,56 @@ +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +test('moduleDirectory strings', function (t) { + t.plan(4); + var dir = 
path.join(__dirname, 'module_dir'); + var xopts = { + basedir: dir, + moduleDirectory: 'xmodules' + }; + resolve('aaa', xopts, function (err, res, pkg) { + t.ifError(err); + t.equal(res, path.join(dir, '/xmodules/aaa/index.js')); + }); + + var yopts = { + basedir: dir, + moduleDirectory: 'ymodules' + }; + resolve('aaa', yopts, function (err, res, pkg) { + t.ifError(err); + t.equal(res, path.join(dir, '/ymodules/aaa/index.js')); + }); +}); + +test('moduleDirectory array', function (t) { + t.plan(6); + var dir = path.join(__dirname, 'module_dir'); + var aopts = { + basedir: dir, + moduleDirectory: ['xmodules', 'ymodules', 'zmodules'] + }; + resolve('aaa', aopts, function (err, res, pkg) { + t.ifError(err); + t.equal(res, path.join(dir, '/xmodules/aaa/index.js')); + }); + + var bopts = { + basedir: dir, + moduleDirectory: ['zmodules', 'ymodules', 'xmodules'] + }; + resolve('aaa', bopts, function (err, res, pkg) { + t.ifError(err); + t.equal(res, path.join(dir, '/ymodules/aaa/index.js')); + }); + + var copts = { + basedir: dir, + moduleDirectory: ['xmodules', 'ymodules', 'zmodules'] + }; + resolve('bbb', copts, function (err, res, pkg) { + t.ifError(err); + t.equal(res, path.join(dir, '/zmodules/bbb/main.js')); + }); +}); diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/module_dir/xmodules/aaa/index.js b/node_modules/normalize-package-data/node_modules/resolve/test/module_dir/xmodules/aaa/index.js new file mode 100644 index 0000000000000..dd7cf7b2d022d --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/module_dir/xmodules/aaa/index.js @@ -0,0 +1 @@ +module.exports = function (x) { return x * 100; }; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/module_dir/ymodules/aaa/index.js b/node_modules/normalize-package-data/node_modules/resolve/test/module_dir/ymodules/aaa/index.js new file mode 100644 index 0000000000000..ef2d4d4bf76e6 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/module_dir/ymodules/aaa/index.js @@ -0,0 +1 @@ +module.exports = function (x) { return x + 100; }; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/module_dir/zmodules/bbb/main.js b/node_modules/normalize-package-data/node_modules/resolve/test/module_dir/zmodules/bbb/main.js new file mode 100644 index 0000000000000..e8ba629936a7a --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/module_dir/zmodules/bbb/main.js @@ -0,0 +1 @@ +module.exports = function (n) { return n * 111; }; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/module_dir/zmodules/bbb/package.json b/node_modules/normalize-package-data/node_modules/resolve/test/module_dir/zmodules/bbb/package.json new file mode 100644 index 0000000000000..c13b8cf6acfd3 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/module_dir/zmodules/bbb/package.json @@ -0,0 +1,3 @@ +{ + "main": "main.js" +} diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/node-modules-paths.js b/node_modules/normalize-package-data/node_modules/resolve/test/node-modules-paths.js new file mode 100644 index 0000000000000..1500fb19998bc --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/node-modules-paths.js @@ -0,0 +1,121 @@ +var test = require('tape'); +var path = require('path'); +var parse = path.parse || require('path-parse'); +var keys = require('object-keys'); + +var nodeModulesPaths = 
require('../lib/node-modules-paths'); + +var verifyDirs = function verifyDirs(t, start, dirs, moduleDirectories, paths) { + var moduleDirs = [].concat(moduleDirectories || 'node_modules'); + if (paths) { + for (var k = 0; k < paths.length; ++k) { + moduleDirs.push(path.basename(paths[k])); + } + } + + var foundModuleDirs = {}; + var uniqueDirs = {}; + var parsedDirs = {}; + for (var i = 0; i < dirs.length; ++i) { + var parsed = parse(dirs[i]); + if (!foundModuleDirs[parsed.base]) { foundModuleDirs[parsed.base] = 0; } + foundModuleDirs[parsed.base] += 1; + parsedDirs[parsed.dir] = true; + uniqueDirs[dirs[i]] = true; + } + t.equal(keys(parsedDirs).length >= start.split(path.sep).length, true, 'there are >= dirs than "start" has'); + var foundModuleDirNames = keys(foundModuleDirs); + t.deepEqual(foundModuleDirNames, moduleDirs, 'all desired module dirs were found'); + t.equal(keys(uniqueDirs).length, dirs.length, 'all dirs provided were unique'); + + var counts = {}; + for (var j = 0; j < foundModuleDirNames.length; ++j) { + counts[foundModuleDirs[j]] = true; + } + t.equal(keys(counts).length, 1, 'all found module directories had the same count'); +}; + +test('node-modules-paths', function (t) { + t.test('no options', function (t) { + var start = path.join(__dirname, 'resolver'); + var dirs = nodeModulesPaths(start); + + verifyDirs(t, start, dirs); + + t.end(); + }); + + t.test('empty options', function (t) { + var start = path.join(__dirname, 'resolver'); + var dirs = nodeModulesPaths(start, {}); + + verifyDirs(t, start, dirs); + + t.end(); + }); + + t.test('with paths=array option', function (t) { + var start = path.join(__dirname, 'resolver'); + var paths = ['a', 'b']; + var dirs = nodeModulesPaths(start, { paths: paths }); + + verifyDirs(t, start, dirs, null, paths); + + t.end(); + }); + + t.test('with paths=function option', function (t) { + var paths = function paths(request, absoluteStart, getNodeModulesDirs, opts) { + return getNodeModulesDirs().concat(path.join(absoluteStart, 'not node modules', request)); + }; + + var start = path.join(__dirname, 'resolver'); + var dirs = nodeModulesPaths(start, { paths: paths }, 'pkg'); + + verifyDirs(t, start, dirs, null, [path.join(start, 'not node modules', 'pkg')]); + + t.end(); + }); + + t.test('with paths=function skipping node modules resolution', function (t) { + var paths = function paths(request, absoluteStart, getNodeModulesDirs, opts) { + return []; + }; + var start = path.join(__dirname, 'resolver'); + var dirs = nodeModulesPaths(start, { paths: paths }); + t.deepEqual(dirs, [], 'no node_modules was computed'); + t.end(); + }); + + t.test('with moduleDirectory option', function (t) { + var start = path.join(__dirname, 'resolver'); + var moduleDirectory = 'not node modules'; + var dirs = nodeModulesPaths(start, { moduleDirectory: moduleDirectory }); + + verifyDirs(t, start, dirs, moduleDirectory); + + t.end(); + }); + + t.test('with 1 moduleDirectory and paths options', function (t) { + var start = path.join(__dirname, 'resolver'); + var paths = ['a', 'b']; + var moduleDirectory = 'not node modules'; + var dirs = nodeModulesPaths(start, { paths: paths, moduleDirectory: moduleDirectory }); + + verifyDirs(t, start, dirs, moduleDirectory, paths); + + t.end(); + }); + + t.test('with 1+ moduleDirectory and paths options', function (t) { + var start = path.join(__dirname, 'resolver'); + var paths = ['a', 'b']; + var moduleDirectories = ['not node modules', 'other modules']; + var dirs = nodeModulesPaths(start, { paths: paths, moduleDirectory: 
moduleDirectories }); + + verifyDirs(t, start, dirs, moduleDirectories, paths); + + t.end(); + }); +}); diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/node_path.js b/node_modules/normalize-package-data/node_modules/resolve/test/node_path.js new file mode 100644 index 0000000000000..d06aa4eafe5b5 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/node_path.js @@ -0,0 +1,70 @@ +var fs = require('fs'); +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +test('$NODE_PATH', function (t) { + t.plan(8); + + var isDir = function (dir, cb) { + if (dir === '/node_path' || dir === 'node_path/x') { + return cb(null, true); + } + fs.stat(dir, function (err, stat) { + if (!err) { + return cb(null, stat.isDirectory()); + } + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') return cb(null, false); + return cb(err); + }); + }; + + resolve('aaa', { + paths: [ + path.join(__dirname, '/node_path/x'), + path.join(__dirname, '/node_path/y') + ], + basedir: __dirname, + isDirectory: isDir + }, function (err, res) { + t.error(err); + t.equal(res, path.join(__dirname, '/node_path/x/aaa/index.js'), 'aaa resolves'); + }); + + resolve('bbb', { + paths: [ + path.join(__dirname, '/node_path/x'), + path.join(__dirname, '/node_path/y') + ], + basedir: __dirname, + isDirectory: isDir + }, function (err, res) { + t.error(err); + t.equal(res, path.join(__dirname, '/node_path/y/bbb/index.js'), 'bbb resolves'); + }); + + resolve('ccc', { + paths: [ + path.join(__dirname, '/node_path/x'), + path.join(__dirname, '/node_path/y') + ], + basedir: __dirname, + isDirectory: isDir + }, function (err, res) { + t.error(err); + t.equal(res, path.join(__dirname, '/node_path/x/ccc/index.js'), 'ccc resolves'); + }); + + // ensure that relative paths still resolve against the regular `node_modules` correctly + resolve('tap', { + paths: [ + 'node_path' + ], + basedir: path.join(__dirname, 'node_path/x'), + isDirectory: isDir + }, function (err, res) { + var root = require('tap/package.json').main; + t.error(err); + t.equal(res, path.resolve(__dirname, '..', 'node_modules/tap', root), 'tap resolves'); + }); +}); diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/node_path/x/aaa/index.js b/node_modules/normalize-package-data/node_modules/resolve/test/node_path/x/aaa/index.js new file mode 100644 index 0000000000000..ad70d0bb03f6f --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/node_path/x/aaa/index.js @@ -0,0 +1 @@ +module.exports = 'A'; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/node_path/x/ccc/index.js b/node_modules/normalize-package-data/node_modules/resolve/test/node_path/x/ccc/index.js new file mode 100644 index 0000000000000..a64132e4c7e52 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/node_path/x/ccc/index.js @@ -0,0 +1 @@ +module.exports = 'C'; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/node_path/y/bbb/index.js b/node_modules/normalize-package-data/node_modules/resolve/test/node_path/y/bbb/index.js new file mode 100644 index 0000000000000..4d0f32e243681 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/node_path/y/bbb/index.js @@ -0,0 +1 @@ +module.exports = 'B'; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/node_path/y/ccc/index.js b/node_modules/normalize-package-data/node_modules/resolve/test/node_path/y/ccc/index.js 
new file mode 100644 index 0000000000000..793315e846687 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/node_path/y/ccc/index.js @@ -0,0 +1 @@ +module.exports = 'CY'; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/nonstring.js b/node_modules/normalize-package-data/node_modules/resolve/test/nonstring.js new file mode 100644 index 0000000000000..ef63c40f9393d --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/nonstring.js @@ -0,0 +1,9 @@ +var test = require('tape'); +var resolve = require('../'); + +test('nonstring', function (t) { + t.plan(1); + resolve(555, function (err, res, pkg) { + t.ok(err); + }); +}); diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/pathfilter.js b/node_modules/normalize-package-data/node_modules/resolve/test/pathfilter.js new file mode 100644 index 0000000000000..16519aeae51c4 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/pathfilter.js @@ -0,0 +1,75 @@ +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +var resolverDir = path.join(__dirname, '/pathfilter/deep_ref'); + +var pathFilterFactory = function (t) { + return function (pkg, x, remainder) { + t.equal(pkg.version, '1.2.3'); + t.equal(x, path.join(resolverDir, 'node_modules/deep/ref')); + t.equal(remainder, 'ref'); + return 'alt'; + }; +}; + +test('#62: deep module references and the pathFilter', function (t) { + t.test('deep/ref.js', function (st) { + st.plan(3); + + resolve('deep/ref', { basedir: resolverDir }, function (err, res, pkg) { + if (err) st.fail(err); + + st.equal(pkg.version, '1.2.3'); + st.equal(res, path.join(resolverDir, 'node_modules/deep/ref.js')); + }); + + var res = resolve.sync('deep/ref', { basedir: resolverDir }); + st.equal(res, path.join(resolverDir, 'node_modules/deep/ref.js')); + }); + + t.test('deep/deeper/ref', function (st) { + st.plan(4); + + resolve( + 'deep/deeper/ref', + { basedir: resolverDir }, + function (err, res, pkg) { + if (err) t.fail(err); + st.notEqual(pkg, undefined); + st.equal(pkg.version, '1.2.3'); + st.equal(res, path.join(resolverDir, 'node_modules/deep/deeper/ref.js')); + } + ); + + var res = resolve.sync( + 'deep/deeper/ref', + { basedir: resolverDir } + ); + st.equal(res, path.join(resolverDir, 'node_modules/deep/deeper/ref.js')); + }); + + t.test('deep/ref alt', function (st) { + st.plan(8); + + var pathFilter = pathFilterFactory(st); + + var res = resolve.sync( + 'deep/ref', + { basedir: resolverDir, pathFilter: pathFilter } + ); + st.equal(res, path.join(resolverDir, 'node_modules/deep/alt.js')); + + resolve( + 'deep/ref', + { basedir: resolverDir, pathFilter: pathFilter }, + function (err, res, pkg) { + if (err) st.fail(err); + st.equal(res, path.join(resolverDir, 'node_modules/deep/alt.js')); + st.end(); + } + ); + }); + + t.end(); +}); diff --git a/node_modules/readdir-scoped-modules/test/fixtures/@org/x/.keep b/node_modules/normalize-package-data/node_modules/resolve/test/pathfilter/deep_ref/main.js similarity index 100% rename from node_modules/readdir-scoped-modules/test/fixtures/@org/x/.keep rename to node_modules/normalize-package-data/node_modules/resolve/test/pathfilter/deep_ref/main.js diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/precedence.js b/node_modules/normalize-package-data/node_modules/resolve/test/precedence.js new file mode 100644 index 0000000000000..2febb598fbc06 --- /dev/null +++ 
b/node_modules/normalize-package-data/node_modules/resolve/test/precedence.js @@ -0,0 +1,23 @@ +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +test('precedence', function (t) { + t.plan(3); + var dir = path.join(__dirname, 'precedence/aaa'); + + resolve('./', { basedir: dir }, function (err, res, pkg) { + t.ifError(err); + t.equal(res, path.join(dir, 'index.js')); + t.equal(pkg.name, 'resolve'); + }); +}); + +test('./ should not load ${dir}.js', function (t) { // eslint-disable-line no-template-curly-in-string + t.plan(1); + var dir = path.join(__dirname, 'precedence/bbb'); + + resolve('./', { basedir: dir }, function (err, res, pkg) { + t.ok(err); + }); +}); diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/precedence/aaa.js b/node_modules/normalize-package-data/node_modules/resolve/test/precedence/aaa.js new file mode 100644 index 0000000000000..b83a3e7ad98d1 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/precedence/aaa.js @@ -0,0 +1 @@ +module.exports = 'wtf'; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/precedence/aaa/index.js b/node_modules/normalize-package-data/node_modules/resolve/test/precedence/aaa/index.js new file mode 100644 index 0000000000000..e0f8f6abf72f7 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/precedence/aaa/index.js @@ -0,0 +1 @@ +module.exports = 'okok'; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/precedence/aaa/main.js b/node_modules/normalize-package-data/node_modules/resolve/test/precedence/aaa/main.js new file mode 100644 index 0000000000000..93542a965e0ea --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/precedence/aaa/main.js @@ -0,0 +1 @@ +console.log(require('./')); diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/precedence/bbb.js b/node_modules/normalize-package-data/node_modules/resolve/test/precedence/bbb.js new file mode 100644 index 0000000000000..2298f47fdd16d --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/precedence/bbb.js @@ -0,0 +1 @@ +module.exports = '>_<'; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/precedence/bbb/main.js b/node_modules/normalize-package-data/node_modules/resolve/test/precedence/bbb/main.js new file mode 100644 index 0000000000000..716b81d4bd463 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/precedence/bbb/main.js @@ -0,0 +1 @@ +console.log(require('./')); // should throw diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver.js b/node_modules/normalize-package-data/node_modules/resolve/test/resolver.js new file mode 100644 index 0000000000000..f7fcd292bdff9 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver.js @@ -0,0 +1,429 @@ +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +test('async foo', function (t) { + t.plan(12); + var dir = path.join(__dirname, 'resolver'); + + resolve('./foo', { basedir: dir }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'foo.js')); + t.equal(pkg && pkg.name, 'resolve'); + }); + + resolve('./foo.js', { basedir: dir }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'foo.js')); + t.equal(pkg && pkg.name, 'resolve'); + }); + + resolve('./foo', { basedir: dir, 
'package': { main: 'resolver' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'foo.js')); + t.equal(pkg && pkg.main, 'resolver'); + }); + + resolve('./foo.js', { basedir: dir, 'package': { main: 'resolver' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'foo.js')); + t.equal(pkg.main, 'resolver'); + }); + + resolve('./foo', { basedir: dir, filename: path.join(dir, 'baz.js') }, function (err, res) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'foo.js')); + }); + + resolve('foo', { basedir: dir }, function (err) { + t.equal(err.message, "Cannot find module 'foo' from '" + path.resolve(dir) + "'"); + t.equal(err.code, 'MODULE_NOT_FOUND'); + }); + + // Test that filename is reported as the "from" value when passed. + resolve('foo', { basedir: dir, filename: path.join(dir, 'baz.js') }, function (err) { + t.equal(err.message, "Cannot find module 'foo' from '" + path.join(dir, 'baz.js') + "'"); + }); +}); + +test('bar', function (t) { + t.plan(6); + var dir = path.join(__dirname, 'resolver'); + + resolve('foo', { basedir: dir + '/bar' }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'bar/node_modules/foo/index.js')); + t.equal(pkg, undefined); + }); + + resolve('foo', { basedir: dir + '/bar' }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'bar/node_modules/foo/index.js')); + t.equal(pkg, undefined); + }); + + resolve('foo', { basedir: dir + '/bar', 'package': { main: 'bar' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'bar/node_modules/foo/index.js')); + t.equal(pkg.main, 'bar'); + }); +}); + +test('baz', function (t) { + t.plan(4); + var dir = path.join(__dirname, 'resolver'); + + resolve('./baz', { basedir: dir }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'baz/quux.js')); + t.equal(pkg.main, 'quux.js'); + }); + + resolve('./baz', { basedir: dir, 'package': { main: 'resolver' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'baz/quux.js')); + t.equal(pkg.main, 'quux.js'); + }); +}); + +test('biz', function (t) { + t.plan(24); + var dir = path.join(__dirname, 'resolver/biz/node_modules'); + + resolve('./grux', { basedir: dir }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'grux/index.js')); + t.equal(pkg, undefined); + }); + + resolve('./grux', { basedir: dir, 'package': { main: 'biz' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'grux/index.js')); + t.equal(pkg.main, 'biz'); + }); + + resolve('./garply', { basedir: dir }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'garply/lib/index.js')); + t.equal(pkg.main, './lib'); + }); + + resolve('./garply', { basedir: dir, 'package': { main: 'biz' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'garply/lib/index.js')); + t.equal(pkg.main, './lib'); + }); + + resolve('tiv', { basedir: dir + '/grux' }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'tiv/index.js')); + t.equal(pkg, undefined); + }); + + resolve('tiv', { basedir: dir + '/grux', 'package': { main: 'grux' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'tiv/index.js')); + t.equal(pkg.main, 'grux'); + }); + + resolve('tiv', { basedir: dir + '/garply' }, function (err, res, pkg) { + if (err) t.fail(err); + 
t.equal(res, path.join(dir, 'tiv/index.js')); + t.equal(pkg, undefined); + }); + + resolve('tiv', { basedir: dir + '/garply', 'package': { main: './lib' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'tiv/index.js')); + t.equal(pkg.main, './lib'); + }); + + resolve('grux', { basedir: dir + '/tiv' }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'grux/index.js')); + t.equal(pkg, undefined); + }); + + resolve('grux', { basedir: dir + '/tiv', 'package': { main: 'tiv' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'grux/index.js')); + t.equal(pkg.main, 'tiv'); + }); + + resolve('garply', { basedir: dir + '/tiv' }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'garply/lib/index.js')); + t.equal(pkg.main, './lib'); + }); + + resolve('garply', { basedir: dir + '/tiv', 'package': { main: 'tiv' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'garply/lib/index.js')); + t.equal(pkg.main, './lib'); + }); +}); + +test('quux', function (t) { + t.plan(2); + var dir = path.join(__dirname, 'resolver/quux'); + + resolve('./foo', { basedir: dir, 'package': { main: 'quux' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'foo/index.js')); + t.equal(pkg.main, 'quux'); + }); +}); + +test('normalize', function (t) { + t.plan(2); + var dir = path.join(__dirname, 'resolver/biz/node_modules/grux'); + + resolve('../grux', { basedir: dir }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'index.js')); + t.equal(pkg, undefined); + }); +}); + +test('cup', function (t) { + t.plan(5); + var dir = path.join(__dirname, 'resolver'); + + resolve('./cup', { basedir: dir, extensions: ['.js', '.coffee'] }, function (err, res) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'cup.coffee')); + }); + + resolve('./cup.coffee', { basedir: dir }, function (err, res) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'cup.coffee')); + }); + + resolve('./cup', { basedir: dir, extensions: ['.js'] }, function (err, res) { + t.equal(err.message, "Cannot find module './cup' from '" + path.resolve(dir) + "'"); + t.equal(err.code, 'MODULE_NOT_FOUND'); + }); + + // Test that filename is reported as the "from" value when passed. 
+ resolve('./cup', { basedir: dir, extensions: ['.js'], filename: path.join(dir, 'cupboard.js') }, function (err, res) { + t.equal(err.message, "Cannot find module './cup' from '" + path.join(dir, 'cupboard.js') + "'"); + }); +}); + +test('mug', function (t) { + t.plan(3); + var dir = path.join(__dirname, 'resolver'); + + resolve('./mug', { basedir: dir }, function (err, res) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'mug.js')); + }); + + resolve('./mug', { basedir: dir, extensions: ['.coffee', '.js'] }, function (err, res) { + if (err) t.fail(err); + t.equal(res, path.join(dir, '/mug.coffee')); + }); + + resolve('./mug', { basedir: dir, extensions: ['.js', '.coffee'] }, function (err, res) { + t.equal(res, path.join(dir, '/mug.js')); + }); +}); + +test('other path', function (t) { + t.plan(6); + var resolverDir = path.join(__dirname, 'resolver'); + var dir = path.join(resolverDir, 'bar'); + var otherDir = path.join(resolverDir, 'other_path'); + + resolve('root', { basedir: dir, paths: [otherDir] }, function (err, res) { + if (err) t.fail(err); + t.equal(res, path.join(resolverDir, 'other_path/root.js')); + }); + + resolve('lib/other-lib', { basedir: dir, paths: [otherDir] }, function (err, res) { + if (err) t.fail(err); + t.equal(res, path.join(resolverDir, 'other_path/lib/other-lib.js')); + }); + + resolve('root', { basedir: dir }, function (err, res) { + t.equal(err.message, "Cannot find module 'root' from '" + path.resolve(dir) + "'"); + t.equal(err.code, 'MODULE_NOT_FOUND'); + }); + + resolve('zzz', { basedir: dir, paths: [otherDir] }, function (err, res) { + t.equal(err.message, "Cannot find module 'zzz' from '" + path.resolve(dir) + "'"); + t.equal(err.code, 'MODULE_NOT_FOUND'); + }); +}); + +test('incorrect main', function (t) { + t.plan(1); + + var resolverDir = path.join(__dirname, 'resolver'); + var dir = path.join(resolverDir, 'incorrect_main'); + + resolve('./incorrect_main', { basedir: resolverDir }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'index.js')); + }); +}); + +test('without basedir', function (t) { + t.plan(1); + + var dir = path.join(__dirname, 'resolver/without_basedir'); + var tester = require(path.join(dir, 'main.js')); + + tester(t, function (err, res, pkg) { + if (err) { + t.fail(err); + } else { + t.equal(res, path.join(dir, 'node_modules/mymodule.js')); + } + }); +}); + +test('#25: node modules with the same name as node stdlib modules', function (t) { + t.plan(1); + + var resolverDir = path.join(__dirname, 'resolver/punycode'); + + resolve('punycode', { basedir: resolverDir }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(resolverDir, 'node_modules/punycode/index.js')); + }); +}); + +test('#52 - incorrectly resolves module-paths like "./someFolder/" when there is a file of the same name', function (t) { + t.plan(2); + + var dir = path.join(__dirname, 'resolver'); + + resolve('./foo', { basedir: path.join(dir, 'same_names') }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'same_names/foo.js')); + }); + + resolve('./foo/', { basedir: path.join(dir, 'same_names') }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'same_names/foo/index.js')); + }); +}); + +test('async: #121 - treating an existing file as a dir when no basedir', function (t) { + var testFile = path.basename(__filename); + + t.test('sanity check', function (st) { + st.plan(1); + resolve('./' + testFile, function (err, res, pkg) { + if (err) 
t.fail(err); + st.equal(res, __filename, 'sanity check'); + }); + }); + + t.test('with a fake directory', function (st) { + st.plan(4); + + resolve('./' + testFile + '/blah', function (err, res, pkg) { + st.ok(err, 'there is an error'); + st.notOk(res, 'no result'); + + st.equal(err && err.code, 'MODULE_NOT_FOUND', 'error code matches require.resolve'); + st.equal( + err && err.message, + 'Cannot find module \'./' + testFile + '/blah\' from \'' + __dirname + '\'', + 'can not find nonexistent module' + ); + st.end(); + }); + }); + + t.end(); +}); + +test('async dot main', function (t) { + var start = new Date(); + t.plan(3); + resolve('./resolver/dot_main', function (err, ret) { + t.notOk(err); + t.equal(ret, path.join(__dirname, 'resolver/dot_main/index.js')); + t.ok(new Date() - start < 50, 'resolve.sync timedout'); + t.end(); + }); +}); + +test('async dot slash main', function (t) { + var start = new Date(); + t.plan(3); + resolve('./resolver/dot_slash_main', function (err, ret) { + t.notOk(err); + t.equal(ret, path.join(__dirname, 'resolver/dot_slash_main/index.js')); + t.ok(new Date() - start < 50, 'resolve.sync timedout'); + t.end(); + }); +}); + +test('not a directory', function (t) { + t.plan(6); + var path = './foo'; + resolve(path, { basedir: __filename }, function (err, res, pkg) { + t.ok(err, 'a non-directory errors'); + t.equal(arguments.length, 1); + t.equal(res, undefined); + t.equal(pkg, undefined); + + t.equal(err && err.message, 'Cannot find module \'' + path + '\' from \'' + __filename + '\''); + t.equal(err && err.code, 'MODULE_NOT_FOUND'); + }); +}); + +test('non-string "main" field in package.json', function (t) { + t.plan(5); + + var dir = path.join(__dirname, 'resolver'); + resolve('./invalid_main', { basedir: dir }, function (err, res, pkg) { + t.ok(err, 'errors on non-string main'); + t.equal(err.message, 'package “invalid main” `main` must be a string'); + t.equal(err.code, 'INVALID_PACKAGE_MAIN'); + t.equal(res, undefined, 'res is undefined'); + t.equal(pkg, undefined, 'pkg is undefined'); + }); +}); + +test('non-string "main" field in package.json', function (t) { + t.plan(5); + + var dir = path.join(__dirname, 'resolver'); + resolve('./invalid_main', { basedir: dir }, function (err, res, pkg) { + t.ok(err, 'errors on non-string main'); + t.equal(err.message, 'package “invalid main” `main` must be a string'); + t.equal(err.code, 'INVALID_PACKAGE_MAIN'); + t.equal(res, undefined, 'res is undefined'); + t.equal(pkg, undefined, 'pkg is undefined'); + }); +}); + +test('browser field in package.json', function (t) { + t.plan(3); + + var dir = path.join(__dirname, 'resolver'); + resolve( + './browser_field', + { + basedir: dir, + packageFilter: function packageFilter(pkg) { + if (pkg.browser) { + pkg.main = pkg.browser; + delete pkg.browser; + } + return pkg; + } + }, + function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'browser_field', 'b.js')); + t.equal(pkg && pkg.main, 'b'); + t.equal(pkg && pkg.browser, undefined); + } + ); +}); diff --git a/node_modules/readdir-scoped-modules/test/fixtures/@org/y/.keep b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/baz/doom.js similarity index 100% rename from node_modules/readdir-scoped-modules/test/fixtures/@org/y/.keep rename to node_modules/normalize-package-data/node_modules/resolve/test/resolver/baz/doom.js diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/baz/package.json 
b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/baz/package.json new file mode 100644 index 0000000000000..c41e4dbf73d99 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/baz/package.json @@ -0,0 +1,3 @@ +{ + "main": "quux.js" +} diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/baz/quux.js b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/baz/quux.js new file mode 100644 index 0000000000000..bd816eaba4ca3 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/baz/quux.js @@ -0,0 +1 @@ +module.exports = 1; diff --git a/node_modules/readdir-scoped-modules/test/fixtures/@scope/x/.keep b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/browser_field/a.js similarity index 100% rename from node_modules/readdir-scoped-modules/test/fixtures/@scope/x/.keep rename to node_modules/normalize-package-data/node_modules/resolve/test/resolver/browser_field/a.js diff --git a/node_modules/readdir-scoped-modules/test/fixtures/@scope/y/.keep b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/browser_field/b.js similarity index 100% rename from node_modules/readdir-scoped-modules/test/fixtures/@scope/y/.keep rename to node_modules/normalize-package-data/node_modules/resolve/test/resolver/browser_field/b.js diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/browser_field/package.json b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/browser_field/package.json new file mode 100644 index 0000000000000..bf406f0830f8a --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/browser_field/package.json @@ -0,0 +1,5 @@ +{ + "name": "browser_field", + "main": "a", + "browser": "b" +} diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/cup.coffee b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/cup.coffee new file mode 100644 index 0000000000000..8b137891791fe --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/cup.coffee @@ -0,0 +1 @@ + diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/dot_main/index.js b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/dot_main/index.js new file mode 100644 index 0000000000000..bd816eaba4ca3 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/dot_main/index.js @@ -0,0 +1 @@ +module.exports = 1; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/dot_main/package.json b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/dot_main/package.json new file mode 100644 index 0000000000000..d7f4fc8079f60 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/dot_main/package.json @@ -0,0 +1,3 @@ +{ + "main": "." 
+} diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/dot_slash_main/index.js b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/dot_slash_main/index.js new file mode 100644 index 0000000000000..bd816eaba4ca3 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/dot_slash_main/index.js @@ -0,0 +1 @@ +module.exports = 1; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/dot_slash_main/package.json b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/dot_slash_main/package.json new file mode 100644 index 0000000000000..f51287b9d1e73 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/dot_slash_main/package.json @@ -0,0 +1,3 @@ +{ + "main": "./" +} diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/foo.js b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/foo.js new file mode 100644 index 0000000000000..bd816eaba4ca3 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/foo.js @@ -0,0 +1 @@ +module.exports = 1; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/incorrect_main/index.js b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/incorrect_main/index.js new file mode 100644 index 0000000000000..bc1fb0a6f4ede --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/incorrect_main/index.js @@ -0,0 +1,2 @@ +// this is the actual main file 'index.js', not 'wrong.js' like the package.json would indicate +module.exports = 1; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/incorrect_main/package.json b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/incorrect_main/package.json new file mode 100644 index 0000000000000..b7188041763f8 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/incorrect_main/package.json @@ -0,0 +1,3 @@ +{ + "main": "wrong.js" +} diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/invalid_main/package.json b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/invalid_main/package.json new file mode 100644 index 0000000000000..0cf8279950c38 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/invalid_main/package.json @@ -0,0 +1,7 @@ +{ + "name": "invalid main", + "main": [ + "why is this a thing", + "srsly omg wtf" + ] +} diff --git a/node_modules/readdir-scoped-modules/test/fixtures/a/x/.keep b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/mug.coffee similarity index 100% rename from node_modules/readdir-scoped-modules/test/fixtures/a/x/.keep rename to node_modules/normalize-package-data/node_modules/resolve/test/resolver/mug.coffee diff --git a/node_modules/readdir-scoped-modules/test/fixtures/a/y/.keep b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/mug.js similarity index 100% rename from node_modules/readdir-scoped-modules/test/fixtures/a/y/.keep rename to node_modules/normalize-package-data/node_modules/resolve/test/resolver/mug.js diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/multirepo/lerna.json b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/multirepo/lerna.json new file mode 100644 index 
0000000000000..d6707ca0cd64d --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/multirepo/lerna.json @@ -0,0 +1,6 @@ +{ + "packages": [ + "packages/*" + ], + "version": "0.0.0" +} diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/multirepo/package.json b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/multirepo/package.json new file mode 100644 index 0000000000000..8508f9d2c4a2d --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/multirepo/package.json @@ -0,0 +1,20 @@ +{ + "name": "monorepo-symlink-test", + "private": true, + "version": "0.0.0", + "description": "", + "main": "index.js", + "scripts": { + "postinstall": "lerna bootstrap", + "test": "node packages/package-a" + }, + "author": "", + "license": "MIT", + "dependencies": { + "jquery": "^3.3.1", + "resolve": "../../../" + }, + "devDependencies": { + "lerna": "^3.4.3" + } +} diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/multirepo/packages/package-a/index.js b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/multirepo/packages/package-a/index.js new file mode 100644 index 0000000000000..8875a32df0ffc --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/multirepo/packages/package-a/index.js @@ -0,0 +1,35 @@ +'use strict'; + +var assert = require('assert'); +var path = require('path'); +var resolve = require('resolve'); + +var basedir = __dirname + '/node_modules/@my-scope/package-b'; + +var expected = path.join(__dirname, '../../node_modules/jquery/dist/jquery.js'); + +/* + * preserveSymlinks === false + * will search NPM package from + * - packages/package-b/node_modules + * - packages/node_modules + * - node_modules + */ +assert.equal(resolve.sync('jquery', { basedir: basedir, preserveSymlinks: false }), expected); +assert.equal(resolve.sync('../../node_modules/jquery', { basedir: basedir, preserveSymlinks: false }), expected); + +/* + * preserveSymlinks === true + * will search NPM package from + * - packages/package-a/node_modules/@my-scope/packages/package-b/node_modules + * - packages/package-a/node_modules/@my-scope/packages/node_modules + * - packages/package-a/node_modules/@my-scope/node_modules + * - packages/package-a/node_modules/node_modules + * - packages/package-a/node_modules + * - packages/node_modules + * - node_modules + */ +assert.equal(resolve.sync('jquery', { basedir: basedir, preserveSymlinks: true }), expected); +assert.equal(resolve.sync('../../../../../node_modules/jquery', { basedir: basedir, preserveSymlinks: true }), expected); + +console.log(' * all monorepo paths successfully resolved through symlinks'); diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/multirepo/packages/package-a/package.json b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/multirepo/packages/package-a/package.json new file mode 100644 index 0000000000000..204de51e05878 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/multirepo/packages/package-a/package.json @@ -0,0 +1,14 @@ +{ + "name": "@my-scope/package-a", + "version": "0.0.0", + "private": true, + "description": "", + "license": "MIT", + "main": "index.js", + "scripts": { + "test": "echo \"Error: run tests from root\" && exit 1" + }, + "dependencies": { + "@my-scope/package-b": "^0.0.0" + } +} diff --git 
a/node_modules/readdir-scoped-modules/test/fixtures/b/x/.keep b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/multirepo/packages/package-b/index.js similarity index 100% rename from node_modules/readdir-scoped-modules/test/fixtures/b/x/.keep rename to node_modules/normalize-package-data/node_modules/resolve/test/resolver/multirepo/packages/package-b/index.js diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/multirepo/packages/package-b/package.json b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/multirepo/packages/package-b/package.json new file mode 100644 index 0000000000000..f57c3b5f5e454 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/multirepo/packages/package-b/package.json @@ -0,0 +1,14 @@ +{ + "name": "@my-scope/package-b", + "private": true, + "version": "0.0.0", + "description": "", + "license": "MIT", + "main": "index.js", + "scripts": { + "test": "echo \"Error: run tests from root\" && exit 1" + }, + "dependencies": { + "@my-scope/package-a": "^0.0.0" + } +} diff --git a/node_modules/readdir-scoped-modules/test/fixtures/b/y/.keep b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/other_path/lib/other-lib.js similarity index 100% rename from node_modules/readdir-scoped-modules/test/fixtures/b/y/.keep rename to node_modules/normalize-package-data/node_modules/resolve/test/resolver/other_path/lib/other-lib.js diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/other_path/root.js b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/other_path/root.js new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/quux/foo/index.js b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/quux/foo/index.js new file mode 100644 index 0000000000000..bd816eaba4ca3 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/quux/foo/index.js @@ -0,0 +1 @@ +module.exports = 1; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/same_names/foo.js b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/same_names/foo.js new file mode 100644 index 0000000000000..888cae37af95c --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/same_names/foo.js @@ -0,0 +1 @@ +module.exports = 42; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/same_names/foo/index.js b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/same_names/foo/index.js new file mode 100644 index 0000000000000..bd816eaba4ca3 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/same_names/foo/index.js @@ -0,0 +1 @@ +module.exports = 1; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/symlinked/_/node_modules/foo.js b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/symlinked/_/node_modules/foo.js new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/symlinked/_/symlink_target/.gitkeep b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/symlinked/_/symlink_target/.gitkeep new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git 
a/node_modules/normalize-package-data/node_modules/resolve/test/resolver/without_basedir/main.js b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/without_basedir/main.js new file mode 100644 index 0000000000000..5b31975be69da --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver/without_basedir/main.js @@ -0,0 +1,5 @@ +var resolve = require('../../../'); + +module.exports = function (t, cb) { + resolve('mymodule', null, cb); +}; diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/resolver_sync.js b/node_modules/normalize-package-data/node_modules/resolve/test/resolver_sync.js new file mode 100644 index 0000000000000..f33143903c140 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/resolver_sync.js @@ -0,0 +1,340 @@ +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +test('foo', function (t) { + var dir = path.join(__dirname, 'resolver'); + + t.equal( + resolve.sync('./foo', { basedir: dir }), + path.join(dir, 'foo.js') + ); + + t.equal( + resolve.sync('./foo.js', { basedir: dir }), + path.join(dir, 'foo.js') + ); + + t.equal( + resolve.sync('./foo.js', { basedir: dir, filename: path.join(dir, 'bar.js') }), + path.join(dir, 'foo.js') + ); + + t.throws(function () { + resolve.sync('foo', { basedir: dir }); + }); + + // Test that filename is reported as the "from" value when passed. + t.throws( + function () { + resolve.sync('foo', { basedir: dir, filename: path.join(dir, 'bar.js') }); + }, + { + name: 'Error', + message: "Cannot find module 'foo' from '" + path.join(dir, 'bar.js') + "'" + } + ); + + t.end(); +}); + +test('bar', function (t) { + var dir = path.join(__dirname, 'resolver'); + + t.equal( + resolve.sync('foo', { basedir: path.join(dir, 'bar') }), + path.join(dir, 'bar/node_modules/foo/index.js') + ); + t.end(); +}); + +test('baz', function (t) { + var dir = path.join(__dirname, 'resolver'); + + t.equal( + resolve.sync('./baz', { basedir: dir }), + path.join(dir, 'baz/quux.js') + ); + t.end(); +}); + +test('biz', function (t) { + var dir = path.join(__dirname, 'resolver/biz/node_modules'); + t.equal( + resolve.sync('./grux', { basedir: dir }), + path.join(dir, 'grux/index.js') + ); + + t.equal( + resolve.sync('tiv', { basedir: path.join(dir, 'grux') }), + path.join(dir, 'tiv/index.js') + ); + + t.equal( + resolve.sync('grux', { basedir: path.join(dir, 'tiv') }), + path.join(dir, 'grux/index.js') + ); + t.end(); +}); + +test('normalize', function (t) { + var dir = path.join(__dirname, 'resolver/biz/node_modules/grux'); + t.equal( + resolve.sync('../grux', { basedir: dir }), + path.join(dir, 'index.js') + ); + t.end(); +}); + +test('cup', function (t) { + var dir = path.join(__dirname, 'resolver'); + t.equal( + resolve.sync('./cup', { + basedir: dir, + extensions: ['.js', '.coffee'] + }), + path.join(dir, 'cup.coffee') + ); + + t.equal( + resolve.sync('./cup.coffee', { basedir: dir }), + path.join(dir, 'cup.coffee') + ); + + t.throws(function () { + resolve.sync('./cup', { + basedir: dir, + extensions: ['.js'] + }); + }); + + t.end(); +}); + +test('mug', function (t) { + var dir = path.join(__dirname, 'resolver'); + t.equal( + resolve.sync('./mug', { basedir: dir }), + path.join(dir, 'mug.js') + ); + + t.equal( + resolve.sync('./mug', { + basedir: dir, + extensions: ['.coffee', '.js'] + }), + path.join(dir, 'mug.coffee') + ); + + t.equal( + resolve.sync('./mug', { + basedir: dir, + extensions: ['.js', '.coffee'] + }), + 
path.join(dir, 'mug.js') + ); + + t.end(); +}); + +test('other path', function (t) { + var resolverDir = path.join(__dirname, 'resolver'); + var dir = path.join(resolverDir, 'bar'); + var otherDir = path.join(resolverDir, 'other_path'); + + t.equal( + resolve.sync('root', { + basedir: dir, + paths: [otherDir] + }), + path.join(resolverDir, 'other_path/root.js') + ); + + t.equal( + resolve.sync('lib/other-lib', { + basedir: dir, + paths: [otherDir] + }), + path.join(resolverDir, 'other_path/lib/other-lib.js') + ); + + t.throws(function () { + resolve.sync('root', { basedir: dir }); + }); + + t.throws(function () { + resolve.sync('zzz', { + basedir: dir, + paths: [otherDir] + }); + }); + + t.end(); +}); + +test('incorrect main', function (t) { + var resolverDir = path.join(__dirname, 'resolver'); + var dir = path.join(resolverDir, 'incorrect_main'); + + t.equal( + resolve.sync('./incorrect_main', { basedir: resolverDir }), + path.join(dir, 'index.js') + ); + + t.end(); +}); + +test('#25: node modules with the same name as node stdlib modules', function (t) { + var resolverDir = path.join(__dirname, 'resolver/punycode'); + + t.equal( + resolve.sync('punycode', { basedir: resolverDir }), + path.join(resolverDir, 'node_modules/punycode/index.js') + ); + + t.end(); +}); + +var stubStatSync = function stubStatSync(fn) { + var fs = require('fs'); + var statSync = fs.statSync; + try { + fs.statSync = function () { + throw new EvalError('Unknown Error'); + }; + return fn(); + } finally { + fs.statSync = statSync; + } +}; + +test('#79 - re-throw non ENOENT errors from stat', function (t) { + var dir = path.join(__dirname, 'resolver'); + + stubStatSync(function () { + t.throws(function () { + resolve.sync('foo', { basedir: dir }); + }, /Unknown Error/); + }); + + t.end(); +}); + +test('#52 - incorrectly resolves module-paths like "./someFolder/" when there is a file of the same name', function (t) { + var dir = path.join(__dirname, 'resolver'); + + t.equal( + resolve.sync('./foo', { basedir: path.join(dir, 'same_names') }), + path.join(dir, 'same_names/foo.js') + ); + t.equal( + resolve.sync('./foo/', { basedir: path.join(dir, 'same_names') }), + path.join(dir, 'same_names/foo/index.js') + ); + t.end(); +}); + +test('sync: #121 - treating an existing file as a dir when no basedir', function (t) { + var testFile = path.basename(__filename); + + t.test('sanity check', function (st) { + st.equal( + resolve.sync('./' + testFile), + __filename, + 'sanity check' + ); + st.end(); + }); + + t.test('with a fake directory', function (st) { + function run() { return resolve.sync('./' + testFile + '/blah'); } + + st.throws(run, 'throws an error'); + + try { + run(); + } catch (e) { + st.equal(e.code, 'MODULE_NOT_FOUND', 'error code matches require.resolve'); + st.equal( + e.message, + 'Cannot find module \'./' + testFile + '/blah\' from \'' + __dirname + '\'', + 'can not find nonexistent module' + ); + } + + st.end(); + }); + + t.end(); +}); + +test('sync dot main', function (t) { + var start = new Date(); + t.equal(resolve.sync('./resolver/dot_main'), path.join(__dirname, 'resolver/dot_main/index.js')); + t.ok(new Date() - start < 50, 'resolve.sync timedout'); + t.end(); +}); + +test('sync dot slash main', function (t) { + var start = new Date(); + t.equal(resolve.sync('./resolver/dot_slash_main'), path.join(__dirname, 'resolver/dot_slash_main/index.js')); + t.ok(new Date() - start < 50, 'resolve.sync timedout'); + t.end(); +}); + +test('not a directory', function (t) { + var path = './foo'; + try { + 
resolve.sync(path, { basedir: __filename }); + t.fail(); + } catch (err) { + t.ok(err, 'a non-directory errors'); + t.equal(err && err.message, 'Cannot find module \'' + path + "' from '" + __filename + "'"); + t.equal(err && err.code, 'MODULE_NOT_FOUND'); + } + t.end(); +}); + +test('non-string "main" field in package.json', function (t) { + var dir = path.join(__dirname, 'resolver'); + try { + var result = resolve.sync('./invalid_main', { basedir: dir }); + t.equal(result, undefined, 'result should not exist'); + t.fail('should not get here'); + } catch (err) { + t.ok(err, 'errors on non-string main'); + t.equal(err.message, 'package “invalid main” `main` must be a string'); + t.equal(err.code, 'INVALID_PACKAGE_MAIN'); + } + t.end(); +}); + +test('non-string "main" field in package.json', function (t) { + var dir = path.join(__dirname, 'resolver'); + try { + var result = resolve.sync('./invalid_main', { basedir: dir }); + t.equal(result, undefined, 'result should not exist'); + t.fail('should not get here'); + } catch (err) { + t.ok(err, 'errors on non-string main'); + t.equal(err.message, 'package “invalid main” `main` must be a string'); + t.equal(err.code, 'INVALID_PACKAGE_MAIN'); + } + t.end(); +}); + +test('browser field in package.json', function (t) { + var dir = path.join(__dirname, 'resolver'); + var res = resolve.sync('./browser_field', { + basedir: dir, + packageFilter: function packageFilter(pkg) { + if (pkg.browser) { + pkg.main = pkg.browser; + delete pkg.browser; + } + return pkg; + } + }); + t.equal(res, path.join(dir, 'browser_field', 'b.js')); + t.end(); +}); diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/subdirs.js b/node_modules/normalize-package-data/node_modules/resolve/test/subdirs.js new file mode 100644 index 0000000000000..b7b8450a9ef23 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/subdirs.js @@ -0,0 +1,13 @@ +var test = require('tape'); +var resolve = require('../'); +var path = require('path'); + +test('subdirs', function (t) { + t.plan(2); + + var dir = path.join(__dirname, '/subdirs'); + resolve('a/b/c/x.json', { basedir: dir }, function (err, res) { + t.ifError(err); + t.equal(res, path.join(dir, 'node_modules/a/b/c/x.json')); + }); +}); diff --git a/node_modules/normalize-package-data/node_modules/resolve/test/symlinks.js b/node_modules/normalize-package-data/node_modules/resolve/test/symlinks.js new file mode 100644 index 0000000000000..ea95a1e1a20fb --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/resolve/test/symlinks.js @@ -0,0 +1,56 @@ +var path = require('path'); +var fs = require('fs'); +var test = require('tape'); +var resolve = require('../'); + +var symlinkDir = path.join(__dirname, 'resolver', 'symlinked', 'symlink'); +try { + fs.unlinkSync(symlinkDir); +} catch (err) {} +try { + fs.symlinkSync('./_/symlink_target', symlinkDir, 'dir'); +} catch (err) { + // if fails then it is probably on Windows and lets try to create a junction + fs.symlinkSync(path.join(__dirname, 'resolver', 'symlinked', '_', 'symlink_target') + '\\', symlinkDir, 'junction'); +} + +test('symlink', function (t) { + t.plan(2); + + resolve('foo', { basedir: symlinkDir, preserveSymlinks: false }, function (err, res, pkg) { + t.error(err); + t.equal(res, path.join(__dirname, 'resolver', 'symlinked', '_', 'node_modules', 'foo.js')); + }); +}); + +test('sync symlink when preserveSymlinks = true', function (t) { + t.plan(4); + + resolve('foo', { basedir: symlinkDir }, function (err, res, pkg) { + 
t.ok(err, 'there is an error'); + t.notOk(res, 'no result'); + + t.equal(err && err.code, 'MODULE_NOT_FOUND', 'error code matches require.resolve'); + t.equal( + err && err.message, + 'Cannot find module \'foo\' from \'' + symlinkDir + '\'', + 'can not find nonexistent module' + ); + }); +}); + +test('sync symlink', function (t) { + var start = new Date(); + t.doesNotThrow(function () { + t.equal(resolve.sync('foo', { basedir: symlinkDir, preserveSymlinks: false }), path.join(__dirname, 'resolver', 'symlinked', '_', 'node_modules', 'foo.js')); + }); + t.ok(new Date() - start < 50, 'resolve.sync timedout'); + t.end(); +}); + +test('sync symlink when preserveSymlinks = true', function (t) { + t.throws(function () { + resolve.sync('foo', { basedir: symlinkDir }); + }, /Cannot find module 'foo'/); + t.end(); +}); diff --git a/node_modules/normalize-package-data/package.json b/node_modules/normalize-package-data/package.json index 37151c79c9f83..a408d8c0ab602 100644 --- a/node_modules/normalize-package-data/package.json +++ b/node_modules/normalize-package-data/package.json @@ -1,36 +1,34 @@ { - "_args": [ - [ - "normalize-package-data@2.4.0", - "/Users/rebecca/code/npm" - ] - ], - "_from": "normalize-package-data@2.4.0", - "_id": "normalize-package-data@2.4.0", + "_from": "normalize-package-data@2.5.0", + "_id": "normalize-package-data@2.5.0", "_inBundle": false, - "_integrity": "sha512-9jjUFbTPfEy3R/ad/2oNbKtW9Hgovl5O1FvFWKkKblNXoN/Oou6+9+KKohPK13Yc3/TyunyWhJp6gvRNR/PPAw==", + "_integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", "_location": "/normalize-package-data", - "_phantomChildren": {}, + "_phantomChildren": { + "path-parse": "1.0.6" + }, "_requested": { "type": "version", "registry": true, - "raw": "normalize-package-data@2.4.0", + "raw": "normalize-package-data@2.5.0", "name": "normalize-package-data", "escapedName": "normalize-package-data", - "rawSpec": "2.4.0", + "rawSpec": "2.5.0", "saveSpec": null, - "fetchSpec": "2.4.0" + "fetchSpec": "2.5.0" }, "_requiredBy": [ + "#USER", "/", - "/npm-registry-client", + "/libnpmpublish", "/pacote", "/read-package-json", "/read-pkg" ], - "_resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.4.0.tgz", - "_spec": "2.4.0", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "_shasum": "e66db1838b200c1dfc233225d12cb36520e234a8", + "_spec": "normalize-package-data@2.5.0", + "_where": "/Users/aeschright/code/npm-release", "author": { "name": "Meryn Stol", "email": "merynstol@gmail.com" @@ -38,6 +36,7 @@ "bugs": { "url": "https://github.com/npm/normalize-package-data/issues" }, + "bundleDependencies": false, "contributors": [ { "name": "Isaac Z. 
Schlueter", @@ -54,14 +53,15 @@ ], "dependencies": { "hosted-git-info": "^2.1.4", - "is-builtin-module": "^1.0.0", + "resolve": "^1.10.0", "semver": "2 || 3 || 4 || 5", "validate-npm-package-license": "^3.0.1" }, + "deprecated": false, "description": "Normalizes data that can be found in package.json files.", "devDependencies": { - "async": "^1.5.0", - "tap": "^2.2.0", + "async": "^2.6.1", + "tap": "^12.4.0", "underscore": "^1.8.3" }, "files": [ @@ -80,5 +80,5 @@ "scripts": { "test": "tap test/*.js" }, - "version": "2.4.0" + "version": "2.5.0" } diff --git a/node_modules/npm-audit-report/CHANGELOG.md b/node_modules/npm-audit-report/CHANGELOG.md index 4cf6a1acda0a3..941a18741b600 100644 --- a/node_modules/npm-audit-report/CHANGELOG.md +++ b/node_modules/npm-audit-report/CHANGELOG.md @@ -2,6 +2,18 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [1.3.2](https://github.com/npm/npm-audit-report/compare/v1.3.1...v1.3.2) (2018-12-18) + + +### Bug Fixes + +* **parseable:** add support for critical vulns and more resolves on update/install action ([#28](https://github.com/npm/npm-audit-report/issues/28)) ([5e27893](https://github.com/npm/npm-audit-report/commit/5e27893)) +* **security:** audit fix ([ff9faf3](https://github.com/npm/npm-audit-report/commit/ff9faf3)) +* **urls:** Replace hardcoded URL to advisory with a URL from audit response ([#34](https://github.com/npm/npm-audit-report/issues/34)) ([e2fe95b](https://github.com/npm/npm-audit-report/commit/e2fe95b)) + + + ## [1.3.1](https://github.com/npm/npm-audit-report/compare/v1.3.0...v1.3.1) (2018-07-10) diff --git a/node_modules/npm-audit-report/package.json b/node_modules/npm-audit-report/package.json index 0f76601e27020..905c0ce33da3d 100644 --- a/node_modules/npm-audit-report/package.json +++ b/node_modules/npm-audit-report/package.json @@ -1,27 +1,27 @@ { - "_from": "npm-audit-report@^1.2.1", - "_id": "npm-audit-report@1.3.1", + "_from": "npm-audit-report@1.3.2", + "_id": "npm-audit-report@1.3.2", "_inBundle": false, - "_integrity": "sha512-SjTF8ZP4rOu3JiFrTMi4M1CmVo2tni2sP4TzhyCMHwnMGf6XkdGLZKt9cdZ12esKf0mbQqFyU9LtY0SoeahL7g==", + "_integrity": "sha512-abeqS5ONyXNaZJPGAf6TOUMNdSe1Y6cpc9MLBRn+CuUoYbfdca6AxOyXVlfIv9OgKX+cacblbG5w7A6ccwoTPw==", "_location": "/npm-audit-report", "_phantomChildren": {}, "_requested": { - "type": "range", + "type": "version", "registry": true, - "raw": "npm-audit-report@^1.2.1", + "raw": "npm-audit-report@1.3.2", "name": "npm-audit-report", "escapedName": "npm-audit-report", - "rawSpec": "^1.2.1", + "rawSpec": "1.3.2", "saveSpec": null, - "fetchSpec": "^1.2.1" + "fetchSpec": "1.3.2" }, "_requiredBy": [ "#USER", "/" ], - "_resolved": "https://registry.npmjs.org/npm-audit-report/-/npm-audit-report-1.3.1.tgz", - "_shasum": "e79ea1fcb5ffaf3031102b389d5222c2b0459632", - "_spec": "npm-audit-report@^1.2.1", + "_resolved": "https://registry.npmjs.org/npm-audit-report/-/npm-audit-report-1.3.2.tgz", + "_shasum": "303bc78cd9e4c226415076a4f7e528c89fc77018", + "_spec": "npm-audit-report@1.3.2", "_where": "/Users/zkat/Documents/code/work/npm", "author": { "name": "Adam Baldwin" @@ -76,5 +76,5 @@ "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", "update-contrib": "weallcontribute -o . 
&& git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" }, - "version": "1.3.1" + "version": "1.3.2" } diff --git a/node_modules/npm-audit-report/reporters/detail.js b/node_modules/npm-audit-report/reporters/detail.js index 2cbb8fea50b25..f6e822eb7ae6f 100644 --- a/node_modules/npm-audit-report/reporters/detail.js +++ b/node_modules/npm-audit-report/reporters/detail.js @@ -117,7 +117,7 @@ const report = function (data, options) { {'Package': advisory.module_name}, {'Dependency of': `${resolution.path.split('>')[0]} ${resolution.dev ? '[dev]' : ''}`}, {'Path': `${resolution.path.split('>').join(Utils.color(' > ', 'grey', config.withColor))}`}, - {'More info': `https://nodesecurity.io/advisories/${advisory.id}`} + {'More info': advisory.url || `https://www.npmjs.com/advisories/${advisory.id}`} ) log(table.toString() + '\n\n') @@ -160,7 +160,7 @@ const report = function (data, options) { {'Patched in': patchedIn}, {'Dependency of': `${resolution.path.split('>')[0]} ${resolution.dev ? '[dev]' : ''}`}, {'Path': `${resolution.path.split('>').join(Utils.color(' > ', 'grey', config.withColor))}`}, - {'More info': `https://nodesecurity.io/advisories/${advisory.id}`} + {'More info': advisory.url || `https://www.npmjs.com/advisories/${advisory.id}`} ) log(table.toString()) }) diff --git a/node_modules/npm-audit-report/reporters/parseable.js b/node_modules/npm-audit-report/reporters/parseable.js index 363359772916c..1d46ef22716cd 100644 --- a/node_modules/npm-audit-report/reporters/parseable.js +++ b/node_modules/npm-audit-report/reporters/parseable.js @@ -11,6 +11,7 @@ const report = function (data, options) { const actions = function (data, config) { let accumulator = { + critical: '', high: '', moderate: '', low: '' @@ -25,16 +26,18 @@ const report = function (data, options) { l.recommendation = recommendation.cmd l.breaking = recommendation.isBreaking ? 'Y' : 'N' - // TODO: Verify: The advisory seems to repeat and be the same for all the 'resolves'. Is it true? - const advisory = data.advisories[action.resolves[0].id] - l.sevLevel = advisory.severity - l.severity = advisory.title - l.package = advisory.module_name - l.moreInfo = `https://nodesecurity.io/advisories/${advisory.id}` - l.path = action.resolves[0].path + action.resolves.forEach((resolution) => { + const advisory = data.advisories[resolution.id] + + l.sevLevel = advisory.severity + l.severity = advisory.title + l.package = advisory.module_name + l.moreInfo = advisory.url || `https://www.npmjs.com/advisories/${advisory.id}` + l.path = resolution.path - accumulator[advisory.severity] += [action.action, l.package, l.sevLevel, l.recommendation, l.severity, l.moreInfo, l.path, l.breaking] - .join('\t') + '\n' + accumulator[advisory.severity] += [action.action, l.package, l.sevLevel, l.recommendation, l.severity, l.moreInfo, l.path, l.breaking] + .join('\t') + '\n' + }) // forEach resolves } if (action.action === 'review') { @@ -44,7 +47,7 @@ const report = function (data, options) { l.sevLevel = advisory.severity l.severity = advisory.title l.package = advisory.module_name - l.moreInfo = `https://nodesecurity.io/advisories/${advisory.id}` + l.moreInfo = advisory.url || `https://www.npmjs.com/advisories/${advisory.id}` l.patchedIn = advisory.patched_versions.replace(' ', '') === '<0.0.0' ? 
'No patch available' : advisory.patched_versions l.path = resolution.path @@ -53,7 +56,7 @@ const report = function (data, options) { } // is review }) // forEach actions } - return accumulator['high'] + accumulator['moderate'] + accumulator['low'] + return accumulator['critical'] + accumulator['high'] + accumulator['moderate'] + accumulator['low'] } const exitCode = function (metadata) { diff --git a/node_modules/npm-bundled/index.js b/node_modules/npm-bundled/index.js index c0c5f60cc0edf..197a1bcb99a15 100644 --- a/node_modules/npm-bundled/index.js +++ b/node_modules/npm-bundled/index.js @@ -10,6 +10,9 @@ const fs = require('fs') const path = require('path') const EE = require('events').EventEmitter +// we don't care about the package bins, but we share a pj cache +// with other modules that DO care about it, so keep it nice. +const normalizePackageBin = require('npm-normalize-package-bin') class BundleWalker extends EE { constructor (opt) { @@ -43,6 +46,18 @@ class BundleWalker extends EE { this.bundle = null } + addListener (ev, fn) { + return this.on(ev, fn) + } + + on (ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'done' && this.didDone) { + this.emit('done', this.result) + } + return ret + } + done () { if (!this.didDone) { this.didDone = true @@ -57,7 +72,7 @@ class BundleWalker extends EE { } start () { - const pj = this.path + '/package.json' + const pj = path.resolve(this.path, 'package.json') if (this.packageJsonCache.has(pj)) this.onPackage(this.packageJsonCache.get(pj)) else @@ -72,7 +87,7 @@ class BundleWalker extends EE { onPackageJson (pj, data) { try { - this.package = JSON.parse(data + '') + this.package = normalizePackageBin(JSON.parse(data + '')) } catch (er) { return this.done() } @@ -80,17 +95,22 @@ class BundleWalker extends EE { this.onPackage(this.package) } + allDepsBundled (pkg) { + return Object.keys(pkg.dependencies || {}).concat( + Object.keys(pkg.optionalDependencies || {})) + } + onPackage (pkg) { // all deps are bundled if we got here as a child. // otherwise, only bundle bundledDeps // Get a unique-ified array with a short-lived Set - const bdRaw = this.parent - ? Object.keys(pkg.dependencies || {}).concat( - Object.keys(pkg.optionalDependencies || {})) + const bdRaw = this.parent ? this.allDepsBundled(pkg) : pkg.bundleDependencies || pkg.bundledDependencies || [] const bd = Array.from(new Set( - Array.isArray(bdRaw) ? bdRaw : Object.keys(bdRaw))) + Array.isArray(bdRaw) ? bdRaw + : bdRaw === true ? 
this.allDepsBundled(pkg) + : Object.keys(bdRaw))) if (!bd.length) return this.done() diff --git a/node_modules/npm-bundled/package.json b/node_modules/npm-bundled/package.json index 4c46838ce476a..18c578a5cb72d 100644 --- a/node_modules/npm-bundled/package.json +++ b/node_modules/npm-bundled/package.json @@ -1,8 +1,8 @@ { "_from": "npm-bundled@^1.0.1", - "_id": "npm-bundled@1.0.5", + "_id": "npm-bundled@1.1.1", "_inBundle": false, - "_integrity": "sha512-m/e6jgWu8/v5niCUKQi9qQl8QdeEduFA96xHDDzFGqly0OOjI7c+60KM/2sppfnUU9JJagf+zs+yGhqSOFj71g==", + "_integrity": "sha512-gqkfgGePhTpAEgUsGEgcq1rqPXA+tv/aVBlgEzfXwA1yiUJF7xtEt3CtVwOjNYQOVknDk0F20w58Fnm3EtG0fA==", "_location": "/npm-bundled", "_phantomChildren": {}, "_requested": { @@ -18,10 +18,10 @@ "_requiredBy": [ "/npm-packlist" ], - "_resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.0.5.tgz", - "_shasum": "3c1732b7ba936b3a10325aef616467c0ccbcc979", + "_resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.1.1.tgz", + "_shasum": "1edd570865a94cdb1bc8220775e29466c9fb234b", "_spec": "npm-bundled@^1.0.1", - "_where": "/Users/zkat/Documents/code/work/npm/node_modules/npm-packlist", + "_where": "/Users/isaacs/dev/npm/cli/node_modules/npm-packlist", "author": { "name": "Isaac Z. Schlueter", "email": "i@izs.me", @@ -31,6 +31,9 @@ "url": "https://github.com/npm/npm-bundled/issues" }, "bundleDependencies": false, + "dependencies": { + "npm-normalize-package-bin": "^1.0.1" + }, "deprecated": false, "description": "list things in node_modules that are bundledDependencies, or transitive dependencies thereof", "devDependencies": { @@ -56,5 +59,5 @@ "preversion": "npm test", "test": "tap test/*.js -J --100" }, - "version": "1.0.5" + "version": "1.1.1" } diff --git a/node_modules/npm-install-checks/index.js b/node_modules/npm-install-checks/index.js index 9ea7b875e4234..a533c9d36063e 100644 --- a/node_modules/npm-install-checks/index.js +++ b/node_modules/npm-install-checks/index.js @@ -7,9 +7,10 @@ exports.checkEngine = checkEngine function checkEngine (target, npmVer, nodeVer, force, strict, cb) { var nodev = force ? 
null : nodeVer var eng = target.engines + var opt = { includePrerelease: true } if (!eng) return cb() - if (nodev && eng.node && !semver.satisfies(nodev, eng.node) || - eng.npm && !semver.satisfies(npmVer, eng.npm)) { + if (nodev && eng.node && !semver.satisfies(nodev, eng.node, opt) || + eng.npm && !semver.satisfies(npmVer, eng.npm, opt)) { var er = new Error(util.format('Unsupported engine for %s: wanted: %j (current: %j)', target._id, eng, {node: nodev, npm: npmVer})) er.code = 'ENOTSUP' diff --git a/node_modules/npm-install-checks/package.json b/node_modules/npm-install-checks/package.json index e186c6a5ad960..dc409c761cc54 100644 --- a/node_modules/npm-install-checks/package.json +++ b/node_modules/npm-install-checks/package.json @@ -1,32 +1,28 @@ { - "_args": [ - [ - "npm-install-checks@3.0.0", - "/Users/rebecca/code/npm" - ] - ], - "_from": "npm-install-checks@3.0.0", - "_id": "npm-install-checks@3.0.0", + "_from": "npm-install-checks@3.0.2", + "_id": "npm-install-checks@3.0.2", "_inBundle": false, - "_integrity": "sha1-1K7N/VGlPjcjt7L5Oy7ijjB7wNc=", + "_integrity": "sha512-E4kzkyZDIWoin6uT5howP8VDvkM+E8IQDcHAycaAxMbwkqhIg5eEYALnXOl3Hq9MrkdQB/2/g1xwBINXdKSRkg==", "_location": "/npm-install-checks", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "npm-install-checks@3.0.0", + "raw": "npm-install-checks@3.0.2", "name": "npm-install-checks", "escapedName": "npm-install-checks", - "rawSpec": "3.0.0", + "rawSpec": "3.0.2", "saveSpec": null, - "fetchSpec": "3.0.0" + "fetchSpec": "3.0.2" }, "_requiredBy": [ + "#USER", "/" ], - "_resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-3.0.0.tgz", - "_spec": "3.0.0", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-3.0.2.tgz", + "_shasum": "ab2e32ad27baa46720706908e5b14c1852de44d9", + "_spec": "npm-install-checks@3.0.2", + "_where": "/Users/isaacs/dev/npm/cli", "author": { "name": "Robert Kowalski", "email": "rok@kowalski.gd" @@ -34,16 +30,21 @@ "bugs": { "url": "https://github.com/npm/npm-install-checks/issues" }, + "bundleDependencies": false, "dependencies": { "semver": "^2.3.0 || 3.x || 4 || 5" }, + "deprecated": false, "description": "checks that npm runs during the installation of a module", "devDependencies": { - "mkdirp": "~0.3.5", - "rimraf": "~2.2.5", + "mkdirp": "^0.5.1", + "rimraf": "^2.6.3", "standard": "^5.4.1", - "tap": "^5.0.1" + "tap": "^12.4.0" }, + "files": [ + "index.js" + ], "homepage": "https://github.com/npm/npm-install-checks", "keywords": [ "npm,", @@ -59,5 +60,5 @@ "scripts": { "test": "standard && tap --coverage test/*.js" }, - "version": "3.0.0" + "version": "3.0.2" } diff --git a/node_modules/npm-install-checks/test/check-engine.js b/node_modules/npm-install-checks/test/check-engine.js deleted file mode 100644 index a8fa0390a1347..0000000000000 --- a/node_modules/npm-install-checks/test/check-engine.js +++ /dev/null @@ -1,62 +0,0 @@ -var test = require('tap').test -var c = require('../index.js').checkEngine - -test('no engine defined', function (t) { - c({ engines: {} }, '1.1.2', '0.2.1', false, true, function (err) { - t.notOk(err, 'no error present') - t.end() - }) -}) - -test('node version too old', function (t) { - var target = { engines: { node: '0.10.24' } } - c(target, '1.1.2', '0.10.18', false, true, function (err) { - t.ok(err, 'returns an error') - t.equals(err.required.node, '0.10.24') - t.end() - }) -}) - -test('npm version too old', function (t) { - var target = { 
engines: { npm: '^1.4.6' } } - c(target, '1.3.2', '0.2.1', false, true, function (err) { - t.ok(err, 'returns an error') - t.equals(err.required.npm, '^1.4.6') - t.end() - }) -}) - -test('strict=false w/engineStrict json does not return an error', function (t) { - var target = { engines: { npm: '1.3.6' }, engineStrict: true } - c(target, '1.4.2', '0.2.1', false, false, function (err, warn) { - t.notOk(err, 'returns no error') - t.ok(warn, 'returns warning object') - t.equals(warn.required.npm, '1.3.6') - t.end() - }) -}) - -test('force node version too old', function (t) { - var target = { _id: 'test@1.0.0', engines: { node: '0.1.0' } } - c(target, '1.3.2', '0.2.1', true, true, function (err, warn) { - t.is(err, undefined, 'returns no error') - t.notOk(warn, 'returns no warning') - t.end() - }) -}) - -test('force npm version too old', function (t) { - var target = { _id: 'test@1.0.0', engines: { npm: '^1.4.6' } } - c(target, '1.3.2', '0.2.1', true, true, function (err, warn) { - t.ok(err, "can't force an npm version mismatch") - t.end() - }) -}) - -test('no engine', function (t) { - c({}, '1.3.2', '0.2.1', false, true, function (err, warn) { - t.notOk(err, 'returns no error') - t.notOk(warn, 'returns no warning') - t.end() - }) -}) diff --git a/node_modules/npm-install-checks/test/check-git.js b/node_modules/npm-install-checks/test/check-git.js deleted file mode 100644 index 0fadd0631b73d..0000000000000 --- a/node_modules/npm-install-checks/test/check-git.js +++ /dev/null @@ -1,37 +0,0 @@ -var test = require('tap').test -var c = require('../index.js').checkGit -var rimraf = require('rimraf') -var mkdirp = require('mkdirp') -var path = require('path') -var gitFixturePath = path.resolve(__dirname, 'out') - -test('is .git repo', function (t) { - mkdirp(gitFixturePath + '/.git', function () { - c(gitFixturePath, function (err) { - t.ok(err, 'error present') - t.equal(err.code, 'EISGIT') - t.end() - }) - }) -}) - -test('is not a .git repo', function (t) { - c(__dirname, function (err) { - t.notOk(err, 'error not present') - t.end() - }) -}) - -test('non-thing', function (t) { - c('/path/to/no/where', function (err) { - t.notOk(err, 'non-existent path is not a .git repo') - t.end() - }) -}) - -test('cleanup', function (t) { - rimraf(gitFixturePath, function () { - t.pass('cleanup') - t.end() - }) -}) diff --git a/node_modules/npm-install-checks/test/check-platform.js b/node_modules/npm-install-checks/test/check-platform.js deleted file mode 100644 index 23dbfba4a9b79..0000000000000 --- a/node_modules/npm-install-checks/test/check-platform.js +++ /dev/null @@ -1,104 +0,0 @@ -var test = require('tap').test -var c = require('../index.js').checkPlatform - -test('target cpu wrong', function (t) { - var target = {} - target.cpu = 'enten-cpu' - target.os = 'any' - c(target, false, function (err) { - t.ok(err, 'error present') - t.equal(err.code, 'EBADPLATFORM') - t.end() - }) -}) - -test('os wrong', function (t) { - var target = {} - target.cpu = 'any' - target.os = 'enten-os' - c(target, false, function (err) { - t.ok(err, 'error present') - t.equal(err.code, 'EBADPLATFORM') - t.end() - }) -}) - -test('nothing wrong', function (t) { - var target = {} - target.cpu = 'any' - target.os = 'any' - c(target, false, function (err) { - t.notOk(err, 'no error present') - t.end() - }) -}) - -test('force', function (t) { - var target = {} - target.cpu = 'enten-cpu' - target.os = 'any' - c(target, true, function (err) { - t.notOk(err, 'no error present') - t.end() - }) -}) - -test('no opinions', function (t) { - 
var target = {} - c(target, false, function (err) { - t.notOk(err, 'no error present') - t.end() - }) -}) - -test('only target cpu wrong', function (t) { - var target = {} - target.cpu = 'enten-cpu' - c(target, false, function (err) { - t.ok(err, 'error present') - t.equal(err.code, 'EBADPLATFORM') - t.end() - }) -}) - -test('only os wrong', function (t) { - var target = {} - target.os = 'enten-os' - c(target, false, function (err) { - t.ok(err, 'error present') - t.equal(err.code, 'EBADPLATFORM') - t.end() - }) -}) - -test('everything wrong w/arrays', function (t) { - var target = {} - target.cpu = ['enten-cpu'] - target.os = ['enten-os'] - c(target, false, function (err) { - t.ok(err, 'error present') - t.equal(err.code, 'EBADPLATFORM') - t.end() - }) -}) - -test('os wrong (negation)', function (t) { - var target = {} - target.cpu = 'any' - target.os = '!' + process.platform - c(target, false, function (err) { - t.ok(err, 'error present') - t.equal(err.code, 'EBADPLATFORM') - t.end() - }) -}) - -test('nothing wrong (negation)', function (t) { - var target = {} - target.cpu = '!enten-cpu' - target.os = '!enten-os' - c(target, false, function (err) { - t.notOk(err, 'no error present') - t.end() - }) -}) diff --git a/node_modules/npm-lifecycle/CHANGELOG.md b/node_modules/npm-lifecycle/CHANGELOG.md index 7e7bc92e2922e..ccdd777ee1975 100644 --- a/node_modules/npm-lifecycle/CHANGELOG.md +++ b/node_modules/npm-lifecycle/CHANGELOG.md @@ -2,6 +2,82 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [3.1.4](https://github.com/npm/lifecycle/compare/v3.1.3...v3.1.4) (2019-09-18) + + +### Bug Fixes + +* filter functions and undefined out of makeEnv ([10c0c08](https://github.com/npm/lifecycle/commit/10c0c08)) + + + + +## [3.1.3](https://github.com/npm/lifecycle/compare/v3.1.2...v3.1.3) (2019-08-12) + + +### Bug Fixes + +* fail properly if uid-number raises an error ([e0e1b62](https://github.com/npm/lifecycle/commit/e0e1b62)) + + + + +## [3.1.2](https://github.com/npm/lifecycle/compare/v3.1.1...v3.1.2) (2019-07-22) + + +### Bug Fixes + +* do not exclude /path/ from process.env copying ([53e6318](https://github.com/npm/lifecycle/commit/53e6318)) + + + + +# [3.1.0](https://github.com/npm/lifecycle/compare/v3.0.0...v3.1.0) (2019-07-17) + + +### Bug Fixes + +* remove procInterrupt listener on SIGINT in procError ([ea18ed2](https://github.com/npm/lifecycle/commit/ea18ed2)), closes [#36](https://github.com/npm/lifecycle/issues/36) [#11](https://github.com/npm/lifecycle/issues/11) [#18](https://github.com/npm/lifecycle/issues/18) +* set only one PATH env variable for child proc ([3aaf954](https://github.com/npm/lifecycle/commit/3aaf954)), closes [#22](https://github.com/npm/lifecycle/issues/22) [#25](https://github.com/npm/lifecycle/issues/25) + + +### Features + +* **process.env.path:** Use platform specific path casing if present ([5523951](https://github.com/npm/lifecycle/commit/5523951)), closes [#29](https://github.com/npm/lifecycle/issues/29) [#30](https://github.com/npm/lifecycle/issues/30) + + + + +# [3.0.0](https://github.com/npm/lifecycle/compare/v2.1.1...v3.0.0) (2019-07-10) + + +* node-gyp@5.0.2 ([3c5aae6](https://github.com/npm/lifecycle/commit/3c5aae6)) + + +### BREAKING CHANGES + +* requires modifying the version of node-gyp in npm cli. 
+ +Required for https://github.com/npm/cli/pull/208 +Fix: https://github.com/npm/npm-lifecycle/issues/37 +Close: https://github.com/npm/npm-lifecycle/issues/38 +PR-URL: https://github.com/npm/npm-lifecycle/pull/38 +Credit: @irega +Reviewed-by: @isaacs + + + + +## [2.1.1](https://github.com/npm/lifecycle/compare/v2.1.0...v2.1.1) (2019-05-08) + + +### Bug Fixes + +* **test:** update postinstall script for fixture ([220cd70](https://github.com/npm/lifecycle/commit/220cd70)) + + + # [2.1.0](https://github.com/npm/lifecycle/compare/v2.0.3...v2.1.0) (2018-08-13) diff --git a/node_modules/npm-lifecycle/index.js b/node_modules/npm-lifecycle/index.js index 4eb83255a94a2..337de71416879 100644 --- a/node_modules/npm-lifecycle/index.js +++ b/node_modules/npm-lifecycle/index.js @@ -4,6 +4,9 @@ exports = module.exports = lifecycle exports.makeEnv = makeEnv exports._incorrectWorkingDirectory = _incorrectWorkingDirectory +// for testing +const platform = process.env.__TESTING_FAKE_PLATFORM__ || process.platform +const isWindows = platform === 'win32' const spawn = require('./lib/spawn') const path = require('path') const Stream = require('stream').Stream @@ -18,17 +21,28 @@ const resolveFrom = require('resolve-from') const DEFAULT_NODE_GYP_PATH = resolveFrom(__dirname, 'node-gyp/bin/node-gyp') const hookStatCache = new Map() -let PATH = 'PATH' - -// windows calls it's path 'Path' usually, but this is not guaranteed. -if (process.platform === 'win32') { - PATH = 'Path' - Object.keys(process.env).forEach(function (e) { - if (e.match(/^PATH$/i)) { - PATH = e - } - }) +let PATH = isWindows ? 'Path' : 'PATH' +exports._pathEnvName = PATH +const delimiter = path.delimiter + +// windows calls its path 'Path' usually, but this is not guaranteed. +// merge them all together in the order they appear in the object. +const mergePath = env => + Object.keys(env).filter(p => /^path$/i.test(p) && env[p]) + .map(p => env[p].split(delimiter)) + .reduce((set, p) => set.concat(p.filter(p => !set.includes(p))), []) + .join(delimiter) +exports._mergePath = mergePath + +const setPathEnv = (env, path) => { + // first ensure that the canonical value is set. + env[PATH] = path + // also set any other case values, because windows. + Object.keys(env) + .filter(p => p !== PATH && /^path$/i.test(p)) + .forEach(p => { env[p] = path }) } +exports._setPathEnv = setPathEnv function logid (pkg, stage) { return pkg._id + '~' + stage + ':' @@ -120,8 +134,10 @@ function lifecycle_ (pkg, stage, wd, opts, env, cb) { pathArr.push(path.dirname(process.execPath)) } - if (env[PATH]) pathArr.push(env[PATH]) - env[PATH] = pathArr.join(process.platform === 'win32' ? ';' : ':') + const existingPath = mergePath(env) + if (existingPath) pathArr.push(existingPath) + const envPath = pathArr.join(isWindows ? ';' : ':') + setPathEnv(env, envPath) var packageLifecycle = pkg.scripts && pkg.scripts.hasOwnProperty(stage) @@ -164,10 +180,9 @@ function shouldPrependCurrentNodeDirToPATH (opts) { var isDifferentNodeInPath - var isWindows = process.platform === 'win32' var foundExecPath try { - foundExecPath = which.sync(path.basename(process.execPath), {pathExt: isWindows ? ';' : ':'}) + foundExecPath = which.sync(path.basename(process.execPath), { pathExt: isWindows ? ';' : ':' }) // Apply `fs.realpath()` here to avoid false positives when `node` is a symlinked executable. 
isDifferentNodeInPath = fs.realpathSync(process.execPath).toUpperCase() !== fs.realpathSync(foundExecPath).toUpperCase() @@ -242,7 +257,7 @@ function runCmd (note, cmd, pkg, env, stage, wd, opts, cb) { } opts.log.verbose('lifecycle', logid(pkg, stage), 'unsafe-perm in lifecycle', unsafe) - if (process.platform === 'win32') { + if (isWindows) { unsafe = true } @@ -250,19 +265,19 @@ function runCmd (note, cmd, pkg, env, stage, wd, opts, cb) { runCmd_(cmd, pkg, env, wd, opts, stage, unsafe, 0, 0, cb) } else { uidNumber(user, group, function (er, uid, gid) { + if (er) { + er.code = 'EUIDLOOKUP' + opts.log.resume() + process.nextTick(dequeue) + return cb(er) + } runCmd_(cmd, pkg, env, wd, opts, stage, unsafe, uid, gid, cb) }) } } -function runCmd_ (cmd, pkg, env, wd, opts, stage, unsafe, uid, gid, cb_) { - function cb (er) { - cb_.apply(null, arguments) - opts.log.resume() - process.nextTick(dequeue) - } - - var conf = { +const getSpawnArgs = ({ cmd, wd, opts, uid, gid, unsafe, env }) => { + const conf = { cwd: wd, env: env, stdio: opts.stdio || [ 0, 1, 2 ] @@ -273,24 +288,40 @@ function runCmd_ (cmd, pkg, env, wd, opts, stage, unsafe, uid, gid, cb_) { conf.gid = gid ^ 0 } - var sh = 'sh' - var shFlag = '-c' - - var customShell = opts.scriptShell + const customShell = opts.scriptShell + let sh = 'sh' + let shFlag = '-c' if (customShell) { sh = customShell - } else if (process.platform === 'win32') { + } else if (isWindows || opts._TESTING_FAKE_WINDOWS_) { sh = process.env.comspec || 'cmd' - shFlag = '/d /s /c' - conf.windowsVerbatimArguments = true + // '/d /s /c' is used only for cmd.exe. + if (/^(?:.*\\)?cmd(?:\.exe)?$/i.test(sh)) { + shFlag = '/d /s /c' + conf.windowsVerbatimArguments = true + } + } + + return [sh, [shFlag, cmd], conf] +} + +exports._getSpawnArgs = getSpawnArgs + +function runCmd_ (cmd, pkg, env, wd, opts, stage, unsafe, uid, gid, cb_) { + function cb (er) { + cb_.apply(null, arguments) + opts.log.resume() + process.nextTick(dequeue) } + const [sh, args, conf] = getSpawnArgs({ cmd, wd, opts, uid, gid, unsafe, env }) + opts.log.verbose('lifecycle', logid(pkg, stage), 'PATH:', env[PATH]) opts.log.verbose('lifecycle', logid(pkg, stage), 'CWD:', wd) - opts.log.silly('lifecycle', logid(pkg, stage), 'Args:', [shFlag, cmd]) + opts.log.silly('lifecycle', logid(pkg, stage), 'Args:', args) - var proc = spawn(sh, [shFlag, cmd], conf, opts.log) + var proc = spawn(sh, args, conf, opts.log) proc.on('error', procError) proc.on('close', function (code, signal) { @@ -333,6 +364,7 @@ function runCmd_ (cmd, pkg, env, wd, opts, stage, unsafe, uid, gid, cb_) { process.removeListener('SIGTERM', procKill) process.removeListener('SIGTERM', procInterupt) process.removeListener('SIGINT', procKill) + process.removeListener('SIGINT', procInterupt) return cb(er) } function procKill () { @@ -426,12 +458,17 @@ function makeEnv (data, opts, prefix, env) { return } var value = opts.config[i] - if (value instanceof Stream || Array.isArray(value)) return + if (value instanceof Stream || Array.isArray(value) || typeof value === 'function') return if (i.match(/umask/)) value = umask.toString(value) + if (!value) value = '' else if (typeof value === 'number') value = '' + value else if (typeof value !== 'string') value = JSON.stringify(value) + if (typeof value !== 'string') { + return + } + value = value.indexOf('\n') !== -1 ? 
JSON.stringify(value) : value diff --git a/node_modules/npm-lifecycle/package.json b/node_modules/npm-lifecycle/package.json index 6f63b06fd51c4..beeb598957898 100644 --- a/node_modules/npm-lifecycle/package.json +++ b/node_modules/npm-lifecycle/package.json @@ -1,29 +1,30 @@ { - "_from": "npm-lifecycle@2.1.0", - "_id": "npm-lifecycle@2.1.0", + "_from": "npm-lifecycle@3.1.4", + "_id": "npm-lifecycle@3.1.4", "_inBundle": false, - "_integrity": "sha512-QbBfLlGBKsktwBZLj6AviHC6Q9Y3R/AY4a2PYSIRhSKSS0/CxRyD/PfxEX6tPeOCXQgMSNdwGeECacstgptc+g==", + "_integrity": "sha512-tgs1PaucZwkxECGKhC/stbEgFyc3TGh2TJcg2CDr6jbvQRdteHNhmMeljRzpe4wgFAXQADoy1cSqqi7mtiAa5A==", "_location": "/npm-lifecycle", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "npm-lifecycle@2.1.0", + "raw": "npm-lifecycle@3.1.4", "name": "npm-lifecycle", "escapedName": "npm-lifecycle", - "rawSpec": "2.1.0", + "rawSpec": "3.1.4", "saveSpec": null, - "fetchSpec": "2.1.0" + "fetchSpec": "3.1.4" }, "_requiredBy": [ "#USER", "/", - "/libcipm" + "/libcipm", + "/libnpm" ], - "_resolved": "https://registry.npmjs.org/npm-lifecycle/-/npm-lifecycle-2.1.0.tgz", - "_shasum": "1eda2eedb82db929e3a0c50341ab0aad140ed569", - "_spec": "npm-lifecycle@2.1.0", - "_where": "/Users/zkat/Documents/code/work/npm", + "_resolved": "https://registry.npmjs.org/npm-lifecycle/-/npm-lifecycle-3.1.4.tgz", + "_shasum": "de6975c7d8df65f5150db110b57cce498b0b604c", + "_spec": "npm-lifecycle@3.1.4", + "_where": "/Users/mperrotte/npminc/cli", "author": { "name": "Mike Sherov" }, @@ -33,8 +34,8 @@ "bundleDependencies": false, "dependencies": { "byline": "^5.0.0", - "graceful-fs": "^4.1.11", - "node-gyp": "^3.8.0", + "graceful-fs": "^4.1.15", + "node-gyp": "^5.0.2", "resolve-from": "^4.0.0", "slide": "^1.1.6", "uid-number": "0.0.6", @@ -44,11 +45,11 @@ "deprecated": false, "description": "JavaScript package lifecycle hook runner", "devDependencies": { - "nyc": "^12.0.2", - "sinon": "^6.1.5", - "standard": "^11.0.1", + "nyc": "^14.1.0", + "sinon": "^7.2.3", + "standard": "^12.0.1", "standard-version": "^4.4.0", - "tap": "^12.0.1", + "tap": "^12.7.0", "weallbehave": "^1.2.0", "weallcontribute": "^1.0.8" }, @@ -76,9 +77,10 @@ "prerelease": "npm t", "pretest": "standard", "release": "standard-version -s", - "test": "tap -J --coverage test/*.js", + "snap": "TAP_SNAPSHOT=1 npm test", + "test": "tap -J --cov test/*.js", "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", "update-contrib": "weallcontribute -o . 
&& git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" }, - "version": "2.1.0" + "version": "3.1.4" } diff --git a/node_modules/npm-normalize-package-bin/.github/settings.yml b/node_modules/npm-normalize-package-bin/.github/settings.yml new file mode 100644 index 0000000000000..4aaa0dd57e4ad --- /dev/null +++ b/node_modules/npm-normalize-package-bin/.github/settings.yml @@ -0,0 +1,2 @@ +--- +_extends: 'open-source-project-boilerplate' diff --git a/node_modules/npm-normalize-package-bin/.npmignore b/node_modules/npm-normalize-package-bin/.npmignore new file mode 100644 index 0000000000000..3870bd5bb7207 --- /dev/null +++ b/node_modules/npm-normalize-package-bin/.npmignore @@ -0,0 +1,24 @@ +# ignore most things, include some others +/* +/.* + +!bin/ +!lib/ +!docs/ +!package.json +!package-lock.json +!README.md +!CONTRIBUTING.md +!LICENSE +!CHANGELOG.md +!example/ +!scripts/ +!tap-snapshots/ +!test/ +!.github/ +!.travis.yml +!.gitignore +!.gitattributes +!coverage-map.js +!map.js +!index.js diff --git a/node_modules/npm-normalize-package-bin/LICENSE b/node_modules/npm-normalize-package-bin/LICENSE new file mode 100644 index 0000000000000..19cec97b18468 --- /dev/null +++ b/node_modules/npm-normalize-package-bin/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-normalize-package-bin/README.md b/node_modules/npm-normalize-package-bin/README.md new file mode 100644 index 0000000000000..65ba316a0d97e --- /dev/null +++ b/node_modules/npm-normalize-package-bin/README.md @@ -0,0 +1,14 @@ +# npm-normalize-package-bin + +Turn any flavor of allowable package.json bin into a normalized object. + +## API + +```js +const normalize = require('npm-normalize-package-bin') +const pkg = {name: 'foo', bin: 'bar'} +console.log(normalize(pkg)) // {name:'foo', bin:{foo: 'bar'}} +``` + +Also strips out weird dots and slashes to prevent accidental and/or +malicious bad behavior when the package is installed. diff --git a/node_modules/npm-normalize-package-bin/index.js b/node_modules/npm-normalize-package-bin/index.js new file mode 100644 index 0000000000000..5a738ff82e1e3 --- /dev/null +++ b/node_modules/npm-normalize-package-bin/index.js @@ -0,0 +1,60 @@ +// pass in a manifest with a 'bin' field here, and it'll turn it +// into a properly santized bin object +const {join, basename} = require('path') + +const normalize = pkg => + !pkg.bin ? removeBin(pkg) + : typeof pkg.bin === 'string' ? normalizeString(pkg) + : Array.isArray(pkg.bin) ? normalizeArray(pkg) + : typeof pkg.bin === 'object' ? 
normalizeObject(pkg) + : removeBin(pkg) + +const normalizeString = pkg => { + if (!pkg.name) + return removeBin(pkg) + pkg.bin = { [pkg.name]: pkg.bin } + return normalizeObject(pkg) +} + +const normalizeArray = pkg => { + pkg.bin = pkg.bin.reduce((acc, k) => { + acc[basename(k)] = k + return acc + }, {}) + return normalizeObject(pkg) +} + +const removeBin = pkg => { + delete pkg.bin + return pkg +} + +const normalizeObject = pkg => { + const orig = pkg.bin + const clean = {} + let hasBins = false + Object.keys(orig).forEach(binKey => { + const base = join('/', basename(binKey.replace(/\\|:/g, '/'))).substr(1) + + if (typeof orig[binKey] !== 'string' || !base) + return + + const binTarget = join('/', orig[binKey]) + .replace(/\\/g, '/').substr(1) + + if (!binTarget) + return + + clean[base] = binTarget + hasBins = true + }) + + if (hasBins) + pkg.bin = clean + else + delete pkg.bin + + return pkg +} + +module.exports = normalize diff --git a/node_modules/npm-normalize-package-bin/package-lock.json b/node_modules/npm-normalize-package-bin/package-lock.json new file mode 100644 index 0000000000000..0d3390d4eee63 --- /dev/null +++ b/node_modules/npm-normalize-package-bin/package-lock.json @@ -0,0 +1,3529 @@ +{ + "name": "npm-normalize-package-bin", + "version": "1.0.1", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "@babel/code-frame": { + "version": "7.5.5", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.5.5.tgz", + "integrity": "sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw==", + "dev": true, + "requires": { + "@babel/highlight": "^7.0.0" + } + }, + "@babel/generator": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.7.4.tgz", + "integrity": "sha512-m5qo2WgdOJeyYngKImbkyQrnUN1mPceaG5BV+G0E3gWsa4l/jCSryWJdM2x8OuGAOyh+3d5pVYfZWCiNFtynxg==", + "dev": true, + "requires": { + "@babel/types": "^7.7.4", + "jsesc": "^2.5.1", + "lodash": "^4.17.13", + "source-map": "^0.5.0" + }, + "dependencies": { + "source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": true + } + } + }, + "@babel/helper-function-name": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.7.4.tgz", + "integrity": "sha512-AnkGIdiBhEuiwdoMnKm7jfPfqItZhgRaZfMg1XX3bS25INOnLPjPG1Ppnajh8eqgt5kPJnfqrRHqFqmjKDZLzQ==", + "dev": true, + "requires": { + "@babel/helper-get-function-arity": "^7.7.4", + "@babel/template": "^7.7.4", + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-get-function-arity": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.7.4.tgz", + "integrity": "sha512-QTGKEdCkjgzgfJ3bAyRwF4yyT3pg+vDgan8DSivq1eS0gwi+KGKE5x8kRcbeFTb/673mkO5SN1IZfmCfA5o+EA==", + "dev": true, + "requires": { + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-split-export-declaration": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.7.4.tgz", + "integrity": "sha512-guAg1SXFcVr04Guk9eq0S4/rWS++sbmyqosJzVs8+1fH5NI+ZcmkaSkc7dmtAFbHFva6yRJnjW3yAcGxjueDug==", + "dev": true, + "requires": { + "@babel/types": "^7.7.4" + } + }, + "@babel/highlight": { + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.5.0.tgz", + 
"integrity": "sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ==", + "dev": true, + "requires": { + "chalk": "^2.0.0", + "esutils": "^2.0.2", + "js-tokens": "^4.0.0" + } + }, + "@babel/parser": { + "version": "7.7.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.7.5.tgz", + "integrity": "sha512-KNlOe9+/nk4i29g0VXgl8PEXIRms5xKLJeuZ6UptN0fHv+jDiriG+y94X6qAgWTR0h3KaoM1wK5G5h7MHFRSig==", + "dev": true + }, + "@babel/runtime": { + "version": "7.7.6", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.7.6.tgz", + "integrity": "sha512-BWAJxpNVa0QlE5gZdWjSxXtemZyZ9RmrmVozxt3NUXeZhVIJ5ANyqmMc0JDrivBZyxUuQvFxlvH4OWWOogGfUw==", + "dev": true, + "requires": { + "regenerator-runtime": "^0.13.2" + } + }, + "@babel/template": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.7.4.tgz", + "integrity": "sha512-qUzihgVPguAzXCK7WXw8pqs6cEwi54s3E+HrejlkuWO6ivMKx9hZl3Y2fSXp9i5HgyWmj7RKP+ulaYnKM4yYxw==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "@babel/parser": "^7.7.4", + "@babel/types": "^7.7.4" + } + }, + "@babel/traverse": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.7.4.tgz", + "integrity": "sha512-P1L58hQyupn8+ezVA2z5KBm4/Zr4lCC8dwKCMYzsa5jFMDMQAzaBNy9W5VjB+KAmBjb40U7a/H6ao+Xo+9saIw==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.5.5", + "@babel/generator": "^7.7.4", + "@babel/helper-function-name": "^7.7.4", + "@babel/helper-split-export-declaration": "^7.7.4", + "@babel/parser": "^7.7.4", + "@babel/types": "^7.7.4", + "debug": "^4.1.0", + "globals": "^11.1.0", + "lodash": "^4.17.13" + } + }, + "@babel/types": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.7.4.tgz", + "integrity": "sha512-cz5Ji23KCi4T+YIE/BolWosrJuSmoZeN1EFnRtBwF+KKLi8GG/Z2c2hOJJeCXPk4mwk4QFvTmwIodJowXgttRA==", + "dev": true, + "requires": { + "esutils": "^2.0.2", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + }, + "ajv": { + "version": "6.10.2", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.2.tgz", + "integrity": "sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw==", + "dev": true, + "requires": { + "fast-deep-equal": "^2.0.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "anymatch": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz", + "integrity": "sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==", + "dev": true, + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + } + }, + "append-transform": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-1.0.0.tgz", + "integrity": "sha512-P009oYkeHyU742iSZJzZZywj4QRJdnTWffaKuJQLablCZ1uz6/cW4yaRgcDaoQ+uwOxxnt0gRUcwfsNP2ri0gw==", + "dev": true, + "requires": { + 
"default-require-extensions": "^2.0.0" + } + }, + "archy": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz", + "integrity": "sha1-+cjBN1fMHde8N5rHeyxipcKGjEA=", + "dev": true + }, + "arg": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.2.tgz", + "integrity": "sha512-+ytCkGcBtHZ3V2r2Z06AncYO8jz46UEamcspGoU8lHcEbpn6J77QK0vdWvChsclg/tM5XIJC5tnjmPp7Eq6Obg==", + "dev": true + }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "requires": { + "sprintf-js": "~1.0.2" + } + }, + "asn1": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", + "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", + "dev": true, + "requires": { + "safer-buffer": "~2.1.0" + } + }, + "assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", + "dev": true + }, + "async-hook-domain": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/async-hook-domain/-/async-hook-domain-1.1.3.tgz", + "integrity": "sha512-ZovMxSbADV3+biB7oR1GL5lGyptI24alp0LWHlmz1OFc5oL47pz3EiIF6nXOkDW7yLqih4NtsiYduzdDW0i+Wg==", + "dev": true, + "requires": { + "source-map-support": "^0.5.11" + } + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", + "dev": true + }, + "aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", + "dev": true + }, + "aws4": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.9.0.tgz", + "integrity": "sha512-Uvq6hVe90D0B2WEnUqtdgY1bATGz3mw33nH9Y+dmA+w5DHvUmBgkr5rM/KCHpCsiFNRUfokW/szpPPgMK2hm4A==", + "dev": true + }, + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "bcrypt-pbkdf": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "dev": true, + "requires": { + "tweetnacl": "^0.14.3" + } + }, + "binary-extensions": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.0.0.tgz", + "integrity": "sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow==", + "dev": true + }, + "bind-obj-methods": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/bind-obj-methods/-/bind-obj-methods-2.0.0.tgz", + "integrity": "sha512-3/qRXczDi2Cdbz6jE+W3IflJOutRVica8frpBn14de1mBOkzDo+6tY33kNhvkw54Kn3PzRRD2VnGbGPcTAk4sw==", + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "braces": { + "version": "3.0.2", + "resolved": 
"https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, + "browser-process-hrtime": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz", + "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==", + "dev": true + }, + "buffer-from": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", + "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==", + "dev": true + }, + "caching-transform": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-3.0.2.tgz", + "integrity": "sha512-Mtgcv3lh3U0zRii/6qVgQODdPA4G3zhG+jtbCWj39RXuUFTMzH0vcdMtaJS1jPowd+It2Pqr6y3NJMQqOqCE2w==", + "dev": true, + "requires": { + "hasha": "^3.0.0", + "make-dir": "^2.0.0", + "package-hash": "^3.0.0", + "write-file-atomic": "^2.4.2" + }, + "dependencies": { + "write-file-atomic": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", + "integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.11", + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.2" + } + } + } + }, + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + }, + "caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", + "dev": true + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "chokidar": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.3.0.tgz", + "integrity": "sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A==", + "dev": true, + "requires": { + "anymatch": "~3.1.1", + "braces": "~3.0.2", + "fsevents": "~2.1.1", + "glob-parent": "~5.1.0", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.2.0" + } + }, + "cliui": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-4.1.0.tgz", + "integrity": "sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ==", + "dev": true, + "requires": { + "string-width": "^2.1.1", + "strip-ansi": "^4.0.0", + "wrap-ansi": "^2.0.0" + } + }, + "code-point-at": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", + "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", + "dev": true + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": 
"sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "color-support": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", + "dev": true + }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true, + "optional": true + }, + "commondir": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", + "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "convert-source-map": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.7.0.tgz", + "integrity": "sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.1" + }, + "dependencies": { + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + } + } + }, + "core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", + "dev": true + }, + "coveralls": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/coveralls/-/coveralls-3.0.9.tgz", + "integrity": "sha512-nNBg3B1+4iDox5A5zqHKzUTiwl2ey4k2o0NEcVZYvl+GOSJdKBj4AJGKLv6h3SvWch7tABHePAQOSZWM9E2hMg==", + "dev": true, + "requires": { + "js-yaml": "^3.13.1", + "lcov-parse": "^1.0.0", + "log-driver": "^1.2.7", + "minimist": "^1.2.0", + "request": "^2.88.0" + } + }, + "cp-file": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/cp-file/-/cp-file-6.2.0.tgz", + "integrity": "sha512-fmvV4caBnofhPe8kOcitBwSn2f39QLjnAnGq3gO9dfd75mUytzKNZB1hde6QHunW2Rt+OwuBOMc3i1tNElbszA==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "make-dir": "^2.0.0", + "nested-error-stacks": "^2.0.0", + "pify": "^4.0.1", + "safe-buffer": "^5.0.1" + } + }, + "cross-spawn": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-4.0.2.tgz", + "integrity": "sha1-e5JHYhwjrf3ThWAEqCPL45dCTUE=", + "dev": true, + "requires": { + "lru-cache": "^4.0.1", + "which": "^1.2.9" + }, + "dependencies": { + "which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": 
"sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + } + } + }, + "dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "dev": true, + "requires": { + "assert-plus": "^1.0.0" + } + }, + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true + }, + "default-require-extensions": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-2.0.0.tgz", + "integrity": "sha1-9fj7sYp9bVCyH2QfZJ67Uiz+JPc=", + "dev": true, + "requires": { + "strip-bom": "^3.0.0" + } + }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "dev": true + }, + "diff": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.1.tgz", + "integrity": "sha512-s2+XdvhPCOF01LRQBC8hf4vhbVmI2CGS5aZnxLJlT5FtdhPCDFq80q++zK2KlrVorVDdL5BOGZ/VfLrVtYNF+Q==", + "dev": true + }, + "diff-frag": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/diff-frag/-/diff-frag-1.0.1.tgz", + "integrity": "sha512-6/v2PC/6UTGcWPPetb9acL8foberUg/CtPdALeJUdD1B/weHNvzftoo00gYznqHGRhHEbykUGzqfG9RWOSr5yw==", + "dev": true + }, + "ecc-jsbn": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", + "dev": true, + "requires": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } + }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "requires": { + "is-arrayish": "^0.2.1" + } + }, + "es6-error": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", + "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==", + "dev": true + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true + }, + "esm": { + "version": "3.2.25", + "resolved": "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz", + "integrity": "sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==", + "dev": true + }, + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": 
true + }, + "esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true + }, + "events-to-array": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/events-to-array/-/events-to-array-1.1.2.tgz", + "integrity": "sha1-LUH1Y+H+QA7Uli/hpNXGp1Od9/Y=", + "dev": true + }, + "extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "dev": true + }, + "extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", + "dev": true + }, + "fast-deep-equal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", + "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=", + "dev": true + }, + "fast-json-stable-stringify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", + "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=", + "dev": true + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "find-cache-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", + "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", + "dev": true, + "requires": { + "commondir": "^1.0.1", + "make-dir": "^2.0.0", + "pkg-dir": "^3.0.0" + } + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "findit": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/findit/-/findit-2.0.0.tgz", + "integrity": "sha1-ZQnwEmr0wXhVHPqZOU4DLhOk1W4=", + "dev": true + }, + "flow-parser": { + "version": "0.113.0", + "resolved": "https://registry.npmjs.org/flow-parser/-/flow-parser-0.113.0.tgz", + "integrity": "sha512-+hRyEB1sVLNMTMniDdM1JIS8BJ3HUL7IFIJaxX+t/JUy0GNYdI0Tg1QLx8DJmOF8HeoCrUDcREpnDAc/pPta3w==", + "dev": true + }, + "flow-remove-types": { + "version": "2.113.0", + "resolved": "https://registry.npmjs.org/flow-remove-types/-/flow-remove-types-2.113.0.tgz", + "integrity": "sha512-Rp4hN/JlGmUjNxXuBXr6Or+MgDH9xKc+ZiUSRzl/fbpiH9RaCPAQKsgVEYNPcIE26q6RpAuMQfvzR0jQfuwUZQ==", + "dev": true, + "requires": { + "flow-parser": "^0.113.0", + "pirates": "^3.0.2", + "vlq": "^0.2.1" + } + }, + "foreground-child": { + "version": "1.5.6", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-1.5.6.tgz", + "integrity": "sha1-T9ca0t/elnibmApcCilZN8svXOk=", + "dev": true, + "requires": { + "cross-spawn": "^4", + "signal-exit": "^3.0.0" + } + }, + "forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", + "dev": true + }, + 
"form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "dev": true, + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, + "fs-exists-cached": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-exists-cached/-/fs-exists-cached-1.0.0.tgz", + "integrity": "sha1-zyVVTKBQ3EmuZla0HeQiWJidy84=", + "dev": true + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "fsevents": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.1.2.tgz", + "integrity": "sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA==", + "dev": true, + "optional": true + }, + "function-loop": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/function-loop/-/function-loop-1.0.2.tgz", + "integrity": "sha512-Iw4MzMfS3udk/rqxTiDDCllhGwlOrsr50zViTOO/W6lS/9y6B1J0BD2VZzrnWUYBJsl3aeqjgR5v7bWWhZSYbA==", + "dev": true + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, + "getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "dev": true, + "requires": { + "assert-plus": "^1.0.0" + } + }, + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "glob-parent": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.0.tgz", + "integrity": "sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, + "globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true + }, + "graceful-fs": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", + "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==", + "dev": true + }, + "handlebars": { + "version": "4.5.3", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.5.3.tgz", + "integrity": "sha512-3yPecJoJHK/4c6aZhSvxOyG4vJKDshV36VHp0iVCDVh7o9w2vwi3NSnL2MMPj3YdduqaBcu7cGbggJQM0br9xA==", + "dev": true, + "requires": { + "neo-async": "^2.6.0", + "optimist": "^0.6.1", + "source-map": "^0.6.1", + "uglify-js": "^3.1.4" + } + }, + "har-schema": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", + "dev": true + }, + "har-validator": { + 
"version": "5.1.3", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz", + "integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==", + "dev": true, + "requires": { + "ajv": "^6.5.5", + "har-schema": "^2.0.0" + } + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true + }, + "hasha": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hasha/-/hasha-3.0.0.tgz", + "integrity": "sha1-UqMvq4Vp1BymmmH/GiFPjrfIvTk=", + "dev": true, + "requires": { + "is-stream": "^1.0.1" + } + }, + "hosted-git-info": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.5.tgz", + "integrity": "sha512-kssjab8CvdXfcXMXVcvsXum4Hwdq9XGtRD3TteMEvEbq0LXyiNQr6AprqKqfeaDXze7SxWvRxdpwE6ku7ikLkg==", + "dev": true + }, + "http-signature": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "dev": true, + "requires": { + "assert-plus": "^1.0.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" + } + }, + "imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true + }, + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "requires": { + "binary-extensions": "^2.0.0" + } + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "is-glob": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", + "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "is-stream": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", + "integrity": 
"sha1-EtSj3U5o4Lec6428hBc66A2RykQ=", + "dev": true + }, + "is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", + "dev": true + }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", + "dev": true, + "optional": true + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + }, + "isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", + "dev": true + }, + "istanbul-lib-coverage": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.5.tgz", + "integrity": "sha512-8aXznuEPCJvGnMSRft4udDRDtb1V3pkQkMMI5LI+6HuQz5oQ4J2UFn1H82raA3qJtyOLkkwVqICBQkjnGtn5mA==", + "dev": true + }, + "istanbul-lib-hook": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-2.0.7.tgz", + "integrity": "sha512-vrRztU9VRRFDyC+aklfLoeXyNdTfga2EI3udDGn4cZ6fpSXpHLV9X6CHvfoMCPtggg8zvDDmC4b9xfu0z6/llA==", + "dev": true, + "requires": { + "append-transform": "^1.0.0" + } + }, + "istanbul-lib-instrument": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-3.3.0.tgz", + "integrity": "sha512-5nnIN4vo5xQZHdXno/YDXJ0G+I3dAm4XgzfSVTPLQpj/zAV2dV6Juy0yaf10/zrJOJeHoN3fraFe+XRq2bFVZA==", + "dev": true, + "requires": { + "@babel/generator": "^7.4.0", + "@babel/parser": "^7.4.3", + "@babel/template": "^7.4.0", + "@babel/traverse": "^7.4.3", + "@babel/types": "^7.4.0", + "istanbul-lib-coverage": "^2.0.5", + "semver": "^6.0.0" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + } + } + }, + "istanbul-lib-processinfo": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-1.0.0.tgz", + "integrity": "sha512-FY0cPmWa4WoQNlvB8VOcafiRoB5nB+l2Pz2xGuXHRSy1KM8QFOYfz/rN+bGMCAeejrY3mrpF5oJHcN0s/garCg==", + "dev": true, + "requires": { + "archy": "^1.0.0", + "cross-spawn": "^6.0.5", + "istanbul-lib-coverage": "^2.0.3", + "rimraf": "^2.6.3", + "uuid": "^3.3.2" + }, + "dependencies": { + "cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "dev": true, + "requires": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + } + } + }, + "istanbul-lib-report": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-2.0.8.tgz", + "integrity": 
"sha512-fHBeG573EIihhAblwgxrSenp0Dby6tJMFR/HvlerBsrCTD5bkUuoNtn3gVh29ZCS824cGGBPn7Sg7cNk+2xUsQ==", + "dev": true, + "requires": { + "istanbul-lib-coverage": "^2.0.5", + "make-dir": "^2.1.0", + "supports-color": "^6.1.0" + }, + "dependencies": { + "supports-color": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", + "integrity": "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + } + } + }, + "istanbul-lib-source-maps": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-3.0.6.tgz", + "integrity": "sha512-R47KzMtDJH6X4/YW9XTx+jrLnZnscW4VpNN+1PViSYTejLVPWv7oov+Duf8YQSPyVRUvueQqz1TcsC6mooZTXw==", + "dev": true, + "requires": { + "debug": "^4.1.1", + "istanbul-lib-coverage": "^2.0.5", + "make-dir": "^2.1.0", + "rimraf": "^2.6.3", + "source-map": "^0.6.1" + } + }, + "istanbul-reports": { + "version": "2.2.6", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-2.2.6.tgz", + "integrity": "sha512-SKi4rnMyLBKe0Jy2uUdx28h8oG7ph2PPuQPvIAh31d+Ci+lSiEu4C+h3oBPuJ9+mPKhOyW0M8gY4U5NM1WLeXA==", + "dev": true, + "requires": { + "handlebars": "^4.1.2" + } + }, + "jackspeak": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-1.4.0.tgz", + "integrity": "sha512-VDcSunT+wcccoG46FtzuBAyQKlzhHjli4q31e1fIHGOsRspqNUFjVzGb+7eIFDlTvqLygxapDHPHS0ouT2o/tw==", + "dev": true, + "requires": { + "cliui": "^4.1.0" + } + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "js-yaml": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, + "jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", + "dev": true + }, + "jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "dev": true + }, + "json-parse-better-errors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", + "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", + "dev": true + }, + "json-schema": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", + "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=", + "dev": true + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=", + "dev": true + }, 
+ "jsprim": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", + "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "dev": true, + "requires": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.2.3", + "verror": "1.10.0" + } + }, + "lcov-parse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/lcov-parse/-/lcov-parse-1.0.0.tgz", + "integrity": "sha1-6w1GtUER68VhrLTECO+TY73I9+A=", + "dev": true + }, + "load-json-file": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", + "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "parse-json": "^4.0.0", + "pify": "^3.0.0", + "strip-bom": "^3.0.0" + }, + "dependencies": { + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", + "dev": true + } + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==", + "dev": true + }, + "lodash.flattendeep": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz", + "integrity": "sha1-+wMJF/hqMTTlvJvsDWngAT3f7bI=", + "dev": true + }, + "log-driver": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/log-driver/-/log-driver-1.2.7.tgz", + "integrity": "sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg==", + "dev": true + }, + "loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "dev": true, + "requires": { + "js-tokens": "^3.0.0 || ^4.0.0" + } + }, + "lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "dev": true, + "requires": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "dev": true, + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + } + }, + "make-error": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.5.tgz", + "integrity": "sha512-c3sIjNUow0+8swNwVpqoH4YCShKNFkMaw6oH1mNS2haDZQqkeZFlHS3dhoeEbKKmJB4vXpJucU6oH75aDYeE9g==", + "dev": true + }, + "merge-source-map": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/merge-source-map/-/merge-source-map-1.1.0.tgz", + "integrity": "sha512-Qkcp7P2ygktpMPh2mCQZaf3jhN6D3Z/qVZHSdWvQ+2Ef5HgRAPBO57A77+ENm0CPx2+1Ce/MYKi3ymqdfuqibw==", + "dev": true, + "requires": { + "source-map": "^0.6.1" + } + }, + "mime-db": { + 
"version": "1.42.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.42.0.tgz", + "integrity": "sha512-UbfJCR4UAVRNgMpfImz05smAXK7+c+ZntjaA26ANtkXLlOe947Aag5zdIcKQULAiF9Cq4WxBi9jUs5zkA84bYQ==", + "dev": true + }, + "mime-types": { + "version": "2.1.25", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.25.tgz", + "integrity": "sha512-5KhStqB5xpTAeGqKBAMgwaYMnQik7teQN4IAzC7npDv6kzeU6prfkR67bc87J1kWMPGkoaZSq1npmexMgkmEVg==", + "dev": true, + "requires": { + "mime-db": "1.42.0" + } + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=", + "dev": true + }, + "minipass": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.1.tgz", + "integrity": "sha512-UFqVihv6PQgwj8/yTGvl9kPz7xIAY+R5z6XYjRInD3Gk3qx6QGSD6zEcpeG4Dy/lQnv1J6zv8ejV90hyYIKf3w==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + }, + "dependencies": { + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + } + } + }, + "mkdirp": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "dev": true, + "requires": { + "minimist": "0.0.8" + }, + "dependencies": { + "minimist": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=", + "dev": true + } + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "neo-async": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.1.tgz", + "integrity": "sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw==", + "dev": true + }, + "nested-error-stacks": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/nested-error-stacks/-/nested-error-stacks-2.1.0.tgz", + "integrity": "sha512-AO81vsIO1k1sM4Zrd6Hu7regmJN1NSiAja10gc4bX3F0wd+9rQmcuHQaHVQCYIEC8iFXnE+mavh23GOt7wBgug==", + "dev": true + }, + "nice-try": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", + "dev": true + }, + "node-modules-regexp": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz", + "integrity": "sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA=", + "dev": true + }, + "normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "requires": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", 
+ "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, + "number-is-nan": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", + "dev": true + }, + "nyc": { + "version": "14.1.1", + "resolved": "https://registry.npmjs.org/nyc/-/nyc-14.1.1.tgz", + "integrity": "sha512-OI0vm6ZGUnoGZv/tLdZ2esSVzDwUC88SNs+6JoSOMVxA+gKMB8Tk7jBwgemLx4O40lhhvZCVw1C+OYLOBOPXWw==", + "dev": true, + "requires": { + "archy": "^1.0.0", + "caching-transform": "^3.0.2", + "convert-source-map": "^1.6.0", + "cp-file": "^6.2.0", + "find-cache-dir": "^2.1.0", + "find-up": "^3.0.0", + "foreground-child": "^1.5.6", + "glob": "^7.1.3", + "istanbul-lib-coverage": "^2.0.5", + "istanbul-lib-hook": "^2.0.7", + "istanbul-lib-instrument": "^3.3.0", + "istanbul-lib-report": "^2.0.8", + "istanbul-lib-source-maps": "^3.0.6", + "istanbul-reports": "^2.2.4", + "js-yaml": "^3.13.1", + "make-dir": "^2.1.0", + "merge-source-map": "^1.1.0", + "resolve-from": "^4.0.0", + "rimraf": "^2.6.3", + "signal-exit": "^3.0.2", + "spawn-wrap": "^1.4.2", + "test-exclude": "^5.2.3", + "uuid": "^3.3.2", + "yargs": "^13.2.2", + "yargs-parser": "^13.0.0" + } + }, + "oauth-sign": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", + "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==", + "dev": true + }, + "object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", + "dev": true + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "opener": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/opener/-/opener-1.5.1.tgz", + "integrity": "sha512-goYSy5c2UXE4Ra1xixabeVh1guIX/ZV/YokJksb6q2lubWu6UbvPQ20p542/sFIll1nl8JnCyK9oBaOcCWXwvA==", + "dev": true + }, + "optimist": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", + "dev": true, + "requires": { + "minimist": "~0.0.1", + "wordwrap": "~0.0.2" + }, + "dependencies": { + "minimist": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", + "integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=", + "dev": true + } + } + }, + "os-homedir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=", + "dev": true + }, + "own-or": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/own-or/-/own-or-1.0.0.tgz", + "integrity": "sha1-Tod/vtqaLsgAD7wLyuOWRe6L+Nw=", + "dev": true + }, + "own-or-env": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/own-or-env/-/own-or-env-1.0.1.tgz", + "integrity": "sha512-y8qULRbRAlL6x2+M0vIe7jJbJx/kmUTzYonRAa2ayesR2qWLswninkVyeJe4x3IEXhdgoNodzjQRKAoEs6Fmrw==", + "dev": true, + "requires": { + "own-or": "^1.0.0" + } + }, + "p-limit": { + "version": 
"2.2.1", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.1.tgz", + "integrity": "sha512-85Tk+90UCVWvbDavCLKPOLC9vvY8OwEX/RtKF+/1OADJMVlFfEHOiMTPVyxg7mk/dKa+ipdHm0OUkTvCpMTuwg==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "package-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-3.0.0.tgz", + "integrity": "sha512-lOtmukMDVvtkL84rJHI7dpTYq+0rli8N2wlnqUcBuDWCfVhRUfOmnR9SsoHFMLpACvEV60dX7rd0rFaYDZI+FA==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.15", + "hasha": "^3.0.0", + "lodash.flattendeep": "^4.4.0", + "release-zalgo": "^1.0.0" + } + }, + "parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", + "dev": true, + "requires": { + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", + "dev": true + }, + "path-parse": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", + "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==", + "dev": true + }, + "path-type": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", + "dev": true, + "requires": { + "pify": "^3.0.0" + }, + "dependencies": { + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", + "dev": true + } + } + }, + "performance-now": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", + "dev": true + }, + "picomatch": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.1.1.tgz", + "integrity": "sha512-OYMyqkKzK7blWO/+XZYP6w8hH0LDvkBvdvKukti+7kqYFCiEAk+gI3DWnryapc0Dau05ugGTy0foQ6mqn4AHYA==", + "dev": true + }, + "pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true + }, + "pirates": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-3.0.2.tgz", + 
"integrity": "sha512-c5CgUJq6H2k6MJz72Ak1F5sN9n9wlSlJyEnwvpm9/y3WB4E3pHBDT2c6PEiS1vyJvq2bUxUAIu0EGf8Cx4Ic7Q==", + "dev": true, + "requires": { + "node-modules-regexp": "^1.0.0" + } + }, + "pkg-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "dev": true, + "requires": { + "find-up": "^3.0.0" + } + }, + "process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true, + "optional": true + }, + "prop-types": { + "version": "15.7.2", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", + "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", + "dev": true, + "requires": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.8.1" + } + }, + "pseudomap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", + "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=", + "dev": true + }, + "psl": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.6.0.tgz", + "integrity": "sha512-SYKKmVel98NCOYXpkwUqZqh0ahZeeKfmisiLIcEZdsb+WbLv02g/dI5BUmZnIyOe7RzZtLax81nnb2HbvC2tzA==", + "dev": true + }, + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true + }, + "qs": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", + "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==", + "dev": true + }, + "react": { + "version": "16.12.0", + "resolved": "https://registry.npmjs.org/react/-/react-16.12.0.tgz", + "integrity": "sha512-fglqy3k5E+81pA8s+7K0/T3DBCF0ZDOher1elBFzF7O6arXJgzyu/FW+COxFvAWXJoJN9KIZbT2LXlukwphYTA==", + "dev": true, + "requires": { + "loose-envify": "^1.1.0", + "object-assign": "^4.1.1", + "prop-types": "^15.6.2" + } + }, + "react-is": { + "version": "16.12.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.12.0.tgz", + "integrity": "sha512-rPCkf/mWBtKc97aLL9/txD8DZdemK0vkA3JMLShjlJB3Pj3s+lpf1KaBzMfQrAmhMQB0n1cU/SUGgKKBCe837Q==", + "dev": true + }, + "read-pkg": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", + "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", + "dev": true, + "requires": { + "load-json-file": "^4.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^3.0.0" + } + }, + "read-pkg-up": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-4.0.0.tgz", + "integrity": "sha512-6etQSH7nJGsK0RbG/2TeDzZFa8shjQ1um+SwQQ5cwKy0dhSXdOncEhb1CPpvQG4h7FyOV6EB6YlV0yJvZQNAkA==", + "dev": true, + "requires": { + "find-up": "^3.0.0", + "read-pkg": "^3.0.0" + } + }, + "readable-stream": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "dev": true, + "optional": true, + "requires": { + "core-util-is": "~1.0.0", + 
"inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "dependencies": { + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true, + "optional": true + } + } + }, + "readdirp": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.2.0.tgz", + "integrity": "sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ==", + "dev": true, + "requires": { + "picomatch": "^2.0.4" + } + }, + "regenerator-runtime": { + "version": "0.13.3", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz", + "integrity": "sha512-naKIZz2GQ8JWh///G7L3X6LaQUAMp2lvb1rvwwsURe/VXwD6VMfr+/1NuNw3ag8v2kY1aQ/go5SNn79O9JU7yw==", + "dev": true + }, + "release-zalgo": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz", + "integrity": "sha1-CXALflB0Mpc5Mw5TXFqQ+2eFFzA=", + "dev": true, + "requires": { + "es6-error": "^4.0.1" + } + }, + "request": { + "version": "2.88.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", + "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", + "dev": true, + "requires": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "har-validator": "~5.1.0", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.4.3", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true + }, + "require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + }, + "resolve": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.13.1.tgz", + "integrity": "sha512-CxqObCX8K8YtAhOBRg+lrcdn+LK+WYOS8tSjqSFbjtrI5PnS63QPhZl4+yKfrU9tdsbMu9Anr/amegT87M9Z6w==", + "dev": true, + "requires": { + "path-parse": "^1.0.6" + } + }, + "resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true + }, + "rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + }, + "safe-buffer": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz", + "integrity": 
"sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==", + "dev": true + }, + "safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + }, + "set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", + "dev": true + }, + "shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "dev": true, + "requires": { + "shebang-regex": "^1.0.0" + } + }, + "shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", + "dev": true + }, + "signal-exit": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", + "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, + "source-map-support": { + "version": "0.5.16", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.16.tgz", + "integrity": "sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ==", + "dev": true, + "requires": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "spawn-wrap": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-1.4.3.tgz", + "integrity": "sha512-IgB8md0QW/+tWqcavuFgKYR/qIRvJkRLPJDFaoXtLLUaVcCDK0+HeFTkmQHj3eprcYhc+gOl0aEA1w7qZlYezw==", + "dev": true, + "requires": { + "foreground-child": "^1.5.6", + "mkdirp": "^0.5.0", + "os-homedir": "^1.0.1", + "rimraf": "^2.6.2", + "signal-exit": "^3.0.2", + "which": "^1.3.0" + }, + "dependencies": { + "which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + } + } + }, + "spdx-correct": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", + "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==", + "dev": true, + "requires": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-exceptions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz", + "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==", + "dev": true + }, + "spdx-expression-parse": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", + "integrity": 
"sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", + "dev": true, + "requires": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-license-ids": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz", + "integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==", + "dev": true + }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true + }, + "sshpk": { + "version": "1.16.1", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", + "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", + "dev": true, + "requires": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", + "tweetnacl": "~0.14.0" + } + }, + "stack-utils": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-1.0.2.tgz", + "integrity": "sha512-MTX+MeG5U994cazkjd/9KNAapsHnibjMLnfXodlkXw76JEea0UiNzrqidzo1emMwk7w5Qhc9jd4Bn9TBb1MFwA==", + "dev": true + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "dev": true, + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "optional": true, + "requires": { + "safe-buffer": "~5.1.0" + }, + "dependencies": { + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true, + "optional": true + } + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0" + } + }, + "strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", + "dev": true + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + }, + "tap": { + "version": "14.10.2", + "resolved": "https://registry.npmjs.org/tap/-/tap-14.10.2.tgz", + "integrity": "sha512-JeUDsVrMFmR6b3p9hO9yIT/jibrK6LI7nFza5cqDGsxJyCp7yU3enRgS5nekuoAOzewbrU7P+9QDRDT01urROA==", + "dev": true, + "requires": { + "async-hook-domain": "^1.1.3", + "bind-obj-methods": "^2.0.0", + "browser-process-hrtime": "^1.0.0", + "chokidar": "^3.3.0", + "color-support": "^1.1.0", + "coveralls": "^3.0.8", + "diff": "^4.0.1", + "esm": 
"^3.2.25", + "findit": "^2.0.0", + "flow-remove-types": "^2.112.0", + "foreground-child": "^1.3.3", + "fs-exists-cached": "^1.0.0", + "function-loop": "^1.0.2", + "glob": "^7.1.6", + "import-jsx": "^3.0.0", + "ink": "^2.5.0", + "isexe": "^2.0.0", + "istanbul-lib-processinfo": "^1.0.0", + "jackspeak": "^1.4.0", + "minipass": "^3.1.1", + "mkdirp": "^0.5.1", + "nyc": "^14.1.1", + "opener": "^1.5.1", + "own-or": "^1.0.0", + "own-or-env": "^1.0.1", + "react": "^16.12.0", + "rimraf": "^2.7.1", + "signal-exit": "^3.0.0", + "source-map-support": "^0.5.16", + "stack-utils": "^1.0.2", + "tap-mocha-reporter": "^5.0.0", + "tap-parser": "^10.0.1", + "tap-yaml": "^1.0.0", + "tcompare": "^3.0.0", + "treport": "^1.0.0", + "trivial-deferred": "^1.0.1", + "ts-node": "^8.5.2", + "typescript": "^3.7.2", + "which": "^2.0.2", + "write-file-atomic": "^3.0.1", + "yaml": "^1.7.2", + "yapool": "^1.0.0" + }, + "dependencies": { + "@babel/code-frame": { + "version": "7.5.5", + "bundled": true, + "requires": { + "@babel/highlight": "^7.0.0" + } + }, + "@babel/core": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/code-frame": "^7.5.5", + "@babel/generator": "^7.7.4", + "@babel/helpers": "^7.7.4", + "@babel/parser": "^7.7.4", + "@babel/template": "^7.7.4", + "@babel/traverse": "^7.7.4", + "@babel/types": "^7.7.4", + "convert-source-map": "^1.7.0", + "debug": "^4.1.0", + "json5": "^2.1.0", + "lodash": "^4.17.13", + "resolve": "^1.3.2", + "semver": "^5.4.1", + "source-map": "^0.5.0" + }, + "dependencies": { + "@babel/generator": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/types": "^7.7.4", + "jsesc": "^2.5.1", + "lodash": "^4.17.13", + "source-map": "^0.5.0" + } + }, + "@babel/helper-function-name": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/helper-get-function-arity": "^7.7.4", + "@babel/template": "^7.7.4", + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-get-function-arity": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-split-export-declaration": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/types": "^7.7.4" + } + }, + "@babel/parser": { + "version": "7.7.4", + "bundled": true, + "dev": true + }, + "@babel/template": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "@babel/parser": "^7.7.4", + "@babel/types": "^7.7.4" + } + }, + "@babel/traverse": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/code-frame": "^7.5.5", + "@babel/generator": "^7.7.4", + "@babel/helper-function-name": "^7.7.4", + "@babel/helper-split-export-declaration": "^7.7.4", + "@babel/parser": "^7.7.4", + "@babel/types": "^7.7.4", + "debug": "^4.1.0", + "globals": "^11.1.0", + "lodash": "^4.17.13" + } + }, + "@babel/types": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "esutils": "^2.0.2", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + }, + "convert-source-map": { + "version": "1.7.0", + "bundled": true, + "dev": true, + "requires": { + "safe-buffer": "~5.1.1" + } + }, + "debug": { + "version": "4.1.1", + "bundled": true, + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "bundled": true, + "dev": true + }, + "safe-buffer": { + "version": "5.1.2", + "bundled": true, + "dev": true + }, + "source-map": { + "version": "0.5.7", + 
"bundled": true, + "dev": true + } + } + }, + "@babel/generator": { + "version": "7.7.2", + "bundled": true, + "requires": { + "@babel/types": "^7.7.2", + "jsesc": "^2.5.1", + "lodash": "^4.17.13", + "source-map": "^0.5.0" + }, + "dependencies": { + "source-map": { + "version": "0.5.7", + "bundled": true + } + } + }, + "@babel/helper-builder-react-jsx": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/types": "^7.7.4", + "esutils": "^2.0.0" + }, + "dependencies": { + "@babel/types": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "esutils": "^2.0.2", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } + } + }, + "@babel/helper-plugin-utils": { + "version": "7.0.0", + "bundled": true, + "dev": true + }, + "@babel/helpers": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/template": "^7.7.4", + "@babel/traverse": "^7.7.4", + "@babel/types": "^7.7.4" + }, + "dependencies": { + "@babel/generator": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/types": "^7.7.4", + "jsesc": "^2.5.1", + "lodash": "^4.17.13", + "source-map": "^0.5.0" + } + }, + "@babel/helper-function-name": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/helper-get-function-arity": "^7.7.4", + "@babel/template": "^7.7.4", + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-get-function-arity": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/types": "^7.7.4" + } + }, + "@babel/helper-split-export-declaration": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/types": "^7.7.4" + } + }, + "@babel/parser": { + "version": "7.7.4", + "bundled": true, + "dev": true + }, + "@babel/template": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "@babel/parser": "^7.7.4", + "@babel/types": "^7.7.4" + } + }, + "@babel/traverse": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/code-frame": "^7.5.5", + "@babel/generator": "^7.7.4", + "@babel/helper-function-name": "^7.7.4", + "@babel/helper-split-export-declaration": "^7.7.4", + "@babel/parser": "^7.7.4", + "@babel/types": "^7.7.4", + "debug": "^4.1.0", + "globals": "^11.1.0", + "lodash": "^4.17.13" + } + }, + "@babel/types": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "esutils": "^2.0.2", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + }, + "debug": { + "version": "4.1.1", + "bundled": true, + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "bundled": true, + "dev": true + }, + "source-map": { + "version": "0.5.7", + "bundled": true, + "dev": true + } + } + }, + "@babel/highlight": { + "version": "7.5.0", + "bundled": true, + "requires": { + "chalk": "^2.0.0", + "esutils": "^2.0.2", + "js-tokens": "^4.0.0" + }, + "dependencies": { + "js-tokens": { + "version": "4.0.0", + "bundled": true + } + } + }, + "@babel/parser": { + "version": "7.7.3", + "bundled": true + }, + "@babel/plugin-proposal-object-rest-spread": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/plugin-syntax-object-rest-spread": "^7.7.4" + } + }, + "@babel/plugin-syntax-jsx": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + 
"@babel/plugin-syntax-object-rest-spread": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-destructuring": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.0.0" + } + }, + "@babel/plugin-transform-react-jsx": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "@babel/helper-builder-react-jsx": "^7.7.4", + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/plugin-syntax-jsx": "^7.7.4" + } + }, + "@babel/runtime": { + "version": "7.7.4", + "bundled": true, + "dev": true, + "requires": { + "regenerator-runtime": "^0.13.2" + } + }, + "@babel/template": { + "version": "7.7.0", + "bundled": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "@babel/parser": "^7.7.0", + "@babel/types": "^7.7.0" + } + }, + "@babel/types": { + "version": "7.7.2", + "bundled": true, + "requires": { + "esutils": "^2.0.2", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + }, + "@types/color-name": { + "version": "1.1.1", + "bundled": true, + "dev": true + }, + "@types/prop-types": { + "version": "15.7.3", + "bundled": true, + "dev": true + }, + "@types/react": { + "version": "16.9.13", + "bundled": true, + "dev": true, + "requires": { + "@types/prop-types": "*", + "csstype": "^2.2.0" + } + }, + "ansi-escapes": { + "version": "4.3.0", + "bundled": true, + "dev": true, + "requires": { + "type-fest": "^0.8.1" + } + }, + "ansi-regex": { + "version": "2.1.1", + "bundled": true + }, + "ansi-styles": { + "version": "3.2.1", + "bundled": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "ansicolors": { + "version": "0.3.2", + "bundled": true, + "dev": true + }, + "arrify": { + "version": "1.0.1", + "bundled": true, + "dev": true + }, + "astral-regex": { + "version": "1.0.0", + "bundled": true, + "dev": true + }, + "auto-bind": { + "version": "2.1.1", + "bundled": true, + "dev": true, + "requires": { + "@types/react": "^16.8.12" + } + }, + "caller-callsite": { + "version": "2.0.0", + "bundled": true, + "dev": true, + "requires": { + "callsites": "^2.0.0" + } + }, + "caller-path": { + "version": "2.0.0", + "bundled": true, + "dev": true, + "requires": { + "caller-callsite": "^2.0.0" + } + }, + "callsites": { + "version": "2.0.0", + "bundled": true, + "dev": true + }, + "cardinal": { + "version": "2.1.1", + "bundled": true, + "dev": true, + "requires": { + "ansicolors": "~0.3.2", + "redeyed": "~2.1.0" + } + }, + "chalk": { + "version": "2.4.2", + "bundled": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "ci-info": { + "version": "2.0.0", + "bundled": true, + "dev": true + }, + "cli-cursor": { + "version": "2.1.0", + "bundled": true, + "dev": true, + "requires": { + "restore-cursor": "^2.0.0" + } + }, + "cli-truncate": { + "version": "1.1.0", + "bundled": true, + "dev": true, + "requires": { + "slice-ansi": "^1.0.0", + "string-width": "^2.0.0" + } + }, + "color-convert": { + "version": "1.9.3", + "bundled": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "bundled": true + }, + "csstype": { + "version": "2.6.7", + "bundled": true, + "dev": true + }, + "debug": { + "version": "2.6.9", + "bundled": true, + "requires": { + "ms": "2.0.0" + } + }, + "emoji-regex": { + "version": "7.0.3", + "bundled": true, + "dev": true + }, + "escape-string-regexp": { + "version": "1.0.5", + "bundled": true + }, 
+ "esprima": { + "version": "4.0.1", + "bundled": true, + "dev": true + }, + "esutils": { + "version": "2.0.3", + "bundled": true + }, + "events-to-array": { + "version": "1.1.2", + "bundled": true, + "dev": true + }, + "globals": { + "version": "11.12.0", + "bundled": true, + "dev": true + }, + "has-flag": { + "version": "3.0.0", + "bundled": true + }, + "import-jsx": { + "version": "3.0.0", + "bundled": true, + "dev": true, + "requires": { + "@babel/core": "^7.5.5", + "@babel/plugin-proposal-object-rest-spread": "^7.5.5", + "@babel/plugin-transform-destructuring": "^7.5.0", + "@babel/plugin-transform-react-jsx": "^7.3.0", + "caller-path": "^2.0.0", + "resolve-from": "^3.0.0" + } + }, + "ink": { + "version": "2.5.0", + "bundled": true, + "dev": true, + "requires": { + "@types/react": "^16.8.6", + "ansi-escapes": "^4.2.1", + "arrify": "^1.0.1", + "auto-bind": "^2.0.0", + "chalk": "^2.4.1", + "cli-cursor": "^2.1.0", + "cli-truncate": "^1.1.0", + "is-ci": "^2.0.0", + "lodash.throttle": "^4.1.1", + "log-update": "^3.0.0", + "prop-types": "^15.6.2", + "react-reconciler": "^0.21.0", + "scheduler": "^0.15.0", + "signal-exit": "^3.0.2", + "slice-ansi": "^1.0.0", + "string-length": "^2.0.0", + "widest-line": "^2.0.0", + "wrap-ansi": "^5.0.0", + "yoga-layout-prebuilt": "^1.9.3" + } + }, + "is-ci": { + "version": "2.0.0", + "bundled": true, + "dev": true, + "requires": { + "ci-info": "^2.0.0" + } + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "bundled": true, + "dev": true + }, + "js-tokens": { + "version": "3.0.2", + "bundled": true, + "dev": true + }, + "jsesc": { + "version": "2.5.2", + "bundled": true + }, + "json5": { + "version": "2.1.1", + "bundled": true, + "dev": true, + "requires": { + "minimist": "^1.2.0" + } + }, + "lodash": { + "version": "4.17.15", + "bundled": true + }, + "lodash.throttle": { + "version": "4.1.1", + "bundled": true, + "dev": true + }, + "log-update": { + "version": "3.3.0", + "bundled": true, + "dev": true, + "requires": { + "ansi-escapes": "^3.2.0", + "cli-cursor": "^2.1.0", + "wrap-ansi": "^5.0.0" + }, + "dependencies": { + "ansi-escapes": { + "version": "3.2.0", + "bundled": true, + "dev": true + } + } + }, + "loose-envify": { + "version": "1.4.0", + "bundled": true, + "dev": true, + "requires": { + "js-tokens": "^3.0.0 || ^4.0.0" + } + }, + "mimic-fn": { + "version": "1.2.0", + "bundled": true, + "dev": true + }, + "minimist": { + "version": "1.2.0", + "bundled": true, + "dev": true + }, + "minipass": { + "version": "3.1.1", + "bundled": true, + "dev": true, + "requires": { + "yallist": "^4.0.0" + }, + "dependencies": { + "yallist": { + "version": "4.0.0", + "bundled": true, + "dev": true + } + } + }, + "ms": { + "version": "2.0.0", + "bundled": true + }, + "object-assign": { + "version": "4.1.1", + "bundled": true, + "dev": true + }, + "onetime": { + "version": "2.0.1", + "bundled": true, + "dev": true, + "requires": { + "mimic-fn": "^1.0.0" + } + }, + "path-parse": { + "version": "1.0.6", + "bundled": true, + "dev": true + }, + "prop-types": { + "version": "15.7.2", + "bundled": true, + "dev": true, + "requires": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.8.1" + } + }, + "punycode": { + "version": "2.1.1", + "bundled": true, + "dev": true + }, + "react-is": { + "version": "16.10.2", + "bundled": true, + "dev": true + }, + "react-reconciler": { + "version": "0.21.0", + "bundled": true, + "dev": true, + "requires": { + "loose-envify": "^1.1.0", + "object-assign": "^4.1.1", + "prop-types": "^15.6.2", + "scheduler": 
"^0.15.0" + } + }, + "redeyed": { + "version": "2.1.1", + "bundled": true, + "dev": true, + "requires": { + "esprima": "~4.0.0" + } + }, + "regenerator-runtime": { + "version": "0.13.3", + "bundled": true, + "dev": true + }, + "resolve": { + "version": "1.12.0", + "bundled": true, + "dev": true, + "requires": { + "path-parse": "^1.0.6" + } + }, + "resolve-from": { + "version": "3.0.0", + "bundled": true, + "dev": true + }, + "restore-cursor": { + "version": "2.0.0", + "bundled": true, + "dev": true, + "requires": { + "onetime": "^2.0.0", + "signal-exit": "^3.0.2" + } + }, + "scheduler": { + "version": "0.15.0", + "bundled": true, + "dev": true, + "requires": { + "loose-envify": "^1.1.0", + "object-assign": "^4.1.1" + } + }, + "semver": { + "version": "5.7.1", + "bundled": true, + "dev": true + }, + "signal-exit": { + "version": "3.0.2", + "bundled": true, + "dev": true + }, + "slice-ansi": { + "version": "1.0.0", + "bundled": true, + "dev": true, + "requires": { + "is-fullwidth-code-point": "^2.0.0" + } + }, + "source-map": { + "version": "0.6.1", + "bundled": true + }, + "string-length": { + "version": "2.0.0", + "bundled": true, + "dev": true, + "requires": { + "astral-regex": "^1.0.0", + "strip-ansi": "^4.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "bundled": true, + "dev": true + }, + "strip-ansi": { + "version": "4.0.0", + "bundled": true, + "dev": true, + "requires": { + "ansi-regex": "^3.0.0" + } + } + } + }, + "string-width": { + "version": "2.1.1", + "bundled": true, + "dev": true, + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "bundled": true, + "dev": true + }, + "strip-ansi": { + "version": "4.0.0", + "bundled": true, + "dev": true, + "requires": { + "ansi-regex": "^3.0.0" + } + } + } + }, + "strip-ansi": { + "version": "3.0.1", + "bundled": true, + "requires": { + "ansi-regex": "^2.0.0" + } + }, + "supports-color": { + "version": "5.5.0", + "bundled": true, + "requires": { + "has-flag": "^3.0.0" + } + }, + "tap-parser": { + "version": "10.0.1", + "bundled": true, + "dev": true, + "requires": { + "events-to-array": "^1.0.1", + "minipass": "^3.0.0", + "tap-yaml": "^1.0.0" + } + }, + "tap-yaml": { + "version": "1.0.0", + "bundled": true, + "dev": true, + "requires": { + "yaml": "^1.5.0" + } + }, + "to-fast-properties": { + "version": "2.0.0", + "bundled": true + }, + "treport": { + "version": "1.0.0", + "bundled": true, + "dev": true, + "requires": { + "cardinal": "^2.1.1", + "chalk": "^3.0.0", + "import-jsx": "^3.0.0", + "ink": "^2.5.0", + "ms": "^2.1.2", + "string-length": "^3.1.0", + "tap-parser": "^10.0.1", + "unicode-length": "^2.0.2" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "bundled": true, + "dev": true + }, + "ansi-styles": { + "version": "4.2.0", + "bundled": true, + "dev": true, + "requires": { + "@types/color-name": "^1.1.1", + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "3.0.0", + "bundled": true, + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "bundled": true, + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "bundled": true, + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "bundled": true, + "dev": true + }, + "ms": { + "version": "2.1.2", + "bundled": true, + "dev": true + }, + "string-length": { + "version": "3.1.0", + "bundled": true, + 
"dev": true, + "requires": { + "astral-regex": "^1.0.0", + "strip-ansi": "^5.2.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "bundled": true, + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + }, + "supports-color": { + "version": "7.1.0", + "bundled": true, + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + }, + "unicode-length": { + "version": "2.0.2", + "bundled": true, + "dev": true, + "requires": { + "punycode": "^2.0.0", + "strip-ansi": "^3.0.1" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "bundled": true, + "dev": true + }, + "strip-ansi": { + "version": "3.0.1", + "bundled": true, + "dev": true, + "requires": { + "ansi-regex": "^2.0.0" + } + } + } + } + } + }, + "type-fest": { + "version": "0.8.1", + "bundled": true, + "dev": true + }, + "widest-line": { + "version": "2.0.1", + "bundled": true, + "dev": true, + "requires": { + "string-width": "^2.1.1" + } + }, + "wrap-ansi": { + "version": "5.1.0", + "bundled": true, + "dev": true, + "requires": { + "ansi-styles": "^3.2.0", + "string-width": "^3.0.0", + "strip-ansi": "^5.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "bundled": true, + "dev": true + }, + "string-width": { + "version": "3.1.0", + "bundled": true, + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "bundled": true, + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "yaml": { + "version": "1.7.2", + "bundled": true, + "dev": true, + "requires": { + "@babel/runtime": "^7.6.3" + } + }, + "yoga-layout-prebuilt": { + "version": "1.9.3", + "bundled": true, + "dev": true + } + } + }, + "tap-mocha-reporter": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/tap-mocha-reporter/-/tap-mocha-reporter-5.0.0.tgz", + "integrity": "sha512-8HlAtdmYGlDZuW83QbF/dc46L7cN+AGhLZcanX3I9ILvxUAl+G2/mtucNPSXecTlG/4iP1hv6oMo0tMhkn3Tsw==", + "dev": true, + "requires": { + "color-support": "^1.1.0", + "debug": "^2.1.3", + "diff": "^1.3.2", + "escape-string-regexp": "^1.0.3", + "glob": "^7.0.5", + "readable-stream": "^2.1.5", + "tap-parser": "^10.0.0", + "tap-yaml": "^1.0.0", + "unicode-length": "^1.0.0" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "diff": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-1.4.0.tgz", + "integrity": "sha1-fyjS657nsVqX79ic5j3P2qPMur8=", + "dev": true + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true + } + } + }, + "tap-parser": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/tap-parser/-/tap-parser-10.0.1.tgz", + "integrity": "sha512-qdT15H0DoJIi7zOqVXDn9X0gSM68JjNy1w3VemwTJlDnETjbi6SutnqmBfjDJAwkFS79NJ97gZKqie00ZCGmzg==", + "dev": true, + "requires": { + "events-to-array": "^1.0.1", + "minipass": "^3.0.0", + "tap-yaml": "^1.0.0" + } + }, + "tap-yaml": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/tap-yaml/-/tap-yaml-1.0.0.tgz", + "integrity": "sha512-Rxbx4EnrWkYk0/ztcm5u3/VznbyFJpyXO12dDBHKWiDVxy7O2Qw6MRrwO5H6Ww0U5YhRY/4C/VzWmFPhBQc4qQ==", + "dev": true, + "requires": { + "yaml": "^1.5.0" + } + }, + 
"tcompare": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/tcompare/-/tcompare-3.0.4.tgz", + "integrity": "sha512-Q3TitMVK59NyKgQyFh+857wTAUE329IzLDehuPgU4nF5e8g+EUQ+yUbjUy1/6ugiNnXztphT+NnqlCXolv9P3A==", + "dev": true, + "requires": { + "diff-frag": "^1.0.1" + } + }, + "test-exclude": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-5.2.3.tgz", + "integrity": "sha512-M+oxtseCFO3EDtAaGH7iiej3CBkzXqFMbzqYAACdzKui4eZA+pq3tZEwChvOdNfa7xxy8BfbmgJSIr43cC/+2g==", + "dev": true, + "requires": { + "glob": "^7.1.3", + "minimatch": "^3.0.4", + "read-pkg-up": "^4.0.0", + "require-main-filename": "^2.0.0" + } + }, + "to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", + "dev": true + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + }, + "tough-cookie": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", + "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", + "dev": true, + "requires": { + "psl": "^1.1.24", + "punycode": "^1.4.1" + }, + "dependencies": { + "punycode": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=", + "dev": true + } + } + }, + "trivial-deferred": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/trivial-deferred/-/trivial-deferred-1.0.1.tgz", + "integrity": "sha1-N21NKdlR1jaKb3oK6FwvTV4GWPM=", + "dev": true + }, + "ts-node": { + "version": "8.5.4", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-8.5.4.tgz", + "integrity": "sha512-izbVCRV68EasEPQ8MSIGBNK9dc/4sYJJKYA+IarMQct1RtEot6Xp0bXuClsbUSnKpg50ho+aOAx8en5c+y4OFw==", + "dev": true, + "requires": { + "arg": "^4.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "source-map-support": "^0.5.6", + "yn": "^3.0.0" + } + }, + "tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "dev": true, + "requires": { + "safe-buffer": "^5.0.1" + } + }, + "tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", + "dev": true + }, + "typedarray-to-buffer": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", + "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", + "dev": true, + "requires": { + "is-typedarray": "^1.0.0" + } + }, + "typescript": { + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.7.3.tgz", + "integrity": "sha512-Mcr/Qk7hXqFBXMN7p7Lusj1ktCBydylfQM/FZCk5glCNQJrCUKPkMHdo9R0MTFWsC/4kPFvDS0fDPvukfCkFsw==", + "dev": true + }, + "uglify-js": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.7.2.tgz", + "integrity": "sha512-uhRwZcANNWVLrxLfNFEdltoPNhECUR3lc+UdJoG9CBpMcSnKyWA94tc3eAujB1GcMY5Uwq8ZMp4qWpxWYDQmaA==", + 
"dev": true, + "optional": true, + "requires": { + "commander": "~2.20.3", + "source-map": "~0.6.1" + } + }, + "unicode-length": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/unicode-length/-/unicode-length-1.0.3.tgz", + "integrity": "sha1-Wtp6f+1RhBpBijKM8UlHisg1irs=", + "dev": true, + "requires": { + "punycode": "^1.3.2", + "strip-ansi": "^3.0.1" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true + }, + "punycode": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=", + "dev": true + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dev": true, + "requires": { + "ansi-regex": "^2.0.0" + } + } + } + }, + "uri-js": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", + "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", + "dev": true, + "requires": { + "punycode": "^2.1.0" + } + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", + "dev": true, + "optional": true + }, + "uuid": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.3.tgz", + "integrity": "sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ==", + "dev": true + }, + "validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "requires": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "verror": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "dev": true, + "requires": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + } + }, + "vlq": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/vlq/-/vlq-0.2.3.tgz", + "integrity": "sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow==", + "dev": true + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", + "dev": true + }, + "wordwrap": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", + "dev": true + }, + "wrap-ansi": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", + "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", + "dev": true, + "requires": { + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1" + 
}, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "dev": true, + "requires": { + "number-is-nan": "^1.0.0" + } + }, + "string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "dev": true, + "requires": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dev": true, + "requires": { + "ansi-regex": "^2.0.0" + } + } + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "write-file-atomic": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.1.tgz", + "integrity": "sha512-JPStrIyyVJ6oCSz/691fAjFtefZ6q+fP6tm+OS4Qw6o+TGQxNp1ziY2PgS+X/m0V8OWhZiO/m4xSj+Pr4RrZvw==", + "dev": true, + "requires": { + "imurmurhash": "^0.1.4", + "is-typedarray": "^1.0.0", + "signal-exit": "^3.0.2", + "typedarray-to-buffer": "^3.1.5" + } + }, + "y18n": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", + "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==", + "dev": true + }, + "yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=", + "dev": true + }, + "yaml": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.7.2.tgz", + "integrity": "sha512-qXROVp90sb83XtAoqE8bP9RwAkTTZbugRUTm5YeFCBfNRPEp2YzTeqWiz7m5OORHzEvrA/qcGS8hp/E+MMROYw==", + "dev": true, + "requires": { + "@babel/runtime": "^7.6.3" + } + }, + "yapool": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/yapool/-/yapool-1.0.0.tgz", + "integrity": "sha1-9pPymjFbUNmp2iZGp6ZkXJaYW2o=", + "dev": true + }, + "yargs": { + "version": "13.3.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.0.tgz", + "integrity": "sha512-2eehun/8ALW8TLoIl7MVaRUrg+yCnenu8B4kBlRxj3GJGDKU1Og7sMXPNm1BYyM1DOJmTZ4YeN/Nwxv+8XJsUA==", + "dev": true, + "requires": { + "cliui": "^5.0.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^3.0.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^13.1.1" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "cliui": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", + "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "dev": true, + "requires": { + 
"string-width": "^3.1.0", + "strip-ansi": "^5.2.0", + "wrap-ansi": "^5.1.0" + } + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + }, + "wrap-ansi": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", + "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.0", + "string-width": "^3.0.0", + "strip-ansi": "^5.0.0" + } + } + } + }, + "yargs-parser": { + "version": "13.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.1.tgz", + "integrity": "sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ==", + "dev": true, + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } + }, + "yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true + } + } +} diff --git a/node_modules/npm-normalize-package-bin/package.json b/node_modules/npm-normalize-package-bin/package.json new file mode 100644 index 0000000000000..c0e548cc537bd --- /dev/null +++ b/node_modules/npm-normalize-package-bin/package.json @@ -0,0 +1,57 @@ +{ + "_from": "npm-normalize-package-bin@^1.0.0", + "_id": "npm-normalize-package-bin@1.0.1", + "_inBundle": false, + "_integrity": "sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==", + "_location": "/npm-normalize-package-bin", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "npm-normalize-package-bin@^1.0.0", + "name": "npm-normalize-package-bin", + "escapedName": "npm-normalize-package-bin", + "rawSpec": "^1.0.0", + "saveSpec": null, + "fetchSpec": "^1.0.0" + }, + "_requiredBy": [ + "/bin-links" + ], + "_resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz", + "_shasum": "6e79a41f23fd235c0623218228da7d9c23b8f6e2", + "_spec": "npm-normalize-package-bin@^1.0.0", + "_where": "/Users/isaacs/dev/npm/cli/node_modules/bin-links", + "author": { + "name": "Isaac Z. 
Schlueter", + "email": "i@izs.me", + "url": "https://izs.me" + }, + "bugs": { + "url": "https://github.com/npm/npm-normalize-package-bin/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Turn any flavor of allowable package.json bin into a normalized object", + "devDependencies": { + "tap": "^14.10.2" + }, + "homepage": "https://github.com/npm/npm-normalize-package-bin#readme", + "license": "ISC", + "name": "npm-normalize-package-bin", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/npm-normalize-package-bin.git" + }, + "scripts": { + "postpublish": "git push origin --follow-tags", + "postversion": "npm publish", + "preversion": "npm test", + "snap": "tap", + "test": "tap" + }, + "tap": { + "check-coverage": true + }, + "version": "1.0.1" +} diff --git a/node_modules/npm-normalize-package-bin/test/array.js b/node_modules/npm-normalize-package-bin/test/array.js new file mode 100644 index 0000000000000..63dafa8913741 --- /dev/null +++ b/node_modules/npm-normalize-package-bin/test/array.js @@ -0,0 +1,37 @@ +const normalize = require('../') +const t = require('tap') + +t.test('benign array', async t => { + const pkg = { name: 'hello', version: 'world', bin: ['./x/y', 'y/z', './a'] } + const expect = { name: 'hello', version: 'world', bin: { + y: 'x/y', + z: 'y/z', + a: 'a', + } } + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) + +t.test('conflicting array', async t => { + const pkg = { name: 'hello', version: 'world', bin: ['./x/y', 'z/y', './a'] } + const expect = { name: 'hello', version: 'world', bin: { + y: 'z/y', + a: 'a', + } } + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) + +t.test('slashy array', async t => { + const pkg = { name: 'hello', version: 'world', bin: [ '/etc/passwd' ] } + const expect = { name: 'hello', version: 'world', bin: { passwd: 'etc/passwd' } } + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) + +t.test('dotty array', async t => { + const pkg = { name: 'hello', version: 'world', bin: ['../../../../etc/passwd'] } + const expect = { name: 'hello', version: 'world', bin: { passwd: 'etc/passwd' } } + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) diff --git a/node_modules/npm-normalize-package-bin/test/nobin.js b/node_modules/npm-normalize-package-bin/test/nobin.js new file mode 100644 index 0000000000000..536d7eb22f68a --- /dev/null +++ b/node_modules/npm-normalize-package-bin/test/nobin.js @@ -0,0 +1,35 @@ +const normalize = require('../') +const t = require('tap') + +// all of these just delete the bins, so expect the same value +const expect = { name: 'hello', version: 'world' } + +t.test('no bin in object', async t => { + const pkg = { name: 'hello', version: 'world' } + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) + +t.test('empty string bin in object', async t => { + const pkg = { name: 'hello', version: 'world', bin: '' } + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) + +t.test('false bin in object', async t => { + const pkg = { name: 'hello', version: 'world', bin: false } + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) + +t.test('null bin in 
object', async t => { + const pkg = { name: 'hello', version: 'world', bin: null } + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) + +t.test('number bin', async t => { + const pkg = { name: 'hello', version: 'world', bin: 42069 } + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) diff --git a/node_modules/npm-normalize-package-bin/test/object.js b/node_modules/npm-normalize-package-bin/test/object.js new file mode 100644 index 0000000000000..00d23684fb75b --- /dev/null +++ b/node_modules/npm-normalize-package-bin/test/object.js @@ -0,0 +1,141 @@ +const normalize = require('../') +const t = require('tap') + +t.test('benign object', async t => { + // just clean up the ./ in the targets and remove anything weird + const pkg = { name: 'hello', version: 'world', bin: { + y: './x/y', + z: './y/z', + a: './a', + } } + const expect = { name: 'hello', version: 'world', bin: { + y: 'x/y', + z: 'y/z', + a: 'a', + } } + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) + +t.test('empty and non-string targets', async t => { + // just clean up the ./ in the targets and remove anything weird + const pkg = { name: 'hello', version: 'world', bin: { + z: './././', + y: '', + './x': 'x.js', + re: /asdf/, + foo: { bar: 'baz' }, + false: false, + null: null, + array: [1,2,3], + func: function () {}, + } } + const expect = { name: 'hello', version: 'world', bin: { + x: 'x.js', + } } + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) + +t.test('slashy object', async t => { + const pkg = { name: 'hello', version: 'world', bin: { + '/path/foo': '/etc/passwd', + 'bar': '/etc/passwd', + '/etc/glorb/baz': '/etc/passwd', + '/etc/passwd:/bin/usr/exec': '/etc/passwd', + } } + const expect = { + name: 'hello', + version: 'world', + bin: { + foo: 'etc/passwd', + bar: 'etc/passwd', + baz: 'etc/passwd', + exec: 'etc/passwd', + } + } + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) + +t.test('dotty object', async t => { + const pkg = { + name: 'hello', + version: 'world', + bin: { + 'nodots': '../../../../etc/passwd', + '../../../../../../dots': '../../../../etc/passwd', + '.././../\\./..//C:\\./': 'this is removed', + '.././../\\./..//C:\\/': 'super safe programming language', + '.././../\\./..//C:\\x\\y\\z/': 'xyz', + } } + const expect = { name: 'hello', version: 'world', bin: { + nodots: 'etc/passwd', + dots: 'etc/passwd', + C: 'super safe programming language', + z: 'xyz', + } } + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) + +t.test('weird object', async t => { + const pkg = { name: 'hello', version: 'world', bin: /asdf/ } + const expect = { name: 'hello', version: 'world' } + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) + +t.test('oddball keys', async t => { + const pkg = { + bin: { + '~': 'target', + '£': 'target', + 'ζ': 'target', + 'ぎ': 'target', + '操': 'target', + '🎱': 'target', + '💎': 'target', + '💸': 'target', + '🦉': 'target', + 'сheck-dom': 'target', + 'Ωpm': 'target', + 'ζλ': 'target', + 'мга': 'target', + 'пше': 'target', + 'тзч': 'target', + 'тзь': 'target', + 'нфкт': 'target', + 'ссср': 'target', + '君の名は': 'target', + '君の名は': 'target', + } + } + + const 
expect = { + bin: { + '~': 'target', + '£': 'target', + 'ζ': 'target', + 'ぎ': 'target', + '操': 'target', + '🎱': 'target', + '💎': 'target', + '💸': 'target', + '🦉': 'target', + 'сheck-dom': 'target', + 'Ωpm': 'target', + 'ζλ': 'target', + 'мга': 'target', + 'пше': 'target', + 'тзч': 'target', + 'тзь': 'target', + 'нфкт': 'target', + 'ссср': 'target', + '君の名は': 'target', + }, + } + + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) diff --git a/node_modules/npm-normalize-package-bin/test/string.js b/node_modules/npm-normalize-package-bin/test/string.js new file mode 100644 index 0000000000000..b6de8f8f589b5 --- /dev/null +++ b/node_modules/npm-normalize-package-bin/test/string.js @@ -0,0 +1,37 @@ +const normalize = require('../') +const t = require('tap') + +t.test('benign string', async t => { + const pkg = { name: 'hello', version: 'world', bin: 'hello.js' } + const expect = { name: 'hello', version: 'world', bin: { hello: 'hello.js' } } + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) + +t.test('slashy string', async t => { + const pkg = { name: 'hello', version: 'world', bin: '/etc/passwd' } + const expect = { name: 'hello', version: 'world', bin: { hello: 'etc/passwd' } } + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) + +t.test('dotty string', async t => { + const pkg = { name: 'hello', version: 'world', bin: '../../../../etc/passwd' } + const expect = { name: 'hello', version: 'world', bin: { hello: 'etc/passwd' } } + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) + +t.test('double path', async t => { + const pkg = { name: 'hello', version: 'world', bin: '/etc/passwd:/bin/usr/exec' } + const expect = { name: 'hello', version: 'world', bin: { hello: 'etc/passwd:/bin/usr/exec' } } + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) + +t.test('string with no name', async t => { + const pkg = { bin: 'foobar.js' } + const expect = {} + t.strictSame(normalize(pkg), expect) + t.strictSame(normalize(normalize(pkg)), expect, 'double sanitize ok') +}) diff --git a/node_modules/npm-package-arg/CHANGELOG.md b/node_modules/npm-package-arg/CHANGELOG.md index 83e5763f4ffec..1b3431acced77 100644 --- a/node_modules/npm-package-arg/CHANGELOG.md +++ b/node_modules/npm-package-arg/CHANGELOG.md @@ -2,6 +2,16 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ +## [6.1.1](https://github.com/npm/npm-package-arg/compare/v6.1.0...v6.1.1) (2019-08-21) + + +### Bug Fixes + +* preserve drive letter on windows git file:// urls ([3909203](https://github.com/npm/npm-package-arg/commit/3909203)) + + + # [6.1.0](https://github.com/npm/npm-package-arg/compare/v6.0.0...v6.1.0) (2018-04-10) diff --git a/node_modules/npm-package-arg/npa.js b/node_modules/npm-package-arg/npa.js index 4d56237a570b9..bf2c17cfd513f 100644 --- a/node_modules/npm-package-arg/npa.js +++ b/node_modules/npm-package-arg/npa.js @@ -6,7 +6,11 @@ module.exports.Result = Result let url let HostedGit let semver -let path +let path_ +function path () { + if (!path_) path_ = require('path') + return path_ +} let validatePackageName let osenv @@ -109,7 +113,6 @@ function Result (opts) { this.gitCommittish = opts.gitCommittish this.hosted = opts.hosted } -Result.prototype = {} Result.prototype.setName = function (name) { if (!validatePackageName) validatePackageName = require('validate-npm-package-name') @@ -152,8 +155,7 @@ const isAbsolutePath = /^[/]|^[A-Za-z]:/ function resolvePath (where, spec) { if (isAbsolutePath.test(spec)) return spec - if (!path) path = require('path') - return path.resolve(where, spec) + return path().resolve(where, spec) } function isAbsolute (dir) { @@ -180,8 +182,7 @@ function fromFile (res, where) { if (isAbsolute(spec)) { res.saveSpec = 'file:' + spec } else { - if (!path) path = require('path') - res.saveSpec = 'file:' + path.relative(where, res.fetchSpec) + res.saveSpec = 'file:' + path().relative(where, res.fetchSpec) } } return res @@ -238,6 +239,11 @@ function fromURL (res) { } else { setGitCommittish(res, urlparse.hash != null ? urlparse.hash.slice(1) : '') urlparse.protocol = urlparse.protocol.replace(/^git[+]/, '') + if (urlparse.protocol === 'file:' && /^git\+file:\/\/[a-z]:/i.test(res.rawSpec)) { + // keep the drive letter : on windows file paths + urlparse.host += ':' + urlparse.hostname += ':' + } delete urlparse.hash res.fetchSpec = url.format(urlparse) } diff --git a/node_modules/npm-package-arg/package.json b/node_modules/npm-package-arg/package.json index 7bd5efeb43f4b..7d978a4d48a5c 100644 --- a/node_modules/npm-package-arg/package.json +++ b/node_modules/npm-package-arg/package.json @@ -1,41 +1,38 @@ { - "_args": [ - [ - "npm-package-arg@6.1.0", - "/Users/rebecca/code/npm" - ] - ], - "_from": "npm-package-arg@6.1.0", - "_id": "npm-package-arg@6.1.0", + "_from": "npm-package-arg@6.1.1", + "_id": "npm-package-arg@6.1.1", "_inBundle": false, - "_integrity": "sha512-zYbhP2k9DbJhA0Z3HKUePUgdB1x7MfIfKssC+WLPFMKTBZKpZh5m13PgexJjCq6KW7j17r0jHWcCpxEqnnncSA==", + "_integrity": "sha512-qBpssaL3IOZWi5vEKUKW0cO7kzLeT+EQO9W8RsLOZf76KF9E/K9+wH0C7t06HXPpaH8WH5xF1MExLuCwbTqRUg==", "_location": "/npm-package-arg", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "npm-package-arg@6.1.0", + "raw": "npm-package-arg@6.1.1", "name": "npm-package-arg", "escapedName": "npm-package-arg", - "rawSpec": "6.1.0", + "rawSpec": "6.1.1", "saveSpec": null, - "fetchSpec": "6.1.0" + "fetchSpec": "6.1.1" }, "_requiredBy": [ + "#USER", "/", "/init-package-json", "/libcipm", - "/libnpmhook/npm-registry-fetch", + "/libnpm", + "/libnpmaccess", + "/libnpmpublish", "/libnpx", "/lock-verify", "/npm-pick-manifest", - "/npm-registry-client", "/npm-registry-fetch", "/pacote" ], - "_resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-6.1.0.tgz", - "_spec": "6.1.0", - "_where": "/Users/rebecca/code/npm", + "_resolved": 
"https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-6.1.1.tgz", + "_shasum": "02168cb0a49a2b75bf988a28698de7b529df5cb7", + "_spec": "npm-package-arg@6.1.1", + "_where": "/Users/isaacs/dev/npm/cli", "author": { "name": "Isaac Z. Schlueter", "email": "i@izs.me", @@ -44,17 +41,19 @@ "bugs": { "url": "https://github.com/npm/npm-package-arg/issues" }, + "bundleDependencies": false, "dependencies": { - "hosted-git-info": "^2.6.0", + "hosted-git-info": "^2.7.1", "osenv": "^0.1.5", - "semver": "^5.5.0", + "semver": "^5.6.0", "validate-npm-package-name": "^3.0.0" }, + "deprecated": false, "description": "Parse the things that can be arguments to `npm install`", "devDependencies": { "standard": "^11.0.1", - "standard-version": "^4.3.0", - "tap": "^11.1.3", + "standard-version": "^4.4.0", + "tap": "^12.5.0", "weallbehave": "^1.2.0", "weallcontribute": "^1.0.8" }, @@ -77,9 +76,9 @@ "prerelease": "npm t", "pretest": "standard", "release": "standard-version -s", - "test": "tap -J --coverage test/*.js", + "test": "tap --100 -J --coverage test/*.js", "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" }, - "version": "6.1.0" + "version": "6.1.1" } diff --git a/node_modules/npm-packlist/index.js b/node_modules/npm-packlist/index.js index 2cdd37ec3df06..eaf14b8661751 100644 --- a/node_modules/npm-packlist/index.js +++ b/node_modules/npm-packlist/index.js @@ -17,6 +17,8 @@ const rootBuiltinRules = Symbol('root-builtin-rules') const packageNecessaryRules = Symbol('package-necessary-rules') const path = require('path') +const normalizePackageBin = require('npm-normalize-package-bin') + const defaultRules = [ '.npmignore', '.gitignore', @@ -35,12 +37,25 @@ const defaultRules = [ '**/.npmrc', '.*.swp', '.DS_Store', + '**/.DS_Store/**', '._*', + '**/._*/**', '*.orig', - 'package-lock.json', + '/package-lock.json', + '/yarn.lock', 'archived-packages/**', + 'core', + '!core/', + '!**/core/', + '*.core', + '*.vgcore', + 'vgcore.*', + 'core.+([0-9])', ] +// There may be others, but :?|<> are handled by node-tar +const nameIsBadForWindows = file => /\*/.test(file) + // a decorator that applies our custom rules to an ignore walker const npmWalker = Class => class Walker extends Class { constructor (opt) { @@ -57,7 +72,11 @@ const npmWalker = Class => class Walker extends Class { opt.includeEmpty = false opt.path = opt.path || process.cwd() - opt.follow = path.basename(opt.path) === 'node_modules' + const dirName = path.basename(opt.path) + const parentName = path.basename(path.dirname(opt.path)) + opt.follow = + dirName === 'node_modules' || + (parentName === 'node_modules' && /^@/.test(dirName)) super(opt) // ignore a bunch of things by default at the root level. 
@@ -77,6 +96,16 @@ const npmWalker = Class => class Walker extends Class { } } + onReaddir (entries) { + if (!this.parent) { + entries = entries.filter(e => + e !== '.git' && + !(e === 'node_modules' && this.bundled.length === 0) + ) + } + return super.onReaddir(entries) + } + filterEntry (entry, partial) { // get the partial path from the root of the walk const p = this.path.substr(this.root.length + 1) @@ -132,13 +161,31 @@ const npmWalker = Class => class Walker extends Class { onPackageJson (ig, pkg, then) { this.packageJsonCache.set(ig, pkg) - // if there's a browser or main, make sure we don't ignore it + // if there's a bin, browser or main, make sure we don't ignore it + // also, don't ignore the package.json itself! + // + // Weird side-effect of this: a readme (etc) file will be included + // if it exists anywhere within a folder with a package.json file. + // The original intent was only to include these files in the root, + // but now users in the wild are dependent on that behavior for + // localized documentation and other use cases. Adding a `/` to + // these rules, while tempting and arguably more "correct", is a + // breaking change. const rules = [ pkg.browser ? '!' + pkg.browser : '', pkg.main ? '!' + pkg.main : '', - '!@(readme|copying|license|licence|notice|changes|changelog|history){,.*}' - ].filter(f => f).join('\n') + '\n' - super.onReadIgnoreFile(packageNecessaryRules, rules, _=>_) + '!package.json', + '!npm-shrinkwrap.json', + '!@(readme|copying|license|licence|notice|changes|changelog|history){,.*[^~$]}' + ] + if (pkg.bin) { + // always an object, because normalized already + for (const key in pkg.bin) + rules.push('!' + pkg.bin[key]) + } + + const data = rules.filter(f => f).join('\n') + '\n' + super.onReadIgnoreFile(packageNecessaryRules, data, _=>_) if (Array.isArray(pkg.files)) super.onReadIgnoreFile('package.json', '*\n' + pkg.files.map( @@ -148,6 +195,16 @@ const npmWalker = Class => class Walker extends Class { then() } + // override parent stat function to completely skip any filenames + // that will break windows entirely. + // XXX(isaacs) Next major version should make this an error instead. + stat (entry, file, dir, then) { + if (nameIsBadForWindows(entry)) + then() + else + super.stat(entry, file, dir, then) + } + // override parent onstat function to nix all symlinks onstat (st, entry, file, dir, then) { if (st.isSymbolicLink()) @@ -159,7 +216,8 @@ const npmWalker = Class => class Walker extends Class { onReadIgnoreFile (file, data, then) { if (file === 'package.json') try { - this.onPackageJson(file, JSON.parse(data), then) + const ig = path.resolve(this.path, file) + this.onPackageJson(ig, normalizePackageBin(JSON.parse(data)), then) } catch (er) { // ignore package.json files that are not json then() @@ -189,12 +247,13 @@ class WalkerSync extends npmWalker(IgnoreWalkerSync) { const walk = (options, callback) => { options = options || {} const p = new Promise((resolve, reject) => { - const bw = new BundleWalker(options).start() + const bw = new BundleWalker(options) bw.on('done', bundled => { options.bundled = bundled options.packageJsonCache = bw.packageJsonCache new Walker(options).on('done', resolve).on('error', reject).start() }) + bw.start() }) return callback ? p.then(res => callback(null, res), callback) : p } @@ -209,15 +268,20 @@ const walkSync = options => { return walker.result } -// package.json first, node_modules last, files before folders, alphasort -const sort = (a, b) => - a === 'package.json' ? -1 - : b === 'package.json' ? 
1 - : /^node_modules/.test(a) && !/^node_modules/.test(b) ? 1 - : /^node_modules/.test(b) && !/^node_modules/.test(a) ? -1 - : path.dirname(a) === '.' && path.dirname(b) !== '.' ? -1 - : path.dirname(b) === '.' && path.dirname(a) !== '.' ? 1 - : a.localeCompare(b) +// optimize for compressibility +// extname, then basename, then locale alphabetically +// https://twitter.com/isntitvacant/status/1131094910923231232 +const sort = (a, b) => { + const exta = path.extname(a).toLowerCase() + const extb = path.extname(b).toLowerCase() + const basea = path.basename(a).toLowerCase() + const baseb = path.basename(b).toLowerCase() + + return exta.localeCompare(extb) || + basea.localeCompare(baseb) || + a.localeCompare(b) +} + module.exports = walk walk.sync = walkSync diff --git a/node_modules/npm-packlist/package.json b/node_modules/npm-packlist/package.json index f1188c393f2c9..693c51b96140d 100644 --- a/node_modules/npm-packlist/package.json +++ b/node_modules/npm-packlist/package.json @@ -1,29 +1,29 @@ { - "_from": "npm-packlist@1.1.12", - "_id": "npm-packlist@1.1.12", + "_from": "npm-packlist@1.4.8", + "_id": "npm-packlist@1.4.8", "_inBundle": false, - "_integrity": "sha512-WJKFOVMeAlsU/pjXuqVdzU0WfgtIBCupkEVwn+1Y0ERAbUfWw8R4GjgVbaKnUjRoD2FoQbHOCbOyT5Mbs9Lw4g==", + "_integrity": "sha512-5+AZgwru5IevF5ZdnFglB5wNlHG1AOOuw28WhUq8/8emhBmLv6jX5by4WJCh7lW0uSYZYS6DXqIsyZVIXRZU9A==", "_location": "/npm-packlist", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "npm-packlist@1.1.12", + "raw": "npm-packlist@1.4.8", "name": "npm-packlist", "escapedName": "npm-packlist", - "rawSpec": "1.1.12", + "rawSpec": "1.4.8", "saveSpec": null, - "fetchSpec": "1.1.12" + "fetchSpec": "1.4.8" }, "_requiredBy": [ "#USER", "/", "/pacote" ], - "_resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.1.12.tgz", - "_shasum": "22bde2ebc12e72ca482abd67afc51eb49377243a", - "_spec": "npm-packlist@1.1.12", - "_where": "/Users/zkat/Documents/code/work/npm", + "_resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.4.8.tgz", + "_shasum": "56ee6cc135b9f98ad3d51c1c95da22bbb9b2ef3e", + "_spec": "npm-packlist@1.4.8", + "_where": "/Users/darcyclarke/Documents/Repos/npm/cli", "author": { "name": "Isaac Z. 
Schlueter", "email": "i@izs.me", @@ -35,14 +35,15 @@ "bundleDependencies": false, "dependencies": { "ignore-walk": "^3.0.1", - "npm-bundled": "^1.0.1" + "npm-bundled": "^1.0.1", + "npm-normalize-package-bin": "^1.0.1" }, "deprecated": false, "description": "Get a list of the files to add from a folder into an npm package", "devDependencies": { "mkdirp": "^0.5.1", "rimraf": "^2.6.1", - "tap": "^12.0.1" + "tap": "^14.6.9" }, "directories": { "test": "test" @@ -54,15 +55,22 @@ "license": "ISC", "main": "index.js", "name": "npm-packlist", + "publishConfig": { + "tag": "legacy-v1" + }, "repository": { "type": "git", "url": "git+https://github.com/npm/npm-packlist.git" }, "scripts": { - "postpublish": "git push origin --all; git push origin --tags", + "postpublish": "git push origin --follow-tags", "postversion": "npm publish", "preversion": "npm test", - "test": "tap test/*.js --100 -J" + "snap": "tap", + "test": "tap" + }, + "tap": { + "jobs": 1 }, - "version": "1.1.12" + "version": "1.4.8" } diff --git a/node_modules/npm-pick-manifest/CHANGELOG.md b/node_modules/npm-pick-manifest/CHANGELOG.md index 5f53e8fce591f..c594ba140f72b 100644 --- a/node_modules/npm-pick-manifest/CHANGELOG.md +++ b/node_modules/npm-pick-manifest/CHANGELOG.md @@ -2,33 +2,107 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [3.0.2](https://github.com/npm/npm-pick-manifest/compare/v3.0.1...v3.0.2) (2019-08-30) + + + + +## [3.0.1](https://github.com/npm/npm-pick-manifest/compare/v3.0.0...v3.0.1) (2019-08-28) + + +### Bug Fixes + +* throw 403 for forbidden major/minor versions ([003286e](https://github.com/npm/npm-pick-manifest/commit/003286e)), closes [#2](https://github.com/npm/npm-pick-manifest/issues/2) + + + + +# [3.0.0](https://github.com/npm/npm-pick-manifest/compare/v2.2.3...v3.0.0) (2019-08-20) + + +### Features + +* throw forbidden error when package is blocked by policy ([ad2a962](https://github.com/npm/npm-pick-manifest/commit/ad2a962)), closes [#1](https://github.com/npm/npm-pick-manifest/issues/1) + + +### BREAKING CHANGES + +* This adds a new error code when package versions are +blocked. 
+ +PR-URL: https://github.com/npm/npm-pick-manifest/pull/1 +Credit: @claudiahdz + + + + +## [2.2.3](https://github.com/npm/npm-pick-manifest/compare/v2.2.2...v2.2.3) (2018-10-31) + + +### Bug Fixes + +* **enjoyBy:** rework semantics for enjoyBy again ([5e89b62](https://github.com/npm/npm-pick-manifest/commit/5e89b62)) + + + + +## [2.2.2](https://github.com/npm/npm-pick-manifest/compare/v2.2.1...v2.2.2) (2018-10-31) + + +### Bug Fixes + +* **enjoyBy:** rework semantics for enjoyBy ([5684f45](https://github.com/npm/npm-pick-manifest/commit/5684f45)) + + + + +## [2.2.1](https://github.com/npm/npm-pick-manifest/compare/v2.2.0...v2.2.1) (2018-10-30) + + + + +# [2.2.0](https://github.com/npm/npm-pick-manifest/compare/v2.1.0...v2.2.0) (2018-10-30) + + +### Bug Fixes + +* **audit:** npm audit fix --force ([d5ae6c4](https://github.com/npm/npm-pick-manifest/commit/d5ae6c4)) + + +### Features + +* **enjoyBy:** add opts.enjoyBy option to filter versions by date ([0b8a790](https://github.com/npm/npm-pick-manifest/commit/0b8a790)) + + + -# [2.1.0](https://github.com/zkat/npm-pick-manifest/compare/v2.0.1...v2.1.0) (2017-10-18) +# [2.1.0](https://github.com/npm/npm-pick-manifest/compare/v2.0.1...v2.1.0) (2017-10-18) ### Features -* **selection:** allow manually disabling deprecation skipping ([0d239d3](https://github.com/zkat/npm-pick-manifest/commit/0d239d3)) +* **selection:** allow manually disabling deprecation skipping ([0d239d3](https://github.com/npm/npm-pick-manifest/commit/0d239d3)) -## [2.0.1](https://github.com/zkat/npm-pick-manifest/compare/v2.0.0...v2.0.1) (2017-10-18) +## [2.0.1](https://github.com/npm/npm-pick-manifest/compare/v2.0.0...v2.0.1) (2017-10-18) -# [2.0.0](https://github.com/zkat/npm-pick-manifest/compare/v1.0.4...v2.0.0) (2017-10-03) +# [2.0.0](https://github.com/npm/npm-pick-manifest/compare/v1.0.4...v2.0.0) (2017-10-03) ### Bug Fixes -* **license:** relicense project according to npm policy (#3) ([ed743a0](https://github.com/zkat/npm-pick-manifest/commit/ed743a0)) +* **license:** relicense project according to npm policy (#3) ([ed743a0](https://github.com/npm/npm-pick-manifest/commit/ed743a0)) ### Features -* **selection:** Avoid matching deprecated packages if possible ([3fc6c3a](https://github.com/zkat/npm-pick-manifest/commit/3fc6c3a)) +* **selection:** Avoid matching deprecated packages if possible ([3fc6c3a](https://github.com/npm/npm-pick-manifest/commit/3fc6c3a)) ### BREAKING CHANGES @@ -39,43 +113,43 @@ All notable changes to this project will be documented in this file. 
See [standa -## [1.0.4](https://github.com/zkat/npm-pick-manifest/compare/v1.0.3...v1.0.4) (2017-06-29) +## [1.0.4](https://github.com/npm/npm-pick-manifest/compare/v1.0.3...v1.0.4) (2017-06-29) ### Bug Fixes -* **npa:** bump npa version for bugfixes ([7cdaca7](https://github.com/zkat/npm-pick-manifest/commit/7cdaca7)) -* **semver:** use loose semver parsing for *all* ops ([bbc0daa](https://github.com/zkat/npm-pick-manifest/commit/bbc0daa)) +* **npa:** bump npa version for bugfixes ([7cdaca7](https://github.com/npm/npm-pick-manifest/commit/7cdaca7)) +* **semver:** use loose semver parsing for *all* ops ([bbc0daa](https://github.com/npm/npm-pick-manifest/commit/bbc0daa)) -## [1.0.3](https://github.com/zkat/npm-pick-manifest/compare/v1.0.2...v1.0.3) (2017-05-04) +## [1.0.3](https://github.com/npm/npm-pick-manifest/compare/v1.0.2...v1.0.3) (2017-05-04) ### Bug Fixes -* **semver:** use semver.clean() instead ([f4133b5](https://github.com/zkat/npm-pick-manifest/commit/f4133b5)) +* **semver:** use semver.clean() instead ([f4133b5](https://github.com/npm/npm-pick-manifest/commit/f4133b5)) -## [1.0.2](https://github.com/zkat/npm-pick-manifest/compare/v1.0.1...v1.0.2) (2017-05-04) +## [1.0.2](https://github.com/npm/npm-pick-manifest/compare/v1.0.1...v1.0.2) (2017-05-04) ### Bug Fixes -* **picker:** spaces in `wanted` prevented match ([97a7d0a](https://github.com/zkat/npm-pick-manifest/commit/97a7d0a)) +* **picker:** spaces in `wanted` prevented match ([97a7d0a](https://github.com/npm/npm-pick-manifest/commit/97a7d0a)) -## [1.0.1](https://github.com/zkat/npm-pick-manifest/compare/v1.0.0...v1.0.1) (2017-04-24) +## [1.0.1](https://github.com/npm/npm-pick-manifest/compare/v1.0.0...v1.0.1) (2017-04-24) ### Bug Fixes -* **deps:** forgot to add semver ([1876f4f](https://github.com/zkat/npm-pick-manifest/commit/1876f4f)) +* **deps:** forgot to add semver ([1876f4f](https://github.com/npm/npm-pick-manifest/commit/1876f4f)) @@ -85,7 +159,7 @@ All notable changes to this project will be documented in this file. See [standa ### Features -* **api:** initial implementation. ([b086912](https://github.com/zkat/npm-pick-manifest/commit/b086912)) +* **api:** initial implementation. 
([b086912](https://github.com/npm/npm-pick-manifest/commit/b086912)) ### BREAKING CHANGES diff --git a/node_modules/npm-pick-manifest/README.md b/node_modules/npm-pick-manifest/README.md index 206af2f317f82..d32d47af1997b 100644 --- a/node_modules/npm-pick-manifest/README.md +++ b/node_modules/npm-pick-manifest/README.md @@ -1,6 +1,6 @@ -# npm-pick-manifest [![npm version](https://img.shields.io/npm/v/npm-pick-manifest.svg)](https://npm.im/npm-pick-manifest) [![license](https://img.shields.io/npm/l/npm-pick-manifest.svg)](https://npm.im/npm-pick-manifest) [![Travis](https://img.shields.io/travis/zkat/npm-pick-manifest.svg)](https://travis-ci.org/zkat/npm-pick-manifest) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/npm-pick-manifest?svg=true)](https://ci.appveyor.com/project/zkat/npm-pick-manifest) [![Coverage Status](https://coveralls.io/repos/github/zkat/npm-pick-manifest/badge.svg?branch=latest)](https://coveralls.io/github/zkat/npm-pick-manifest?branch=latest) +# npm-pick-manifest [![npm version](https://img.shields.io/npm/v/npm-pick-manifest.svg)](https://npm.im/npm-pick-manifest) [![license](https://img.shields.io/npm/l/npm-pick-manifest.svg)](https://npm.im/npm-pick-manifest) [![Travis](https://img.shields.io/travis/npm/npm-pick-manifest.svg)](https://travis-ci.org/npm/npm-pick-manifest) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/npm-pick-manifest?svg=true)](https://ci.appveyor.com/project/npm/npm-pick-manifest) [![Coverage Status](https://coveralls.io/repos/github/npm/npm-pick-manifest/badge.svg?branch=latest)](https://coveralls.io/github/npm/npm-pick-manifest?branch=latest) -[`npm-pick-manifest`](https://github.com/zkat/npm-pick-manifest) is a standalone +[`npm-pick-manifest`](https://github.com/npm/npm-pick-manifest) is a standalone implementation of [npm](https://npmjs.com)'s semver range resolution algorithm. ## Install @@ -74,3 +74,11 @@ The function will throw `ETARGET` if there was no matching manifest, and If `opts.defaultTag` is provided, it will be used instead of `latest`. That is, if that tag matches the selector, it will be used, even if a higher available version matches the range. + +If `opts.enjoyBy` is provided, it should be something that can be passed to `new +Date(x)`, such as a `Date` object or a timestamp string. It will be used to +filter the selected versions such that only versions less than or equal to +`enjoyBy` are considered. + +If `opts.includeDeprecated` passed in as true, deprecated versions will be +selected. By default, deprecated versions other than `defaultTag` are ignored. 
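To make the `enjoyBy` semantics described above concrete, here is a small, non-authoritative sketch of calling `npm-pick-manifest` against a hand-written packument. The package name, versions, and timestamps are invented for illustration; the call shape follows the README text above and the `index.js` implementation shown in the next diff.

```js
// Illustrative only: this packument is fabricated for the example.
const pickManifest = require('npm-pick-manifest')

const packument = {
  name: 'example-pkg',
  'dist-tags': { latest: '2.0.0' },
  versions: {
    '1.0.0': { name: 'example-pkg', version: '1.0.0' },
    '2.0.0': { name: 'example-pkg', version: '2.0.0' }
  },
  // per-version publish times; this is what the enjoyBy filter consults
  time: {
    '1.0.0': '2018-01-01T00:00:00.000Z',
    '2.0.0': '2019-06-01T00:00:00.000Z'
  }
}

// Without enjoyBy, the highest satisfying version wins.
const latest = pickManifest(packument, '^1.0.0 || ^2.0.0')
console.log(latest.version) // 2.0.0

// With enjoyBy, only versions published on or before that date are eligible,
// so resolution falls back to 1.0.0 even though 2.0.0 satisfies the range.
const older = pickManifest(packument, '*', { enjoyBy: '2018-06-01' })
console.log(older.version) // 1.0.0
```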
diff --git a/node_modules/npm-pick-manifest/index.js b/node_modules/npm-pick-manifest/index.js index 133b62723457b..9eb2d82d10024 100644 --- a/node_modules/npm-pick-manifest/index.js +++ b/node_modules/npm-pick-manifest/index.js @@ -1,22 +1,41 @@ 'use strict' +const figgyPudding = require('figgy-pudding') const npa = require('npm-package-arg') const semver = require('semver') +const PickerOpts = figgyPudding({ + defaultTag: { default: 'latest' }, + enjoyBy: {}, + includeDeprecated: { default: false } +}) + module.exports = pickManifest function pickManifest (packument, wanted, opts) { - opts = opts || {} + opts = PickerOpts(opts) + const time = opts.enjoyBy && packument.time && +(new Date(opts.enjoyBy)) const spec = npa.resolve(packument.name, wanted) const type = spec.type if (type === 'version' || type === 'range') { wanted = semver.clean(wanted, true) || wanted } const distTags = packument['dist-tags'] || {} - const versions = Object.keys(packument.versions || {}).filter(v => semver.valid(v, true)) - const undeprecated = versions.filter(v => !packument.versions[v].deprecated) + const versions = Object.keys(packument.versions || {}).filter(v => { + return semver.valid(v, true) + }) + const policyRestrictions = packument.policyRestrictions + const restrictedVersions = policyRestrictions + ? Object.keys(policyRestrictions.versions) : [] + + function enjoyableBy (v) { + return !time || ( + packument.time[v] && time >= +(new Date(packument.time[v])) + ) + } + let err - if (!versions.length) { + if (!versions.length && !restrictedVersions.length) { err = new Error(`No valid versions available for ${packument.name}`) err.code = 'ENOVERSIONS' err.name = packument.name @@ -27,45 +46,83 @@ function pickManifest (packument, wanted, opts) { let target - if (type === 'tag') { + if (type === 'tag' && enjoyableBy(distTags[wanted])) { target = distTags[wanted] } else if (type === 'version') { target = wanted - } else if (type !== 'range') { + } else if (type !== 'range' && enjoyableBy(distTags[wanted])) { throw new Error('Only tag, version, and range are supported') } - const tagVersion = distTags[opts.defaultTag || 'latest'] + const tagVersion = distTags[opts.defaultTag] if ( !target && tagVersion && packument.versions[tagVersion] && + enjoyableBy(tagVersion) && semver.satisfies(tagVersion, wanted, true) ) { target = tagVersion } if (!target && !opts.includeDeprecated) { + const undeprecated = versions.filter(v => !packument.versions[v].deprecated && enjoyableBy(v) + ) target = semver.maxSatisfying(undeprecated, wanted, true) } if (!target) { - target = semver.maxSatisfying(versions, wanted, true) + const stillFresh = versions.filter(enjoyableBy) + target = semver.maxSatisfying(stillFresh, wanted, true) } - if (!target && wanted === '*') { + if (!target && wanted === '*' && enjoyableBy(tagVersion)) { // This specific corner is meant for the case where // someone is using `*` as a selector, but all versions // are pre-releases, which don't match ranges at all. 
target = tagVersion } - const manifest = target && packument.versions[target] + if ( + !target && + time && + type === 'tag' && + distTags[wanted] && + !enjoyableBy(distTags[wanted]) + ) { + const stillFresh = versions.filter(v => + enjoyableBy(v) && semver.lte(v, distTags[wanted], true) + ).sort(semver.rcompare) + target = stillFresh[0] + } + + if (!target && restrictedVersions) { + target = semver.maxSatisfying(restrictedVersions, wanted, true) + } + + const manifest = ( + target && + packument.versions[target] + ) if (!manifest) { - err = new Error( - `No matching version found for ${packument.name}@${wanted}` - ) - err.code = 'ETARGET' + // Check if target is forbidden + const isForbidden = target && policyRestrictions && policyRestrictions.versions[target] + const pckg = `${packument.name}@${wanted}${ + opts.enjoyBy + ? ` with an Enjoy By date of ${ + new Date(opts.enjoyBy).toLocaleString() + }. Maybe try a different date?` + : '' + }` + + if (isForbidden) { + err = new Error(`Could not download ${pckg} due to policy violations.\n${policyRestrictions.message}\n`) + err.code = 'E403' + } else { + err = new Error(`No matching version found for ${pckg}.`) + err.code = 'ETARGET' + } + err.name = packument.name err.type = type err.wanted = wanted diff --git a/node_modules/npm-pick-manifest/package.json b/node_modules/npm-pick-manifest/package.json index 4cf8bf1a13c6c..5adbc26957485 100644 --- a/node_modules/npm-pick-manifest/package.json +++ b/node_modules/npm-pick-manifest/package.json @@ -1,40 +1,37 @@ { - "_args": [ - [ - "npm-pick-manifest@2.1.0", - "/Users/rebecca/code/npm" - ] - ], - "_from": "npm-pick-manifest@2.1.0", - "_id": "npm-pick-manifest@2.1.0", + "_from": "npm-pick-manifest@3.0.2", + "_id": "npm-pick-manifest@3.0.2", "_inBundle": false, - "_integrity": "sha512-q9zLP8cTr8xKPmMZN3naxp1k/NxVFsjxN6uWuO1tiw9gxg7wZWQ/b5UTfzD0ANw2q1lQxdLKTeCCksq+bPSgbQ==", + "_integrity": "sha512-wNprTNg+X5nf+tDi+hbjdHhM4bX+mKqv6XmPh7B5eG+QY9VARfQPfCEH013H5GqfNj6ee8Ij2fg8yk0mzps1Vw==", "_location": "/npm-pick-manifest", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "npm-pick-manifest@2.1.0", + "raw": "npm-pick-manifest@3.0.2", "name": "npm-pick-manifest", "escapedName": "npm-pick-manifest", - "rawSpec": "2.1.0", + "rawSpec": "3.0.2", "saveSpec": null, - "fetchSpec": "2.1.0" + "fetchSpec": "3.0.2" }, "_requiredBy": [ + "#USER", "/", "/pacote" ], - "_resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-2.1.0.tgz", - "_spec": "2.1.0", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-3.0.2.tgz", + "_shasum": "f4d9e5fd4be2153e5f4e5f9b7be8dc419a99abb7", + "_spec": "npm-pick-manifest@3.0.2", + "_where": "/Users/claudiahdz/npm/cli", "author": { "name": "Kat Marchán", "email": "kzm@sykosomatic.org" }, "bugs": { - "url": "https://github.com/zkat/npm-pick-manifest/issues" + "url": "https://github.com/npm/npm-pick-manifest/issues" }, + "bundleDependencies": false, "config": { "nyc": { "exclude": [ @@ -44,22 +41,24 @@ } }, "dependencies": { + "figgy-pudding": "^3.5.1", "npm-package-arg": "^6.0.0", "semver": "^5.4.1" }, + "deprecated": false, "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.", "devDependencies": { - "nyc": "^11.2.1", + "nyc": "^13.1.0", "standard": "^10.0.3", - "standard-version": "^4.2.0", - "tap": "^10.7.0", + "standard-version": "^4.4.0", + "tap": "^12.0.1", "weallbehave": 
"^1.2.0", "weallcontribute": "^1.0.8" }, "files": [ "*.js" ], - "homepage": "https://github.com/zkat/npm-pick-manifest#readme", + "homepage": "https://github.com/npm/npm-pick-manifest#readme", "keywords": [ "npm", "semver", @@ -70,7 +69,7 @@ "name": "npm-pick-manifest", "repository": { "type": "git", - "url": "git+https://github.com/zkat/npm-pick-manifest.git" + "url": "git+https://github.com/npm/npm-pick-manifest.git" }, "scripts": { "postrelease": "npm publish && git push --follow-tags", @@ -81,5 +80,5 @@ "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" }, - "version": "2.1.0" + "version": "3.0.2" } diff --git a/node_modules/npm-profile/CHANGELOG.md b/node_modules/npm-profile/CHANGELOG.md index cc36c3857054b..6d937580c307b 100644 --- a/node_modules/npm-profile/CHANGELOG.md +++ b/node_modules/npm-profile/CHANGELOG.md @@ -1,3 +1,14 @@ +# v4.0.1 (2018-08-29) + +- `opts.password` needs to be base64-encoded when passed in for login +- Bump `npm-registry-fetch` dep because we depend on `opts.forceAuth` + +# v4.0.0 (2018-08-28) + +## BREAKING CHANGES: + +- Networking and auth-related options now use the latest [`npm-registry-fetch` config format](https://www.npmjs.com/package/npm-registry-fetch#fetch-opts). + # v3.0.2 (2018-06-07) - Allow newer make-fetch-happen. diff --git a/node_modules/npm-profile/README.md b/node_modules/npm-profile/README.md index e895ab61794dd..7a80a729e8996 100644 --- a/node_modules/npm-profile/README.md +++ b/node_modules/npm-profile/README.md @@ -4,9 +4,8 @@ Provides functions for fetching and updating an npmjs.com profile. ```js const profile = require('npm-profile') -profile.get(registry, {token}).then(result => { - // … -}) +const result = await profile.get(registry, {token}) +//... ``` The API that this implements is documented here: @@ -14,22 +13,37 @@ The API that this implements is documented here: * [authentication](https://github.com/npm/registry/blob/master/docs/user/authentication.md) * [profile editing](https://github.com/npm/registry/blob/master/docs/user/profile.md) (and two-factor authentication) -## Functions +## Table of Contents + +* [API](#api) + * Login and Account Creation + * [`adduser()`](#adduser) + * [`login()`](#login) + * [`adduserWeb()`](#adduser-web) + * [`loginWeb()`](#login-web) + * [`adduserCouch()`](#adduser-couch) + * [`loginCouch()`](#login-couch) + * Profile Data Management + * [`get()`](#get) + * [`set()`](#set) + * Token Management + * [`listTokens()`](#list-tokens) + * [`removeToken()`](#remove-token) + * [`createToken()`](#create-token) -### profile.adduser(opener, prompter, config) → Promise +## API + +### `> profile.adduser(opener, prompter, [opts]) → Promise` Tries to create a user new web based login, if that fails it falls back to using the legacy CouchDB APIs. * `opener` Function (url) → Promise, returns a promise that resolves after a browser has been opened for the user at `url`. * `prompter` Function (creds) → Promise, returns a promise that resolves to an object with `username`, `email` and `password` properties. 
-* `config` Object +* [`opts`](#opts) Object (optional) plus extra keys: * `creds` Object, passed through to prompter, common values are: * `username` String, default value for username * `email` String, default value for email - * `registry` String (for reference, the npm registry is `https://registry.npmjs.org`) - * `opts` Object, [make-fetch-happen options](https://www.npmjs.com/package/make-fetch-happen#extra-options) for setting - things like cache, proxy, SSL CA and retry rules. #### **Promise Value** @@ -50,21 +64,16 @@ on this account isn't allowed from then the `code` will be set to `EAUTHIP`. Otherwise the code will be `'E'` followed by the HTTP response code, for example a Forbidden response would be `E403`. -### profile.login(opener, prompter, config) → Promise +### `> profile.login(opener, prompter, [opts]) → Promise` Tries to login using new web based login, if that fails it falls back to using the legacy CouchDB APIs. * `opener` Function (url) → Promise, returns a promise that resolves after a browser has been opened for the user at `url`. * `prompter` Function (creds) → Promise, returns a promise that resolves to an object with `username`, and `password` properties. -* `config` Object +* [`opts`](#opts) Object (optional) plus extra keys: * `creds` Object, passed through to prompter, common values are: * `name` String, default value for username - * `registry` String (for reference, the npm registry is `https://registry.npmjs.org`) - * `auth` Object, properties: `otp` - the one-time password from a two-factor authentication device. - * `opts` Object, [make-fetch-happen options](https://www.npmjs.com/package/make-fetch-happen#extra-options) for setting - things like cache, proxy, SSL CA and retry rules. #### **Promise Value** @@ -89,16 +98,13 @@ on this account isn't allowed from then the `code` will be set to `EAUTHIP`. Otherwise the code will be `'E'` followed by the HTTP response code, for example a Forbidden response would be `E403`. -### profile.adduserWeb(opener, config) → Promise +### `> profile.adduserWeb(opener, [opts]) → Promise` Tries to create a user new web based login, if that fails it falls back to using the legacy CouchDB APIs. * `opener` Function (url) → Promise, returns a promise that resolves after a browser has been opened for the user at `url`. -* `config` Object - * `registry` String (for reference, the npm registry is `https://registry.npmjs.org`) - * `opts` Object, [make-fetch-happen options](https://www.npmjs.com/package/make-fetch-happen#extra-options) for setting - things like cache, proxy, SSL CA and retry rules. +* [`opts`](#opts) Object #### **Promise Value** @@ -123,16 +129,13 @@ on this account isn't allowed from then the `code` will be set to `EAUTHIP`. Otherwise the code will be `'E'` followed by the HTTP response code, for example a Forbidden response would be `E403`. -### profile.loginWeb(opener, config) → Promise +### `> profile.loginWeb(opener, [opts]) → Promise` Tries to login using new web based login, if that fails it falls back to using the legacy CouchDB APIs. * `opener` Function (url) → Promise, returns a promise that resolves after a browser has been opened for the user at `url`. -* `config` Object - * `registry` String (for reference, the npm registry is `https://registry.npmjs.org`) - * `opts` Object, [make-fetch-happen options](https://www.npmjs.com/package/make-fetch-happen#extra-options) for setting - things like cache, proxy, SSL CA and retry rules. 
+* [`opts`](#opts) Object (optional) #### **Promise Value** @@ -151,19 +154,17 @@ If the registry does not support web-login then an error will be thrown with its `code` property set to `ENYI` . You should retry with `loginCouch`. If you use `login` then this fallback will be done automatically. - If the action was denied because it came from an IP address that this action on this account isn't allowed from then the `code` will be set to `EAUTHIP`. Otherwise the code will be `'E'` followed by the HTTP response code, for example a Forbidden response would be `E403`. -### profile.adduserCouch(username, email, password, config) → Promise +### `> profile.adduserCouch(username, email, password, [opts]) → Promise` ```js -profile.adduser(username, email, password, {registry}).then(result => { - // do something with result.token -}) +const {token} = await profile.adduser(username, email, password, {registry}) +// `token` can be passed in through `opts` for authentication. ``` Creates a new user on the server along with a fresh bearer token for future @@ -176,10 +177,7 @@ this is registry specific and not guaranteed. * `username` String * `email` String * `password` String -* `config` Object - * `registry` String (for reference, the npm registry is `https://registry.npmjs.org`) - * `opts` Object, [make-fetch-happen options](https://www.npmjs.com/package/make-fetch-happen#extra-options) for setting - things like cache, proxy, SSL CA and retry rules. +* [`opts`](#opts) Object (optional) #### **Promise Value** @@ -203,33 +201,29 @@ on this account isn't allowed from then the `code` will be set to `EAUTHIP`. Otherwise the code will be `'E'` followed by the HTTP response code, for example a Forbidden response would be `E403`. -### profile.loginCouch(username, password, config) → Promise +### `> profile.loginCouch(username, password, [opts]) → Promise` ```js -profile.login(username, password, {registry}).catch(err => { +let token +try { + ({token} = await profile.login(username, password, {registry})) +} catch (err) { if (err.code === 'otp') { - return getOTPFromSomewhere().then(otp => { - return profile.login(username, password, {registry, auth: {otp}}) - }) + const otp = await getOTPFromSomewhere() + ;({token} = await profile.login(username, password, {otp})) } -}).then(result => { - // do something with result.token -}) +} +// `token` can now be passed in through `opts` for authentication. ``` Logs you into an existing user. Does not create the user if they do not already exist. Logging in means generating a new bearer token for use in future authentication. This is what you use as an `authToken` in an `.npmrc`. - + * `username` String * `email` String * `password` String -* `config` Object - * `registry` String (for reference, the npm registry is `https://registry.npmjs.org`) - * `auth` Object, properties: `otp` — the one-time password from a two-factor - authentication device. - * `opts` Object, [make-fetch-happen options](https://www.npmjs.com/package/make-fetch-happen#extra-options) for setting - things like cache, proxy, SSL CA and retry rules. +* [`opts`](#opts) Object (optional) #### **Promise Value** @@ -254,24 +248,16 @@ If the error was neither of these then the error object will have a `code` property set to the HTTP response code and a `headers` property with the HTTP headers in the response.
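For reference, the bearer token that `loginCouch` (and `login`) resolve with is what ends up as an `_authToken` entry in the user's `.npmrc`. The snippet below is a minimal illustrative sketch, not part of the npm-profile API: the `saveToken` helper is hypothetical and assumes the default public registry unless another registry URL is passed in.

```js
const fs = require('fs')
const os = require('os')
const path = require('path')

// Hypothetical helper: append a registry-scoped auth token to ~/.npmrc.
// npm reads tokens from "nerf-darted" keys such as:
//   //registry.npmjs.org/:_authToken=<token>
function saveToken (token, registry = 'https://registry.npmjs.org/') {
  const nerfDart = registry.replace(/^https?:/, '') // e.g. //registry.npmjs.org/
  fs.appendFileSync(path.join(os.homedir(), '.npmrc'), `${nerfDart}:_authToken=${token}\n`)
}
```

The npm CLI handles this bookkeeping through its own config layer; the helper above only illustrates the file format the token ends up in.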
-### profile.get(config) → Promise +### `> profile.get([opts]) → Promise` ```js -profile.get(registry, {auth: {token}}).then(userProfile => { - // do something with userProfile -}) +const {name, email} = await profile.get({token}) +console.log(`${token} belongs to https://npm.im/~${name}, (mailto:${email})`) ``` Fetch profile information for the authenticated user. - -* `config` Object - * `registry` String (for reference, the npm registry is `https://registry.npmjs.org`) - * `auth` Object, properties: `token` — a bearer token returned from - `adduser`, `login` or `createToken`, or, `username`, `password` (and - optionally `otp`). Authenticating for this command via a username and - password will likely not be supported in the future. - * `opts` Object, [make-fetch-happen options](https://www.npmjs.com/package/make-fetch-happen#extra-options) for setting - things like cache, proxy, SSL CA and retry rules. + +* [`opts`](#opts) Object #### **Promise Value** @@ -313,24 +299,17 @@ on this account isn't allowed from then the `code` will be set to `EAUTHIP`. Otherwise the code will be the HTTP response code. -### profile.set(profileData, config) → Promise +### `> profile.set(profileData, [opts]) → Promise` ```js -profile.set({github: 'great-github-account-name'}, {registry, auth: {token}}) +await profile.set({github: 'great-github-account-name'}, {token}) ``` Update profile information for the authenticated user. * `profileData` An object, like that returned from `profile.get`, but see below for caveats relating to `password`, `tfa` and `cidr_whitelist`. -* `config` Object - * `registry` String (for reference, the npm registry is `https://registry.npmjs.org`) - * `auth` Object, properties: `token` — a bearer token returned from - `adduser`, `login` or `createToken`, or, `username`, `password` (and - optionally `otp`). Authenticating for this command via a username and - password will likely not be supported in the future. - * `opts` Object, [make-fetch-happen options](https://www.npmjs.com/package/make-fetch-happen#extra-options) for setting - things like cache, proxy, SSL CA and retry rules. +* [`opts`](#opts) Object (optional) #### **SETTING `password`** @@ -340,7 +319,12 @@ and `new` properties, where the former has the user's current password and the latter has the desired new password. For example ```js -profile.set({password: {old: 'abc123', new: 'my new (more secure) password'}}, {registry, auth: {token}}) +await profile.set({ + password: { + old: 'abc123', + new: 'my new (more secure) password' + } +}, {token}) ``` #### **SETTING `cidr_whitelist`** @@ -350,7 +334,9 @@ Be very careful as it's possible to lock yourself out of your account with this. This is not currently exposed in `npm` itself. ```js -profile.set({cidr_whitelist: [ '8.8.8.8/32' ], {registry, auth: {token}}) +await profile.set({ + cidr_whitelist: [ '8.8.8.8/32' ] +}, {token}) // ↑ only one of google's dns servers can now access this account. ``` @@ -360,7 +346,7 @@ Enabling two-factor authentication is a multi-step process. 1. Call `profile.get` and check the status of `tfa`. If `pending` is true then you'll need to disable it with `profile.set({tfa: {password, mode: 'disable'}, …)`. -2. `profile.set({tfa: {password, mode}}, {registry, auth: {token}})` +2. `profile.set({tfa: {password, mode}}, {registry, token})` * Note that the user's `password` is required here in the `tfa` object, regardless of how you're authenticating. 
* `mode` is either `auth-only` which requires an `otp` when calling `login` @@ -381,7 +367,7 @@ Enabling two-factor authentication is a multi-step process. and they can type or copy paste that in. 4. To complete setting up two factor auth you need to make a second call to `profile.set` with `tfa` set to an array of TWO codes from the user's - authenticator, eg: `profile.set(tfa: [otp1, otp2]}, registry, {token})` + authenticator, eg: `profile.set(tfa: [otp1, otp2]}, {registry, token})` 5. On success you'll get a result object with a `tfa` property that has an array of one-time-use recovery codes. These are used to authenticate later if the second factor is lost and generally should be printed and @@ -391,7 +377,7 @@ Disabling two-factor authentication is more straightforward, set the `tfa` attribute to an object with a `password` property and a `mode` of `disable`. ```js -profile.set({tfa: {password, mode: 'disable'}, {registry, auth: {token}}} +await profile.set({tfa: {password, mode: 'disable'}}, {token}) ``` #### **Promise Value** @@ -412,24 +398,16 @@ on this account isn't allowed from then the `code` will be set to `EAUTHIP`. Otherwise the code will be the HTTP response code. -### profile.listTokens(config) → Promise +### `> profile.listTokens([opts]) → Promise` ```js -profile.listTokens(registry, {token}).then(tokens => { - // do something with tokens -}) +const tokens = await profile.listTokens({registry, token}) +console.log(`Number of tokens in your accounts: ${tokens.length}`) ``` Fetch a list of all of the authentication tokens the authenticated user has. -* `config` Object - * `registry` String (for reference, the npm registry is `https://registry.npmjs.org`) - * `auth` Object, properties: `token` — a bearer token returned from - `adduser`, `login` or `createToken`, or, `username`, `password` (and - optionally `otp`). Authenticating for this command via a username and - password will likely not be supported in the future. - * `opts` Object, [make-fetch-happen options](https://www.npmjs.com/package/make-fetch-happen#extra-options) for setting - things like cache, proxy, SSL CA and retry rules. +* [`opts`](#opts) Object (optional) #### **Promise Value** @@ -456,25 +434,17 @@ on this account isn't allowed from then the `code` will be set to `EAUTHIP`. Otherwise the code will be the HTTP response code. -### profile.removeToken(token|key, config) → Promise +### `> profile.removeToken(token|key, opts) → Promise` ```js -profile.removeToken(key, registry, {token}).then(() => { - // token is gone! -}) +await profile.removeToken(key, {token}) +// token is gone! ``` Remove a specific authentication token. * `token|key` String, either a complete authentication token or the key returned by `profile.listTokens`. -* `config` Object - * `registry` String (for reference, the npm registry is `https://registry.npmjs.org`) - * `auth` Object, properties: `token` — a bearer token returned from - `adduser`, `login` or `createToken`, or, `username`, `password` (and - optionally `otp`). Authenticating for this command via a username and - password will likely not be supported in the future. - * `opts` Object, [make-fetch-happen options](https://www.npmjs.com/package/make-fetch-happen#extra-options) for setting - things like cache, proxy, SSL CA and retry rules. +* [`opts`](#opts) Object (optional) #### **Promise Value** @@ -494,12 +464,13 @@ on this account isn't allowed from then the `code` will be set to `EAUTHIP`. Otherwise the code will be the HTTP response code. 
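Because `listTokens` returns each token's `key`, it composes naturally with `removeToken`. The sketch below is illustrative only and not part of npm-profile: the helper name is hypothetical, and it assumes `opts` carries valid credentials as described under the options objects section below.

```js
const profile = require('npm-profile')

// Hypothetical helper: revoke every read-only token on the account.
// `opts` is the same options object the other calls accept (registry, token, …).
async function removeReadonlyTokens (opts) {
  const tokens = await profile.listTokens(opts)
  const readonly = tokens.filter(t => t.readonly)
  for (const t of readonly) {
    await profile.removeToken(t.key, opts)
  }
  return readonly.length
}
```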
-### profile.createToken(password, readonly, cidr_whitelist, config) → Promise +### `> profile.createToken(password, readonly, cidr_whitelist, [opts]) → Promise` ```js -profile.createToken(password, readonly, cidr_whitelist, registry, {token, otp}).then(newToken => { - // do something with the newToken -}) +const newToken = await profile.createToken( + password, readonly, cidr_whitelist, {token, otp} +) +// do something with the newToken ``` Create a new authentication token, possibly with restrictions. @@ -507,21 +478,14 @@ Create a new authentication token, possibly with restrictions. * `password` String * `readonly` Boolean * `cidr_whitelist` Array -* `config` Object - * `registry` String (for reference, the npm registry is `https://registry.npmjs.org`) - * `auth` Object, properties: `token` — a bearer token returned from - `adduser`, `login` or `createToken`, or, `username`, `password` (and - optionally `otp`). Authenticating for this command via a username and - password will likely not be supported in the future. - * `opts` Object, [make-fetch-happen options](https://www.npmjs.com/package/make-fetch-happen#extra-options) for setting - things like cache, proxy, SSL CA and retry rules. +* [`opts`](#opts) Object Optional #### **Promise Value** The promise will resolve with an object very much like the one's returned by `profile.listTokens`. The only difference is that `token` is not truncated. -``` +```js { token: String, key: String, // sha512 hash of the token UUID @@ -545,12 +509,28 @@ on this account isn't allowed from then the `code` will be set to `EAUTHIP`. Otherwise the code will be the HTTP response code. -## Logging +### options objects + +The various API functions accept an optional `opts` object as a final +argument. This opts object can either be a regular Object, or a +[`figgy-pudding`](https://npm.im/figgy-pudding) options object instance. + +Unless otherwise noted, the options accepted are the same as the +[`npm-registry-fetch` +options](https://www.npmjs.com/package/npm-registry-fetch#fetch-opts). + +Of particular note are `opts.registry`, and the auth-related options: + +* `opts.token` - used for Bearer auth +* `opts.username` and `opts.password` - used for Basic auth +* `opts.otp` - the 2fa OTP token + +## Logging This modules logs by emitting `log` events on the global `process` object. 
These events look like this: -``` +```js process.emit('log', 'loglevel', 'feature', 'message part 1', 'part 2', 'part 3', 'etc') ``` @@ -562,13 +542,13 @@ The remaining arguments are evaluated like `console.log` and joined together wit A real world example of this is: -``` - process.emit('log', 'http', 'request', '→',conf.method || 'GET', conf.target) +```js + process.emit('log', 'http', 'request', '→', conf.method || 'GET', conf.target) ``` To handle the log events, you would do something like this: -``` +```js const log = require('npmlog') process.on('log', function (level) { return log[level].apply(log, [].slice.call(arguments, 1)) diff --git a/node_modules/npm-profile/index.js b/node_modules/npm-profile/index.js index 023ddae40c8c6..d9c48c131bc92 100644 --- a/node_modules/npm-profile/index.js +++ b/node_modules/npm-profile/index.js @@ -1,8 +1,10 @@ 'use strict' -const fetch = require('make-fetch-happen').defaults({retry: false}) -const validate = require('aproba') -const url = require('url') + +const fetch = require('npm-registry-fetch') +const { HttpErrorBase } = require('npm-registry-fetch/errors.js') const os = require('os') +const pudding = require('figgy-pudding') +const validate = require('aproba') exports.adduserCouch = adduserCouch exports.loginCouch = loginCouch @@ -16,99 +18,88 @@ exports.listTokens = listTokens exports.removeToken = removeToken exports.createToken = createToken +const url = require('url') + +const isValidUrl = u => { + if (u && typeof u === 'string') { + const p = url.parse(u) + return p.slashes && p.host && p.path && /^https?:$/.test(p.protocol) + } + return false +} + +const ProfileConfig = pudding({ + creds: {}, + hostname: {}, + otp: {} +}) + // try loginWeb, catch the "not supported" message and fall back to couch -function login (opener, prompter, conf) { +function login (opener, prompter, opts) { validate('FFO', arguments) - return loginWeb(opener, conf).catch(er => { + opts = ProfileConfig(opts) + return loginWeb(opener, opts).catch(er => { if (er instanceof WebLoginNotSupported) { process.emit('log', 'verbose', 'web login not supported, trying couch') - return prompter(conf.creds) - .then(data => loginCouch(data.username, data.password, conf)) + return prompter(opts.creds) + .then(data => loginCouch(data.username, data.password, opts)) } else { throw er } }) } -function adduser (opener, prompter, conf) { +function adduser (opener, prompter, opts) { validate('FFO', arguments) - return adduserWeb(opener, conf).catch(er => { + opts = ProfileConfig(opts) + return adduserWeb(opener, opts).catch(er => { if (er instanceof WebLoginNotSupported) { process.emit('log', 'verbose', 'web adduser not supported, trying couch') - return prompter(conf.creds) - .then(data => adduserCouch(data.username, data.email, data.password, conf)) + return prompter(opts.creds) + .then(data => adduserCouch(data.username, data.email, data.password, opts)) } else { throw er } }) } -function adduserWeb (opener, conf) { +function adduserWeb (opener, opts) { validate('FO', arguments) const body = { create: true } process.emit('log', 'verbose', 'web adduser', 'before first POST') - return webAuth(opener, conf, body) + return webAuth(opener, opts, body) } -function loginWeb (opener, conf) { +function loginWeb (opener, opts) { validate('FO', arguments) process.emit('log', 'verbose', 'web login', 'before first POST') - return webAuth(opener, conf, {}) + return webAuth(opener, opts, {}) } -function webAuth (opener, conf, body) { - if (!conf.opts) conf.opts = {} - const target = 
url.resolve(conf.registry, '-/v1/login') - body.hostname = conf.hostname || os.hostname() - return fetchJSON({ - target: target, +function webAuth (opener, opts, body) { + opts = ProfileConfig(opts) + body.hostname = opts.hostname || os.hostname() + const target = '/-/v1/login' + return fetch(target, opts.concat({ method: 'POST', - body: body, - opts: conf.opts, - saveResponse: true - }).then(result => { - const res = result[0] - const content = result[1] + body + })).then(res => { + return Promise.all([res, res.json()]) + }).then(([res, content]) => { + const { doneUrl, loginUrl } = content process.emit('log', 'verbose', 'web auth', 'got response', content) - const doneUrl = content.doneUrl - const loginUrl = content.loginUrl - if (typeof doneUrl !== 'string' || - typeof loginUrl !== 'string' || - !doneUrl || !loginUrl) { - throw new WebLoginInvalidResponse('POST', target, res, content) + if (!isValidUrl(doneUrl) || !isValidUrl(loginUrl)) { + throw new WebLoginInvalidResponse('POST', res, content) } + return content + }).then(({ doneUrl, loginUrl }) => { process.emit('log', 'verbose', 'web auth', 'opening url pair') - const doneConf = { - target: doneUrl, - method: 'GET', - opts: conf.opts, - saveResponse: true - } - return opener(loginUrl).then(() => fetchJSON(doneConf)).then(onDone) - function onDone (result) { - const res = result[0] - const content = result[1] - if (res.status === 200) { - if (!content.token) { - throw new WebLoginInvalidResponse('GET', doneUrl, res, content) - } else { - return content - } - } else if (res.status === 202) { - const retry = +res.headers.get('retry-after') - if (retry > 0) { - return new Promise(resolve => setTimeout(resolve, 1000 * retry)) - .then(() => fetchJSON(doneConf)).then(onDone) - } else { - return fetchJSON(doneConf).then(onDone) - } - } else { - throw new WebLoginInvalidResponse('GET', doneUrl, res, content) - } - } + return opener(loginUrl).then( + () => webAuthCheckLogin(doneUrl, opts.concat({ cache: false })) + ) }).catch(er => { if ((er.statusCode >= 400 && er.statusCode <= 499) || er.statusCode === 500) { - throw new WebLoginNotSupported('POST', target, { + throw new WebLoginNotSupported('POST', { status: er.statusCode, headers: { raw: () => er.headers } }, er.body) @@ -118,10 +109,33 @@ function webAuth (opener, conf, body) { }) } -function adduserCouch (username, email, password, conf) { +function webAuthCheckLogin (doneUrl, opts) { + return fetch(doneUrl, opts).then(res => { + return Promise.all([res, res.json()]) + }).then(([res, content]) => { + if (res.status === 200) { + if (!content.token) { + throw new WebLoginInvalidResponse('GET', res, content) + } else { + return content + } + } else if (res.status === 202) { + const retry = +res.headers.get('retry-after') * 1000 + if (retry > 0) { + return sleep(retry).then(() => webAuthCheckLogin(doneUrl, opts)) + } else { + return webAuthCheckLogin(doneUrl, opts) + } + } else { + throw new WebLoginInvalidResponse('GET', res, content) + } + }) +} + +function adduserCouch (username, email, password, opts) { validate('SSSO', arguments) - if (!conf.opts) conf.opts = {} - const userobj = { + opts = ProfileConfig(opts) + const body = { _id: 'org.couchdb.user:' + username, name: username, password: password, @@ -131,23 +145,25 @@ function adduserCouch (username, email, password, conf) { date: new Date().toISOString() } const logObj = {} - Object.keys(userobj).forEach(k => { - logObj[k] = k === 'password' ? 'XXXXX' : userobj[k] + Object.keys(body).forEach(k => { + logObj[k] = k === 'password' ? 
'XXXXX' : body[k] }) process.emit('log', 'verbose', 'adduser', 'before first PUT', logObj) - const target = url.resolve(conf.registry, '-/user/org.couchdb.user:' + encodeURIComponent(username)) - - return fetchJSON({target: target, method: 'PUT', body: userobj, opts: conf.opts}) - .then(result => { - result.username = username - return result - }) + const target = '/-/user/org.couchdb.user:' + encodeURIComponent(username) + return fetch.json(target, opts.concat({ + method: 'PUT', + body + })).then(result => { + result.username = username + return result + }) } -function loginCouch (username, password, conf) { +function loginCouch (username, password, opts) { validate('SSO', arguments) - const userobj = { + opts = ProfileConfig(opts) + const body = { _id: 'org.couchdb.user:' + username, name: username, password: password, @@ -156,36 +172,38 @@ function loginCouch (username, password, conf) { date: new Date().toISOString() } const logObj = {} - Object.keys(userobj).forEach(k => { - logObj[k] = k === 'password' ? 'XXXXX' : userobj[k] + Object.keys(body).forEach(k => { + logObj[k] = k === 'password' ? 'XXXXX' : body[k] }) process.emit('log', 'verbose', 'login', 'before first PUT', logObj) - const target = url.resolve(conf.registry, '-/user/org.couchdb.user:' + encodeURIComponent(username)) - return fetchJSON(Object.assign({method: 'PUT', target: target, body: userobj}, conf)).catch(err => { + const target = '-/user/org.couchdb.user:' + encodeURIComponent(username) + return fetch.json(target, opts.concat({ + method: 'PUT', + body + })).catch(err => { if (err.code === 'E400') { err.message = `There is no user with the username "${username}".` throw err } if (err.code !== 'E409') throw err - return fetchJSON(Object.assign({method: 'GET', target: target + '?write=true'}, conf)).then(result => { + return fetch.json(target, opts.concat({ + query: { write: true } + })).then(result => { Object.keys(result).forEach(function (k) { - if (!userobj[k] || k === 'roles') { - userobj[k] = result[k] + if (!body[k] || k === 'roles') { + body[k] = result[k] } }) - const req = { + return fetch.json(`${target}/-rev/${body._rev}`, opts.concat({ method: 'PUT', - target: target + '/-rev/' + userobj._rev, - body: userobj, - auth: { - basic: { - username: username, - password: password - } + body, + forceAuth: { + username, + password: Buffer.from(password, 'utf8').toString('base64'), + otp: opts.otp } - } - return fetchJSON(Object.assign({}, conf, req)) + })) }) }).then(result => { result.username = username @@ -193,29 +211,31 @@ function loginCouch (username, password, conf) { }) } -function get (conf) { +function get (opts) { validate('O', arguments) - const target = url.resolve(conf.registry, '-/npm/v1/user') - return fetchJSON(Object.assign({target: target}, conf)) + return fetch.json('/-/npm/v1/user', opts) } -function set (profile, conf) { +function set (profile, opts) { validate('OO', arguments) - const target = url.resolve(conf.registry, '-/npm/v1/user') Object.keys(profile).forEach(key => { // profile keys can't be empty strings, but they CAN be null if (profile[key] === '') profile[key] = null }) - return fetchJSON(Object.assign({target: target, method: 'POST', body: profile}, conf)) + return fetch.json('/-/npm/v1/user', ProfileConfig(opts, { + method: 'POST', + body: profile + })) } -function listTokens (conf) { +function listTokens (opts) { validate('O', arguments) + opts = ProfileConfig(opts) - return untilLastPage(`-/npm/v1/tokens`) + return untilLastPage('/-/npm/v1/tokens') function untilLastPage (href, 
objects) { - return fetchJSON(Object.assign({target: url.resolve(conf.registry, href)}, conf)).then(result => { + return fetch.json(href, opts).then(result => { objects = objects ? objects.concat(result.objects) : result.objects if (result.urls.next) { return untilLastPage(result.urls.next, objects) @@ -226,174 +246,44 @@ function listTokens (conf) { } } -function removeToken (tokenKey, conf) { +function removeToken (tokenKey, opts) { validate('SO', arguments) - const target = url.resolve(conf.registry, `-/npm/v1/tokens/token/${tokenKey}`) - return fetchJSON(Object.assign({target: target, method: 'DELETE'}, conf)) + const target = `/-/npm/v1/tokens/token/${tokenKey}` + return fetch(target, ProfileConfig(opts, { + method: 'DELETE', + ignoreBody: true + })).then(() => null) } -function createToken (password, readonly, cidrs, conf) { +function createToken (password, readonly, cidrs, opts) { validate('SBAO', arguments) - const target = url.resolve(conf.registry, '-/npm/v1/tokens') - const props = { - password: password, - readonly: readonly, - cidr_whitelist: cidrs - } - return fetchJSON(Object.assign({target: target, method: 'POST', body: props}, conf)) -} - -function FetchError (err, method, target) { - err.method = method - err.href = target - return err -} - -class HttpErrorBase extends Error { - constructor (method, target, res, body) { - super() - this.headers = res.headers.raw() - this.statusCode = res.status - this.code = 'E' + res.status - this.method = method - this.target = target - this.body = body - this.pkgid = packageName(target) - } -} - -class HttpErrorGeneral extends HttpErrorBase { - constructor (method, target, res, body) { - super(method, target, res, body) - if (body && body.error) { - this.message = `Registry returned ${this.statusCode} for ${this.method} on ${this.target}: ${body.error}` - } else { - this.message = `Registry returned ${this.statusCode} for ${this.method} on ${this.target}` + return fetch.json('/-/npm/v1/tokens', ProfileConfig(opts, { + method: 'POST', + body: { + password: password, + readonly: readonly, + cidr_whitelist: cidrs } - Error.captureStackTrace(this, HttpErrorGeneral) - } + })) } class WebLoginInvalidResponse extends HttpErrorBase { - constructor (method, target, res, body) { - super(method, target, res, body) + constructor (method, res, body) { + super(method, res, body) this.message = 'Invalid response from web login endpoint' Error.captureStackTrace(this, WebLoginInvalidResponse) } } class WebLoginNotSupported extends HttpErrorBase { - constructor (method, target, res, body) { - super(method, target, res, body) + constructor (method, res, body) { + super(method, res, body) this.message = 'Web login not supported' this.code = 'ENYI' Error.captureStackTrace(this, WebLoginNotSupported) } } -class HttpErrorAuthOTP extends HttpErrorBase { - constructor (method, target, res, body) { - super(method, target, res, body) - this.message = 'OTP required for authentication' - this.code = 'EOTP' - Error.captureStackTrace(this, HttpErrorAuthOTP) - } -} - -class HttpErrorAuthIPAddress extends HttpErrorBase { - constructor (method, target, res, body) { - super(method, target, res, body) - this.message = 'Login is not allowed from your IP address' - this.code = 'EAUTHIP' - Error.captureStackTrace(this, HttpErrorAuthIPAddress) - } -} - -class HttpErrorAuthUnknown extends HttpErrorBase { - constructor (method, target, res, body) { - super(method, target, res, body) - this.message = 'Unable to authenticate, need: ' + res.headers.get('www-authenticate') - 
this.code = 'EAUTHUNKNOWN' - Error.captureStackTrace(this, HttpErrorAuthUnknown) - } -} - -function authHeaders (auth) { - const headers = {} - if (!auth) return headers - if (auth.otp) headers['npm-otp'] = auth.otp - if (auth.token) { - headers['Authorization'] = 'Bearer ' + auth.token - } else if (auth.basic) { - const basic = auth.basic.username + ':' + auth.basic.password - headers['Authorization'] = 'Basic ' + Buffer.from(basic).toString('base64') - } - return headers -} - -function fetchJSON (conf) { - const fetchOpts = { - method: conf.method, - headers: Object.assign({}, conf.headers || (conf.auth && authHeaders(conf.auth)) || {}) - } - if (conf.body != null) { - fetchOpts.headers['Content-Type'] = 'application/json' - fetchOpts.body = JSON.stringify(conf.body) - } - process.emit('log', 'http', 'request', '→', conf.method || 'GET', conf.target) - return fetch.defaults(conf.opts || {})(conf.target, fetchOpts).catch(err => { - throw new FetchError(err, conf.method, conf.target) - }).then(res => { - if (res.headers.has('npm-notice')) { - process.emit('warn', 'notice', res.headers.get('npm-notice')) - } - if (res.headers.get('content-type') === 'application/json') { - return res.json().then(content => [res, content]) - } else { - return res.buffer().then(content => { - try { - return [res, JSON.parse(content)] - } catch (_) { - return [res, content] - } - }) - } - }).then(result => { - const res = result[0] - const content = result[1] - const retVal = conf.saveResponse ? result : content - process.emit('log', 'http', res.status, `← ${res.statusText} (${conf.target})`) - if (res.status === 401 && res.headers.get('www-authenticate')) { - const auth = res.headers.get('www-authenticate').split(/,\s*/).map(s => s.toLowerCase()) - if (auth.indexOf('ipaddress') !== -1) { - throw new HttpErrorAuthIPAddress(conf.method, conf.target, res, content) - } else if (auth.indexOf('otp') !== -1) { - throw new HttpErrorAuthOTP(conf.method, conf.target, res, content) - } else { - throw new HttpErrorAuthUnknown(conf.method, conf.target, res, content) - } - } else if (res.status < 200 || res.status >= 300) { - throw new HttpErrorGeneral(conf.method, conf.target, res, content) - } else { - return retVal - } - }) -} - -function packageName (href) { - try { - let basePath = url.parse(href).pathname.substr(1) - if (!basePath.match(/^-/)) { - basePath = basePath.split('/') - var index = basePath.indexOf('_rewrite') - if (index === -1) { - index = basePath.length - 1 - } else { - index++ - } - return decodeURIComponent(basePath[index]) - } - } catch (_) { - // this is ok - } +function sleep (ms) { + return new Promise((resolve, reject) => setTimeout(resolve, ms)) } diff --git a/node_modules/npm-profile/package.json b/node_modules/npm-profile/package.json index d158b71863292..e90e16ddf975d 100644 --- a/node_modules/npm-profile/package.json +++ b/node_modules/npm-profile/package.json @@ -1,28 +1,29 @@ { - "_from": "npm-profile@3.0.2", - "_id": "npm-profile@3.0.2", + "_from": "npm-profile@4.0.4", + "_id": "npm-profile@4.0.4", "_inBundle": false, - "_integrity": "sha512-rEJOFR6PbwOvvhGa2YTNOJQKNuc6RovJ6T50xPU7pS9h/zKPNCJ+VHZY2OFXyZvEi+UQYtHRTp8O/YM3tUD20A==", + "_integrity": "sha512-Ta8xq8TLMpqssF0H60BXS1A90iMoM6GeKwsmravJ6wYjWwSzcYBTdyWa3DZCYqPutacBMEm7cxiOkiIeCUAHDQ==", "_location": "/npm-profile", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "npm-profile@3.0.2", + "raw": "npm-profile@4.0.4", "name": "npm-profile", "escapedName": "npm-profile", - "rawSpec": "3.0.2", + 
"rawSpec": "4.0.4", "saveSpec": null, - "fetchSpec": "3.0.2" + "fetchSpec": "4.0.4" }, "_requiredBy": [ "#USER", - "/" + "/", + "/libnpm" ], - "_resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-3.0.2.tgz", - "_shasum": "58d568f1b56ef769602fd0aed8c43fa0e0de0f57", - "_spec": "npm-profile@3.0.2", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-4.0.4.tgz", + "_shasum": "28ee94390e936df6d084263ee2061336a6a1581b", + "_spec": "npm-profile@4.0.4", + "_where": "/Users/darcyclarke/Documents/Repos/npm/cli", "author": { "name": "Rebecca Turner", "email": "me@re-becca.org", @@ -34,7 +35,8 @@ "bundleDependencies": false, "dependencies": { "aproba": "^1.1.2 || 2", - "make-fetch-happen": "^2.5.0 || 3 || 4" + "figgy-pudding": "^3.4.1", + "npm-registry-fetch": "^4.0.0" }, "deprecated": false, "description": "Library for updating an npmjs.com profile", @@ -47,9 +49,12 @@ "license": "ISC", "main": "index.js", "name": "npm-profile", + "publishConfig": { + "tag": "legacy-v4" + }, "repository": { "type": "git", "url": "git+https://github.com/npm/npm-profile.git" }, - "version": "3.0.2" + "version": "4.0.4" } diff --git a/node_modules/npm-registry-client/CHANGELOG.md b/node_modules/npm-registry-client/CHANGELOG.md deleted file mode 100644 index 138b3be2d9e5c..0000000000000 --- a/node_modules/npm-registry-client/CHANGELOG.md +++ /dev/null @@ -1,21 +0,0 @@ -# Change Log - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - - -# [8.6.0](https://github.com/npm/npm-registry-client/compare/v8.5.1...v8.6.0) (2018-07-13) - - -### Features - -* **access:** Add support for npm access to set per-package 2fa requirements ([8b472d2](https://github.com/npm/npm-registry-client/commit/8b472d2)) - - - - -## [8.5.1](https://github.com/npm/npm-registry-client/compare/v8.5.0...v8.5.1) (2018-03-08) - - -### Bug Fixes - -* **error:** improve `User not found` publish message ([#167](https://github.com/npm/npm-registry-client/issues/167)) ([5ebcffc](https://github.com/npm/npm-registry-client/commit/5ebcffc)) diff --git a/node_modules/npm-registry-client/LICENSE b/node_modules/npm-registry-client/LICENSE deleted file mode 100644 index 19129e315fe59..0000000000000 --- a/node_modules/npm-registry-client/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-registry-client/README.md b/node_modules/npm-registry-client/README.md deleted file mode 100644 index e9ab77b0dc02b..0000000000000 --- a/node_modules/npm-registry-client/README.md +++ /dev/null @@ -1,357 +0,0 @@ -# npm-registry-client - -The code that npm uses to talk to the registry. 
- -It handles all the caching and HTTP calls. - -## Usage - -```javascript -var RegClient = require('npm-registry-client') -var client = new RegClient(config) -var uri = "https://registry.npmjs.org/npm" -var params = {timeout: 1000} - -client.get(uri, params, function (error, data, raw, res) { - // error is an error if there was a problem. - // data is the parsed data object - // raw is the json string - // res is the response from couch -}) -``` - -# Registry URLs - -The registry calls take either a full URL pointing to a resource in the -registry, or a base URL for the registry as a whole (including the registry -path – but be sure to terminate the path with `/`). `http` and `https` URLs are -the only ones supported. - -## Using the client - -Every call to the client follows the same pattern: - -* `uri` {String} The *fully-qualified* URI of the registry API method being - invoked. -* `params` {Object} Per-request parameters. -* `callback` {Function} Callback to be invoked when the call is complete. - -### Credentials - -Many requests to the registry can be authenticated, and require credentials -for authorization. These credentials always look the same: - -* `username` {String} -* `password` {String} -* `email` {String} -* `alwaysAuth` {Boolean} Whether calls to the target registry are always - authed. - -**or** - -* `token` {String} -* `alwaysAuth` {Boolean} Whether calls to the target registry are always - authed. - -## Requests - -As of `npm-registry-client@8`, all requests are made with an `Accept` header -of `application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*`. - -This enables filtered document responses to requests for package metadata. -You know that you got a filtered response if the mime type is set to -`application/vnd.npm.install-v1+json` and not `application/json`. - -This filtering substantially reduces the over all data size. For example -for `https://registry.npmjs.org/npm`, the compressed metadata goes from -410kB to 21kB. - -## API - -### client.access(uri, params, cb) - -* `uri` {String} Registry URL for the package's access API endpoint. - Looks like `/-/package//access`. -* `params` {Object} Object containing per-request properties. - * `access` {String} New access level for the package. Can be either - `public` or `restricted`. Registry will raise an error if trying - to change the access level of an unscoped package. - * `auth` {Credentials} - -Set the access level for scoped packages. For now, there are only two -access levels: "public" and "restricted". - -### client.adduser(uri, params, cb) - -* `uri` {String} Base registry URL. -* `params` {Object} Object containing per-request properties. - * `auth` {Credentials} -* `cb` {Function} - * `error` {Error | null} - * `data` {Object} the parsed data object - * `raw` {String} the json - * `res` {Response Object} response from couch - -Add a user account to the registry, or verify the credentials. - -### client.deprecate(uri, params, cb) - -* `uri` {String} Full registry URI for the deprecated package. -* `params` {Object} Object containing per-request properties. - * `version` {String} Semver version range. - * `message` {String} The message to use as a deprecation warning. - * `auth` {Credentials} -* `cb` {Function} - -Deprecate a version of a package in the registry. - -### client.distTags.fetch(uri, params, cb) - -* `uri` {String} Base URL for the registry. -* `params` {Object} Object containing per-request properties. - * `package` {String} Name of the package. 
- * `auth` {Credentials} -* `cb` {Function} - -Fetch all of the `dist-tags` for the named package. - -### client.distTags.add(uri, params, cb) - -* `uri` {String} Base URL for the registry. -* `params` {Object} Object containing per-request properties. - * `package` {String} Name of the package. - * `distTag` {String} Name of the new `dist-tag`. - * `version` {String} Exact version to be mapped to the `dist-tag`. - * `auth` {Credentials} -* `cb` {Function} - -Add (or replace) a single dist-tag onto the named package. - -### client.distTags.set(uri, params, cb) - -* `uri` {String} Base URL for the registry. -* `params` {Object} Object containing per-request properties. - * `package` {String} Name of the package. - * `distTags` {Object} Object containing a map from tag names to package - versions. - * `auth` {Credentials} -* `cb` {Function} - -Set all of the `dist-tags` for the named package at once, creating any -`dist-tags` that do not already exist. Any `dist-tags` not included in the -`distTags` map will be removed. - -### client.distTags.update(uri, params, cb) - -* `uri` {String} Base URL for the registry. -* `params` {Object} Object containing per-request properties. - * `package` {String} Name of the package. - * `distTags` {Object} Object containing a map from tag names to package - versions. - * `auth` {Credentials} -* `cb` {Function} - -Update the values of multiple `dist-tags`, creating any `dist-tags` that do -not already exist. Any pre-existing `dist-tags` not included in the `distTags` -map will be left alone. - -### client.distTags.rm(uri, params, cb) - -* `uri` {String} Base URL for the registry. -* `params` {Object} Object containing per-request properties. - * `package` {String} Name of the package. - * `distTag` {String} Name of the new `dist-tag`. - * `auth` {Credentials} -* `cb` {Function} - -Remove a single `dist-tag` from the named package. - -### client.get(uri, params, cb) - -* `uri` {String} The complete registry URI to fetch -* `params` {Object} Object containing per-request properties. - * `timeout` {Number} Duration before the request times out. Optional - (default: never). - * `follow` {Boolean} Follow 302/301 responses. Optional (default: true). - * `staleOk` {Boolean} If there's cached data available, then return that to - the callback quickly, and update the cache the background. Optional - (default: false). - * `auth` {Credentials} Optional. - * `fullMetadata` {Boolean} If true, don't attempt to fetch filtered - ("corgi") registry metadata. (default: false) -* `cb` {Function} - -Fetches data from the registry via a GET request, saving it in the cache folder -with the ETag or the "Last Modified" timestamp. - -### client.publish(uri, params, cb) - -* `uri` {String} The registry URI for the package to publish. -* `params` {Object} Object containing per-request properties. - * `metadata` {Object} Package metadata. - * `access` {String} Access for the package. Can be `public` or `restricted` (no default). - * `body` {Stream} Stream of the package body / tarball. - * `auth` {Credentials} -* `cb` {Function} - -Publish a package to the registry. - -Note that this does not create the tarball from a folder. - -### client.sendAnonymousCLIMetrics(uri, params, cb) - -- `uri` {String} Base URL for the registry. -- `params` {Object} Object containing per-request properties. - - `metricId` {String} A uuid unique to this dataset. 
- - `metrics` {Object} The metrics to share with the registry, with the following properties: - - `from` {Date} When the first data in this report was collected. - - `to` {Date} When the last data in this report was collected. Usually right now. - - `successfulInstalls` {Number} The number of successful installs in this period. - - `failedInstalls` {Number} The number of installs that ended in error in this period. -- `cb` {Function} - -PUT a metrics object to the `/-/npm/anon-metrics/v1/` endpoint on the registry. - -### client.star(uri, params, cb) - -* `uri` {String} The complete registry URI for the package to star. -* `params` {Object} Object containing per-request properties. - * `starred` {Boolean} True to star the package, false to unstar it. Optional - (default: false). - * `auth` {Credentials} -* `cb` {Function} - -Star or unstar a package. - -Note that the user does not have to be the package owner to star or unstar a -package, though other writes do require that the user be the package owner. - -### client.stars(uri, params, cb) - -* `uri` {String} The base URL for the registry. -* `params` {Object} Object containing per-request properties. - * `username` {String} Name of user to fetch starred packages for. Optional - (default: user in `auth`). - * `auth` {Credentials} Optional (required if `username` is omitted). -* `cb` {Function} - -View your own or another user's starred packages. - -### client.tag(uri, params, cb) - -* `uri` {String} The complete registry URI to tag -* `params` {Object} Object containing per-request properties. - * `version` {String} Version to tag. - * `tag` {String} Tag name to apply. - * `auth` {Credentials} -* `cb` {Function} - -Mark a version in the `dist-tags` hash, so that `pkg@tag` will fetch the -specified version. - -### client.unpublish(uri, params, cb) - -* `uri` {String} The complete registry URI of the package to unpublish. -* `params` {Object} Object containing per-request properties. - * `version` {String} version to unpublish. Optional – omit to unpublish all - versions. - * `auth` {Credentials} -* `cb` {Function} - -Remove a version of a package (or all versions) from the registry. When the -last version us unpublished, the entire document is removed from the database. - -### client.whoami(uri, params, cb) - -* `uri` {String} The base registry for the URI. -* `params` {Object} Object containing per-request properties. - * `auth` {Credentials} -* `cb` {Function} - -Simple call to see who the registry thinks you are. Especially useful with -token-based auth. - - -## PLUMBING - -The below are primarily intended for use by the rest of the API, or by the npm -caching logic directly. - -### client.request(uri, params, cb) - -* `uri` {String} URI pointing to the resource to request. -* `params` {Object} Object containing per-request properties. - * `method` {String} HTTP method. Optional (default: "GET"). - * `body` {Stream | Buffer | String | Object} The request body. Objects - that are not Buffers or Streams are encoded as JSON. Optional – body - only used for write operations. - * `etag` {String} The cached ETag. Optional. - * `lastModified` {String} The cached Last-Modified timestamp. Optional. - * `follow` {Boolean} Follow 302/301 responses. Optional (default: true). - * `streaming` {Boolean} Stream the request body as it comes, handling error - responses in a non-streaming way. - * `auth` {Credentials} Optional. 
-* `cb` {Function} - * `error` {Error | null} - * `data` {Object} the parsed data object - * `raw` {String} the json - * `res` {Response Object} response from couch - -Make a generic request to the registry. All the other methods are wrappers -around `client.request`. - -### client.fetch(uri, params, cb) - -* `uri` {String} The complete registry URI to upload to -* `params` {Object} Object containing per-request properties. - * `headers` {Stream} HTTP headers to be included with the request. Optional. - * `auth` {Credentials} Optional. -* `cb` {Function} - -Fetch a package from a URL, with auth set appropriately if included. Used to -cache remote tarballs as well as request package tarballs from the registry. - -# Configuration - -The client uses its own configuration, which is just passed in as a simple -nested object. The following are the supported values (with their defaults, if -any): - -* `proxy.http` {URL} The URL to proxy HTTP requests through. -* `proxy.https` {URL} The URL to proxy HTTPS requests through. Defaults to be - the same as `proxy.http` if unset. -* `proxy.localAddress` {IP} The local address to use on multi-homed systems. -* `ssl.ca` {String} Certificate signing authority certificates to trust. -* `ssl.certificate` {String} Client certificate (PEM encoded). Enable access - to servers that require client certificates. -* `ssl.key` {String} Private key (PEM encoded) for client certificate. -* `ssl.strict` {Boolean} Whether or not to be strict with SSL certificates. - Default = `true` -* `retry.count` {Number} Number of times to retry on GET failures. Default = 2. -* `retry.factor` {Number} `factor` setting for `node-retry`. Default = 10. -* `retry.minTimeout` {Number} `minTimeout` setting for `node-retry`. - Default = 10000 (10 seconds) -* `retry.maxTimeout` {Number} `maxTimeout` setting for `node-retry`. - Default = 60000 (60 seconds) -* `userAgent` {String} User agent header to send. Default = - `"node/{process.version}"` -* `log` {Object} The logger to use. Defaults to `require("npmlog")` if - that works, otherwise logs are disabled. -* `defaultTag` {String} The default tag to use when publishing new packages. - Default = `"latest"` -* `couchToken` {Object} A token for use with - [couch-login](https://npmjs.org/package/couch-login). -* `sessionToken` {String} A random identifier for this set of client requests. - Default = 8 random hexadecimal bytes. -* `maxSockets` {Number} The maximum number of connections that will be open per - origin (unique combination of protocol:host:port). Passed to the - [httpAgent](https://nodejs.org/api/http.html#http_agent_maxsockets). - Default = 50 -* `isFromCI` {Boolean} Identify to severs if this request is coming from CI (for statistics purposes). - Default = detected from environment– primarily this is done by looking for - the CI environment variable to be set to `true`. Also accepted are the - existence of the `JENKINS_URL`, `bamboo.buildKey` and `TDDIUM` environment - variables. -* `scope` {String} The scope of the project this command is being run for. This is the - top level npm module in which a command was run. - Default = none diff --git a/node_modules/npm-registry-client/index.js b/node_modules/npm-registry-client/index.js deleted file mode 100644 index 07eab3f36cfe1..0000000000000 --- a/node_modules/npm-registry-client/index.js +++ /dev/null @@ -1,74 +0,0 @@ -// utilities for working with the js-registry site. 
- -module.exports = RegClient - -var npmlog -try { - npmlog = require('npmlog') -} catch (er) { - npmlog = { - error: noop, - warn: noop, - info: noop, - verbose: noop, - silly: noop, - http: noop, - pause: noop, - resume: noop - } -} - -function noop () {} - -function RegClient (config) { - this.config = Object.create(config || {}) - - this.config.proxy = this.config.proxy || {} - if (!this.config.proxy.https && this.config.proxy.http) { - this.config.proxy.https = this.config.proxy.http - } - - this.config.ssl = this.config.ssl || {} - if (this.config.ssl.strict === undefined) this.config.ssl.strict = true - - this.config.retry = this.config.retry || {} - if (typeof this.config.retry.retries !== 'number') this.config.retry.retries = 2 - if (typeof this.config.retry.factor !== 'number') this.config.retry.factor = 10 - if (typeof this.config.retry.minTimeout !== 'number') this.config.retry.minTimeout = 10000 - if (typeof this.config.retry.maxTimeout !== 'number') this.config.retry.maxTimeout = 60000 - if (typeof this.config.maxSockets !== 'number') this.config.maxSockets = 50 - - this.config.userAgent = this.config.userAgent || 'node/' + process.version - this.config.defaultTag = this.config.defaultTag || 'latest' - - this.log = this.config.log || npmlog - delete this.config.log - - var client = this - client.access = require('./lib/access') - client.adduser = require('./lib/adduser') - client.attempt = require('./lib/attempt') - client.authify = require('./lib/authify') - client.deprecate = require('./lib/deprecate') - client.distTags = Object.create(client) - client.distTags.add = require('./lib/dist-tags/add') - client.distTags.fetch = require('./lib/dist-tags/fetch') - client.distTags.rm = require('./lib/dist-tags/rm') - client.distTags.set = require('./lib/dist-tags/set') - client.distTags.update = require('./lib/dist-tags/update') - client.fetch = require('./lib/fetch') - client.get = require('./lib/get') - client.initialize = require('./lib/initialize') - client.logout = require('./lib/logout') - client.org = require('./lib/org') - client.ping = require('./lib/ping') - client.publish = require('./lib/publish') - client.request = require('./lib/request') - client.sendAnonymousCLIMetrics = require('./lib/send-anonymous-CLI-metrics') - client.star = require('./lib/star') - client.stars = require('./lib/stars') - client.tag = require('./lib/tag') - client.team = require('./lib/team') - client.unpublish = require('./lib/unpublish') - client.whoami = require('./lib/whoami') -} diff --git a/node_modules/npm-registry-client/lib/access.js b/node_modules/npm-registry-client/lib/access.js deleted file mode 100644 index caa80b12191c1..0000000000000 --- a/node_modules/npm-registry-client/lib/access.js +++ /dev/null @@ -1,168 +0,0 @@ -module.exports = access - -var assert = require('assert') -var url = require('url') -var npa = require('npm-package-arg') -var subcommands = {} - -function access (sub, uri, params, cb) { - accessAssertions(sub, uri, params, cb) - return subcommands[sub].call(this, uri, params, cb) -} - -subcommands.public = function (uri, params, cb) { - return setAccess.call(this, 'public', uri, params, cb) -} -subcommands.restricted = function (uri, params, cb) { - return setAccess.call(this, 'restricted', uri, params, cb) -} -subcommands['2fa-required'] = function (uri, params, cb) { - return setRequires2fa.call(this, true, uri, params, cb) -} -subcommands['2fa-not-required'] = function (uri, params, cb) { - return setRequires2fa.call(this, false, uri, params, cb) -} - -function 
setAccess (access, uri, params, cb) { - return this.request(apiUri(uri, 'package', params.package, 'access'), { - method: 'POST', - auth: params.auth, - body: JSON.stringify({ access: access }) - }, cb) -} - -function setRequires2fa (requires2fa, uri, params, cb) { - return this.request(apiUri(uri, 'package', params.package, 'access'), { - method: 'POST', - auth: params.auth, - body: JSON.stringify({ publish_requires_tfa: requires2fa }) - }, cb) -} - -subcommands.grant = function (uri, params, cb) { - var reqUri = apiUri(uri, 'team', params.scope, params.team, 'package') - return this.request(reqUri, { - method: 'PUT', - auth: params.auth, - body: JSON.stringify({ - permissions: params.permissions, - package: params.package - }) - }, cb) -} - -subcommands.revoke = function (uri, params, cb) { - var reqUri = apiUri(uri, 'team', params.scope, params.team, 'package') - return this.request(reqUri, { - method: 'DELETE', - auth: params.auth, - body: JSON.stringify({ - package: params.package - }) - }, cb) -} - -subcommands['ls-packages'] = function (uri, params, cb, type) { - type = type || (params.team ? 'team' : 'org') - var client = this - var uriParams = '?format=cli' - var reqUri = apiUri(uri, type, params.scope, params.team, 'package') - return client.request(reqUri + uriParams, { - method: 'GET', - auth: params.auth - }, function (err, perms) { - if (err && err.statusCode === 404 && type === 'org') { - subcommands['ls-packages'].call(client, uri, params, cb, 'user') - } else { - cb(err, perms && translatePermissions(perms)) - } - }) -} - -subcommands['ls-collaborators'] = function (uri, params, cb) { - var uriParams = '?format=cli' - if (params.user) { - uriParams += ('&user=' + encodeURIComponent(params.user)) - } - var reqUri = apiUri(uri, 'package', params.package, 'collaborators') - return this.request(reqUri + uriParams, { - method: 'GET', - auth: params.auth - }, function (err, perms) { - cb(err, perms && translatePermissions(perms)) - }) -} - -subcommands.edit = function () { - throw new Error('edit subcommand is not implemented yet') -} - -function apiUri (registryUri) { - var path = Array.prototype.slice.call(arguments, 1) - .filter(function (x) { return x }) - .map(encodeURIComponent) - .join('/') - return url.resolve(registryUri, '-/' + path) -} - -function accessAssertions (subcommand, uri, params, cb) { - assert(subcommands.hasOwnProperty(subcommand), - 'access subcommand must be one of ' + - Object.keys(subcommands).join(', ')) - typeChecks({ - 'uri': [uri, 'string'], - 'params': [params, 'object'], - 'auth': [params.auth, 'object'], - 'callback': [cb, 'function'] - }) - if (contains([ - 'public', 'restricted' - ], subcommand)) { - typeChecks({ 'package': [params.package, 'string'] }) - assert(!!npa(params.package).scope, - 'access commands are only accessible for scoped packages') - } - if (contains(['grant', 'revoke', 'ls-packages'], subcommand)) { - typeChecks({ 'scope': [params.scope, 'string'] }) - } - if (contains(['grant', 'revoke'], subcommand)) { - typeChecks({ 'team': [params.team, 'string'] }) - } - if (subcommand === 'grant') { - typeChecks({ 'permissions': [params.permissions, 'string'] }) - assert(params.permissions === 'read-only' || - params.permissions === 'read-write', - 'permissions must be either read-only or read-write') - } -} - -function typeChecks (specs) { - Object.keys(specs).forEach(function (key) { - var checks = specs[key] - /* eslint valid-typeof:0 */ - assert(typeof checks[0] === checks[1], - key + ' is required and must be of type ' + 
checks[1]) - }) -} - -function contains (arr, item) { - return arr.indexOf(item) !== -1 -} - -function translatePermissions (perms) { - var newPerms = {} - for (var key in perms) { - if (perms.hasOwnProperty(key)) { - if (perms[key] === 'read') { - newPerms[key] = 'read-only' - } else if (perms[key] === 'write') { - newPerms[key] = 'read-write' - } else { - // This shouldn't happen, but let's not break things - // if the API starts returning different things. - newPerms[key] = perms[key] - } - } - } - return newPerms -} diff --git a/node_modules/npm-registry-client/lib/adduser.js b/node_modules/npm-registry-client/lib/adduser.js deleted file mode 100644 index a31d5b0333999..0000000000000 --- a/node_modules/npm-registry-client/lib/adduser.js +++ /dev/null @@ -1,128 +0,0 @@ -module.exports = adduser - -var url = require('url') -var assert = require('assert') - -function adduser (uri, params, cb) { - assert(typeof uri === 'string', 'must pass registry URI to adduser') - assert( - params && typeof params === 'object', - 'must pass params to adduser' - ) - assert(typeof cb === 'function', 'must pass callback to adduser') - - assert(params.auth && typeof params.auth, 'must pass auth to adduser') - var auth = params.auth - assert(typeof auth.username === 'string', 'must include username in auth') - assert(typeof auth.password === 'string', 'must include password in auth') - assert(typeof auth.email === 'string', 'must include email in auth') - - // normalize registry URL - if (uri.slice(-1) !== '/') uri += '/' - - var username = auth.username.trim() - var password = auth.password.trim() - var email = auth.email.trim() - - // validation - if (!username) return cb(new Error('No username supplied.')) - if (!password) return cb(new Error('No password supplied.')) - if (!email) return cb(new Error('No email address supplied.')) - if (!email.match(/^[^@]+@[^.]+\.[^.]+/)) { - return cb(new Error('Please use a real email address.')) - } - - var userobj = { - _id: 'org.couchdb.user:' + username, - name: username, - password: password, - email: email, - type: 'user', - roles: [], - date: new Date().toISOString() - } - - var token = this.config.couchToken - if (this.couchLogin) this.couchLogin.token = null - - cb = done.call(this, token, cb) - - var logObj = Object.keys(userobj).map(function (k) { - if (k === 'password') return [k, 'XXXXX'] - return [k, userobj[k]] - }).reduce(function (s, kv) { - s[kv[0]] = kv[1] - return s - }, {}) - - this.log.verbose('adduser', 'before first PUT', logObj) - - var client = this - - uri = url.resolve(uri, '-/user/org.couchdb.user:' + encodeURIComponent(username)) - var options = { - method: 'PUT', - body: userobj, - auth: auth - } - this.request( - uri, - Object.assign({}, options), - function (error, data, json, response) { - if (!error || !response || response.statusCode !== 409) { - return cb(error, data, json, response) - } - - client.log.verbose('adduser', 'update existing user') - return client.request( - uri + '?write=true', - { auth: auth }, - function (er, data, json, response) { - if (er || data.error) { - return cb(er, data, json, response) - } - Object.keys(data).forEach(function (k) { - if (!userobj[k] || k === 'roles') { - userobj[k] = data[k] - } - }) - client.log.verbose('adduser', 'userobj', logObj) - client.request(uri + '/-rev/' + userobj._rev, options, cb) - } - ) - } - ) - - function done (token, cb) { - return function (error, data, json, response) { - if (!error && (!response || response.statusCode === 201)) { - return cb(error, data, json, 
response) - } - - // there was some kind of error, reinstate previous auth/token/etc. - if (client.couchLogin) { - client.couchLogin.token = token - if (client.couchLogin.tokenSet) { - client.couchLogin.tokenSet(token) - } - } - - client.log.verbose('adduser', 'back', [error, data, json]) - if (!error) { - error = new Error( - ((response && response.statusCode) || '') + ' ' + - 'Could not create user\n' + JSON.stringify(data) - ) - } - - if (response && (response.statusCode === 401 || response.statusCode === 403)) { - client.log.warn('adduser', 'Incorrect username or password\n' + - 'You can reset your account by visiting:\n' + - '\n' + - ' https://npmjs.org/forgot\n') - } - - return cb(error) - } - } -} diff --git a/node_modules/npm-registry-client/lib/attempt.js b/node_modules/npm-registry-client/lib/attempt.js deleted file mode 100644 index d41bbc4fae825..0000000000000 --- a/node_modules/npm-registry-client/lib/attempt.js +++ /dev/null @@ -1,20 +0,0 @@ -var retry = require('retry') - -module.exports = attempt - -function attempt (cb) { - // Tuned to spread 3 attempts over about a minute. - // See formula at . - var operation = retry.operation(this.config.retry) - - var client = this - operation.attempt(function (currentAttempt) { - client.log.info( - 'attempt', - 'registry request try #' + currentAttempt + - ' at ' + (new Date()).toLocaleTimeString() - ) - - cb(operation) - }) -} diff --git a/node_modules/npm-registry-client/lib/authify.js b/node_modules/npm-registry-client/lib/authify.js deleted file mode 100644 index 9b38a30a92830..0000000000000 --- a/node_modules/npm-registry-client/lib/authify.js +++ /dev/null @@ -1,26 +0,0 @@ -module.exports = authify - -function authify (authed, parsed, headers, credentials) { - if (credentials && credentials.otp) { - this.log.verbose('request', 'passing along npm otp') - headers['npm-otp'] = credentials.otp - } - if (credentials && credentials.token) { - this.log.verbose('request', 'using bearer token for auth') - headers.authorization = 'Bearer ' + credentials.token - - return null - } - - if (authed) { - if (credentials && credentials.username && credentials.password) { - var username = encodeURIComponent(credentials.username) - var password = encodeURIComponent(credentials.password) - parsed.auth = username + ':' + password - } else { - return new Error( - 'This request requires auth credentials. Run `npm login` and repeat the request.' 
- ) - } - } -} diff --git a/node_modules/npm-registry-client/lib/deprecate.js b/node_modules/npm-registry-client/lib/deprecate.js deleted file mode 100644 index 5ff3a8891f5e2..0000000000000 --- a/node_modules/npm-registry-client/lib/deprecate.js +++ /dev/null @@ -1,42 +0,0 @@ -module.exports = deprecate - -var assert = require('assert') -var semver = require('semver') - -function deprecate (uri, params, cb) { - assert(typeof uri === 'string', 'must pass registry URI to deprecate') - assert(params && typeof params === 'object', 'must pass params to deprecate') - assert(typeof cb === 'function', 'must pass callback to deprecate') - - assert(typeof params.version === 'string', 'must pass version to deprecate') - assert(typeof params.message === 'string', 'must pass message to deprecate') - assert( - params.auth && typeof params.auth === 'object', - 'must pass auth to deprecate' - ) - - var version = params.version - var message = params.message - var auth = params.auth - - if (semver.validRange(version) === null) { - return cb(new Error('invalid version range: ' + version)) - } - - this.get(uri + '?write=true', { auth: auth }, function (er, data) { - if (er) return cb(er) - // filter all the versions that match - Object.keys(data.versions).filter(function (v) { - return semver.satisfies(v, version) - }).forEach(function (v) { - data.versions[v].deprecated = message - }) - // now update the doc on the registry - var options = { - method: 'PUT', - body: data, - auth: auth - } - this.request(uri, options, cb) - }.bind(this)) -} diff --git a/node_modules/npm-registry-client/lib/dist-tags/add.js b/node_modules/npm-registry-client/lib/dist-tags/add.js deleted file mode 100644 index 924199ad1e01d..0000000000000 --- a/node_modules/npm-registry-client/lib/dist-tags/add.js +++ /dev/null @@ -1,43 +0,0 @@ -module.exports = add - -var assert = require('assert') -var url = require('url') - -var npa = require('npm-package-arg') - -function add (uri, params, cb) { - assert(typeof uri === 'string', 'must pass registry URI to distTags.add') - assert( - params && typeof params === 'object', - 'must pass params to distTags.add' - ) - assert(typeof cb === 'function', 'muss pass callback to distTags.add') - - assert( - typeof params.package === 'string', - 'must pass package name to distTags.add' - ) - assert( - typeof params.distTag === 'string', - 'must pass package distTag name to distTags.add' - ) - assert( - typeof params.version === 'string', - 'must pass version to be mapped to distTag to distTags.add' - ) - assert( - params.auth && typeof params.auth === 'object', - 'must pass auth to distTags.add' - ) - - var p = npa(params.package) - var pkg = p.scope ? 
params.package.replace('/', '%2f') : params.package - var rest = '-/package/' + pkg + '/dist-tags/' + params.distTag - - var options = { - method: 'PUT', - body: JSON.stringify(params.version), - auth: params.auth - } - this.request(url.resolve(uri, rest), options, cb) -} diff --git a/node_modules/npm-registry-client/lib/dist-tags/fetch.js b/node_modules/npm-registry-client/lib/dist-tags/fetch.js deleted file mode 100644 index 69a126d1f45b4..0000000000000 --- a/node_modules/npm-registry-client/lib/dist-tags/fetch.js +++ /dev/null @@ -1,37 +0,0 @@ -module.exports = fetch - -var assert = require('assert') -var url = require('url') - -var npa = require('npm-package-arg') - -function fetch (uri, params, cb) { - assert(typeof uri === 'string', 'must pass registry URI to distTags.fetch') - assert( - params && typeof params === 'object', - 'must pass params to distTags.fetch' - ) - assert(typeof cb === 'function', 'must pass callback to distTags.fetch') - - assert( - typeof params.package === 'string', - 'must pass package name to distTags.fetch' - ) - assert( - params.auth && typeof params.auth === 'object', - 'must pass auth to distTags.fetch' - ) - - var p = npa(params.package) - var pkg = p.scope ? params.package.replace('/', '%2f') : params.package - var rest = '-/package/' + pkg + '/dist-tags' - - var options = { - method: 'GET', - auth: params.auth - } - this.request(url.resolve(uri, rest), options, function (er, data) { - if (data && typeof data === 'object') delete data._etag - cb(er, data) - }) -} diff --git a/node_modules/npm-registry-client/lib/dist-tags/rm.js b/node_modules/npm-registry-client/lib/dist-tags/rm.js deleted file mode 100644 index d2bdda05dac3d..0000000000000 --- a/node_modules/npm-registry-client/lib/dist-tags/rm.js +++ /dev/null @@ -1,38 +0,0 @@ -module.exports = rm - -var assert = require('assert') -var url = require('url') - -var npa = require('npm-package-arg') - -function rm (uri, params, cb) { - assert(typeof uri === 'string', 'must pass registry URI to distTags.rm') - assert( - params && typeof params === 'object', - 'must pass params to distTags.rm' - ) - assert(typeof cb === 'function', 'muss pass callback to distTags.rm') - - assert( - typeof params.package === 'string', - 'must pass package name to distTags.rm' - ) - assert( - typeof params.distTag === 'string', - 'must pass package distTag name to distTags.rm' - ) - assert( - params.auth && typeof params.auth === 'object', - 'must pass auth to distTags.rm' - ) - - var p = npa(params.package) - var pkg = p.scope ? 
params.package.replace('/', '%2f') : params.package - var rest = '-/package/' + pkg + '/dist-tags/' + params.distTag - - var options = { - method: 'DELETE', - auth: params.auth - } - this.request(url.resolve(uri, rest), options, cb) -} diff --git a/node_modules/npm-registry-client/lib/dist-tags/set.js b/node_modules/npm-registry-client/lib/dist-tags/set.js deleted file mode 100644 index 7af351d6359e8..0000000000000 --- a/node_modules/npm-registry-client/lib/dist-tags/set.js +++ /dev/null @@ -1,39 +0,0 @@ -module.exports = set - -var assert = require('assert') -var url = require('url') - -var npa = require('npm-package-arg') - -function set (uri, params, cb) { - assert(typeof uri === 'string', 'must pass registry URI to distTags.set') - assert( - params && typeof params === 'object', - 'must pass params to distTags.set' - ) - assert(typeof cb === 'function', 'muss pass callback to distTags.set') - - assert( - typeof params.package === 'string', - 'must pass package name to distTags.set' - ) - assert( - params.distTags && typeof params.distTags === 'object', - 'must pass distTags map to distTags.set' - ) - assert( - params.auth && typeof params.auth === 'object', - 'must pass auth to distTags.set' - ) - - var p = npa(params.package) - var pkg = p.scope ? params.package.replace('/', '%2f') : params.package - var rest = '-/package/' + pkg + '/dist-tags' - - var options = { - method: 'PUT', - body: JSON.stringify(params.distTags), - auth: params.auth - } - this.request(url.resolve(uri, rest), options, cb) -} diff --git a/node_modules/npm-registry-client/lib/dist-tags/update.js b/node_modules/npm-registry-client/lib/dist-tags/update.js deleted file mode 100644 index 07ec3e5e75f0f..0000000000000 --- a/node_modules/npm-registry-client/lib/dist-tags/update.js +++ /dev/null @@ -1,39 +0,0 @@ -module.exports = update - -var assert = require('assert') -var url = require('url') - -var npa = require('npm-package-arg') - -function update (uri, params, cb) { - assert(typeof uri === 'string', 'must pass registry URI to distTags.update') - assert( - params && typeof params === 'object', - 'must pass params to distTags.update' - ) - assert(typeof cb === 'function', 'muss pass callback to distTags.update') - - assert( - typeof params.package === 'string', - 'must pass package name to distTags.update' - ) - assert( - params.distTags && typeof params.distTags === 'object', - 'must pass distTags map to distTags.update' - ) - assert( - params.auth && typeof params.auth === 'object', - 'must pass auth to distTags.update' - ) - - var p = npa(params.package) - var pkg = p.scope ? 
params.package.replace('/', '%2f') : params.package - var rest = '-/package/' + pkg + '/dist-tags' - - var options = { - method: 'POST', - body: JSON.stringify(params.distTags), - auth: params.auth - } - this.request(url.resolve(uri, rest), options, cb) -} diff --git a/node_modules/npm-registry-client/lib/fetch.js b/node_modules/npm-registry-client/lib/fetch.js deleted file mode 100644 index 5ab8587780f20..0000000000000 --- a/node_modules/npm-registry-client/lib/fetch.js +++ /dev/null @@ -1,85 +0,0 @@ -var assert = require('assert') -var url = require('url') - -var request = require('request') -var once = require('once') - -module.exports = fetch - -function fetch (uri, params, cb) { - assert(typeof uri === 'string', 'must pass uri to request') - assert(params && typeof params === 'object', 'must pass params to request') - assert(typeof cb === 'function', 'must pass callback to request') - - cb = once(cb) - - var client = this - this.attempt(function (operation) { - makeRequest.call(client, uri, params, function (er, req) { - if (er) return cb(er) - - req.once('error', retryOnError) - - function retryOnError (er) { - if (operation.retry(er)) { - client.log.info('retry', 'will retry, error on last attempt: ' + er) - } else { - cb(er) - } - } - - req.on('response', function (res) { - client.log.http('fetch', '' + res.statusCode, uri) - req.removeListener('error', retryOnError) - - var er - var statusCode = res && res.statusCode - if (statusCode === 200) { - res.resume() - - req.once('error', function (er) { - res.emit('error', er) - }) - - return cb(null, res) - // Only retry on 408, 5xx or no `response`. - } else if (statusCode === 408) { - er = new Error('request timed out') - } else if (statusCode >= 500) { - er = new Error('server error ' + statusCode) - } - - if (er && operation.retry(er)) { - client.log.info('retry', 'will retry, error on last attempt: ' + er) - } else { - cb(new Error('fetch failed with status code ' + statusCode)) - } - }) - }) - }) -} - -function makeRequest (remote, params, cb) { - var parsed = url.parse(remote) - this.log.http('fetch', 'GET', parsed.href) - - var headers = params.headers || {} - var er = this.authify( - params.auth && params.auth.alwaysAuth, - parsed, - headers, - params.auth - ) - if (er) return cb(er) - - var opts = this.initialize( - parsed, - 'GET', - 'application/x-tar, application/vnd.github+json; q=0.1', - headers - ) - // always want to follow redirects for fetch - opts.followRedirect = true - - cb(null, request(opts)) -} diff --git a/node_modules/npm-registry-client/lib/get.js b/node_modules/npm-registry-client/lib/get.js deleted file mode 100644 index ab0eae10f0121..0000000000000 --- a/node_modules/npm-registry-client/lib/get.js +++ /dev/null @@ -1,22 +0,0 @@ -module.exports = get - -var assert = require('assert') -var url = require('url') - -/* - * This is meant to be overridden in specific implementations if you - * want specialized behavior for metadata (i.e. caching). 
- */ -function get (uri, params, cb) { - assert(typeof uri === 'string', 'must pass registry URI to get') - assert(params && typeof params === 'object', 'must pass params to get') - assert(typeof cb === 'function', 'must pass callback to get') - - var parsed = url.parse(uri) - assert( - parsed.protocol === 'http:' || parsed.protocol === 'https:', - 'must have a URL that starts with http: or https:' - ) - - this.request(uri, params, cb) -} diff --git a/node_modules/npm-registry-client/lib/initialize.js b/node_modules/npm-registry-client/lib/initialize.js deleted file mode 100644 index a25077eae5524..0000000000000 --- a/node_modules/npm-registry-client/lib/initialize.js +++ /dev/null @@ -1,91 +0,0 @@ -var crypto = require('crypto') -var HttpAgent = require('http').Agent -var HttpsAgent = require('https').Agent - -var pkg = require('../package.json') - -module.exports = initialize - -function initialize (uri, method, accept, headers) { - if (!this.config.sessionToken) { - this.config.sessionToken = crypto.randomBytes(8).toString('hex') - this.log.verbose('request id', this.config.sessionToken) - } - if (this.config.isFromCI == null) { - this.config.isFromCI = Boolean( - process.env['CI'] === 'true' || process.env['TDDIUM'] || - process.env['JENKINS_URL'] || process.env['bamboo.buildKey'] || - process.env['GO_PIPELINE_NAME']) - } - - var opts = { - url: uri, - method: method, - headers: headers, - localAddress: this.config.proxy.localAddress, - strictSSL: this.config.ssl.strict, - cert: this.config.ssl.certificate, - key: this.config.ssl.key, - ca: this.config.ssl.ca, - agent: getAgent.call(this, uri.protocol) - } - - // allow explicit disabling of proxy in environment via CLI - // - // how false gets here is the CLI's problem (it's gross) - if (this.config.proxy.http === false) { - opts.proxy = null - } else { - // request will not pay attention to the NOPROXY environment variable if a - // config value named proxy is passed in, even if it's set to null. 
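The doc comment on `get` just above notes that the method is deliberately thin so specific implementations can override it for specialized metadata behavior such as caching. A minimal sketch of what such an override could look like, assuming a client constructed the documented way (`new RegClient()`); the in-memory `Map`, the lack of TTL/etag handling, and the variable names are simplifications for illustration, not how the npm CLI's real caching layer works:

``` javascript
var RegClient = require('npm-registry-client')
var client = new RegClient()

// Naive metadata cache keyed by URI; a real implementation would also honor
// etags, last-modified headers and cache invalidation.
var metadataCache = new Map()
var originalGet = client.get.bind(client)

client.get = function (uri, params, cb) {
  var hit = metadataCache.get(uri)
  if (hit) return process.nextTick(function () { cb(null, hit) })

  originalGet(uri, params, function (er, data, raw, res) {
    if (!er && data) metadataCache.set(uri, data)
    cb(er, data, raw, res)
  })
}
```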
- var proxy - if (uri.protocol === 'https:') { - proxy = this.config.proxy.https - } else { - proxy = this.config.proxy.http - } - if (typeof proxy === 'string') opts.proxy = proxy - } - - headers.version = this.version || pkg.version - headers.accept = accept - - if (this.refer) headers.referer = this.refer - - headers['npm-session'] = this.config.sessionToken - headers['npm-in-ci'] = String(this.config.isFromCI) - headers['user-agent'] = this.config.userAgent - if (this.config.scope) { - headers['npm-scope'] = this.config.scope - } - - return opts -} - -function getAgent (protocol) { - if (protocol === 'https:') { - if (!this.httpsAgent) { - this.httpsAgent = new HttpsAgent({ - keepAlive: true, - maxSockets: this.config.maxSockets, - localAddress: this.config.proxy.localAddress, - rejectUnauthorized: this.config.ssl.strict, - ca: this.config.ssl.ca, - cert: this.config.ssl.certificate, - key: this.config.ssl.key - }) - } - - return this.httpsAgent - } else { - if (!this.httpAgent) { - this.httpAgent = new HttpAgent({ - keepAlive: true, - maxSockets: this.config.maxSockets, - localAddress: this.config.proxy.localAddress - }) - } - - return this.httpAgent - } -} diff --git a/node_modules/npm-registry-client/lib/logout.js b/node_modules/npm-registry-client/lib/logout.js deleted file mode 100644 index e66e9b78ac5dd..0000000000000 --- a/node_modules/npm-registry-client/lib/logout.js +++ /dev/null @@ -1,23 +0,0 @@ -module.exports = logout - -var assert = require('assert') -var url = require('url') - -function logout (uri, params, cb) { - assert(typeof uri === 'string', 'must pass registry URI to logout') - assert(params && typeof params === 'object', 'must pass params to logout') - assert(typeof cb === 'function', 'must pass callback to star') - - var auth = params.auth - assert(auth && typeof auth === 'object', 'must pass auth to logout') - assert(typeof auth.token === 'string', 'can only log out for token auth') - - uri = url.resolve(uri, '-/user/token/' + auth.token) - var options = { - method: 'DELETE', - auth: auth - } - - this.log.verbose('logout', 'invalidating session token for user') - this.request(uri, options, cb) -} diff --git a/node_modules/npm-registry-client/lib/org.js b/node_modules/npm-registry-client/lib/org.js deleted file mode 100644 index 3072b3817a8cf..0000000000000 --- a/node_modules/npm-registry-client/lib/org.js +++ /dev/null @@ -1,62 +0,0 @@ -'use strict' - -module.exports = org - -var assert = require('assert') -var url = require('url') - -var subcommands = {} - -function org (subcommand, uri, params, cb) { - orgAssertions(subcommand, uri, params, cb) - return subcommands[subcommand].call(this, uri, params, cb) -} - -subcommands.set = subcommands.add = function (uri, params, cb) { - return this.request(apiUri(uri, 'org', params.org, 'user'), { - method: 'PUT', - auth: params.auth, - body: JSON.stringify({ - user: params.user, - role: params.role - }) - }, cb) -} - -subcommands.rm = function (uri, params, cb) { - return this.request(apiUri(uri, 'org', params.org, 'user'), { - method: 'DELETE', - auth: params.auth, - body: JSON.stringify({ - user: params.user - }) - }, cb) -} - -subcommands.ls = function (uri, params, cb) { - return this.request(apiUri(uri, 'org', params.org, 'user'), { - method: 'GET', - auth: params.auth - }, cb) -} - -function apiUri (registryUri) { - var path = Array.prototype.slice.call(arguments, 1) - .map(encodeURIComponent) - .join('/') - return url.resolve(registryUri, '-/' + path) -} - -function orgAssertions (subcommand, uri, params, cb) { - 
assert(subcommand, 'subcommand is required') - assert(subcommands.hasOwnProperty(subcommand), - 'org subcommand must be one of ' + Object.keys(subcommands)) - assert(typeof uri === 'string', 'registry URI is required') - assert(typeof params === 'object', 'params are required') - assert(typeof params.auth === 'object', 'auth is required') - assert(!cb || typeof cb === 'function', 'callback must be a function') - assert(typeof params.org === 'string', 'org name is required') - if (subcommand === 'rm' || subcommand === 'add' || subcommand === 'set') { - assert(typeof params.user === 'string', 'user is required') - } -} diff --git a/node_modules/npm-registry-client/lib/ping.js b/node_modules/npm-registry-client/lib/ping.js deleted file mode 100644 index 5ab98c52d2c75..0000000000000 --- a/node_modules/npm-registry-client/lib/ping.js +++ /dev/null @@ -1,21 +0,0 @@ -module.exports = ping - -var url = require('url') -var assert = require('assert') - -function ping (uri, params, cb) { - assert(typeof uri === 'string', 'must pass registry URI to ping') - assert(params && typeof params === 'object', 'must pass params to ping') - assert(typeof cb === 'function', 'must pass callback to ping') - - var auth = params.auth - assert(auth && typeof auth === 'object', 'must pass auth to ping') - - this.request(url.resolve(uri, '-/ping?write=true'), { auth: auth }, function (er, fullData, data, response) { - if (er || fullData) { - cb(er, fullData, data, response) - } else { - cb(new Error('No data received')) - } - }) -} diff --git a/node_modules/npm-registry-client/lib/publish.js b/node_modules/npm-registry-client/lib/publish.js deleted file mode 100644 index fd3adce126e81..0000000000000 --- a/node_modules/npm-registry-client/lib/publish.js +++ /dev/null @@ -1,194 +0,0 @@ -module.exports = publish - -var url = require('url') -var semver = require('semver') -var Stream = require('stream').Stream -var assert = require('assert') -var fixer = require('normalize-package-data').fixer -var concat = require('concat-stream') -var ssri = require('ssri') - -function escaped (name) { - return name.replace('/', '%2f') -} - -function publish (uri, params, cb) { - assert(typeof uri === 'string', 'must pass registry URI to publish') - assert(params && typeof params === 'object', 'must pass params to publish') - assert(typeof cb === 'function', 'must pass callback to publish') - - var access = params.access - assert( - (!access) || ['public', 'restricted'].indexOf(access) !== -1, - "if present, access level must be either 'public' or 'restricted'" - ) - - var auth = params.auth - assert(auth && typeof auth === 'object', 'must pass auth to publish') - if (!(auth.token || - (auth.password && auth.username && auth.email))) { - var er = new Error('auth required for publishing') - er.code = 'ENEEDAUTH' - return cb(er) - } - - var metadata = params.metadata - assert( - metadata && typeof metadata === 'object', - 'must pass package metadata to publish' - ) - try { - fixer.fixNameField(metadata, {strict: true, allowLegacyCase: true}) - } catch (er) { - return cb(er) - } - var version = semver.clean(metadata.version) - if (!version) return cb(new Error('invalid semver: ' + metadata.version)) - metadata.version = version - - var body = params.body - assert(body, 'must pass package body to publish') - assert(body instanceof Stream, 'package body passed to publish must be a stream') - var client = this - var sink = concat(function (tarbuffer) { - putFirst.call(client, uri, metadata, tarbuffer, access, auth, cb) - }) - sink.on('error', 
cb) - body.pipe(sink) -} - -function putFirst (registry, data, tarbuffer, access, auth, cb) { - // optimistically try to PUT all in one single atomic thing. - // If 409, then GET and merge, try again. - // If other error, then fail. - - var root = { - _id: data.name, - name: data.name, - description: data.description, - 'dist-tags': {}, - versions: {}, - readme: data.readme || '' - } - - if (access) root.access = access - - if (!auth.token) { - root.maintainers = [{ name: auth.username, email: auth.email }] - data.maintainers = JSON.parse(JSON.stringify(root.maintainers)) - } - - root.versions[ data.version ] = data - var tag = data.tag || this.config.defaultTag - root['dist-tags'][tag] = data.version - - var tbName = data.name + '-' + data.version + '.tgz' - var tbURI = data.name + '/-/' + tbName - var integrity = ssri.fromData(tarbuffer, { - algorithms: ['sha1', 'sha512'] - }) - - data._id = data.name + '@' + data.version - data.dist = data.dist || {} - // Don't bother having sha1 in the actual integrity field - data.dist.integrity = integrity['sha512'][0].toString() - // Legacy shasum support - data.dist.shasum = integrity['sha1'][0].hexDigest() - data.dist.tarball = url.resolve(registry, tbURI) - .replace(/^https:\/\//, 'http://') - - root._attachments = {} - root._attachments[ tbName ] = { - 'content_type': 'application/octet-stream', - 'data': tarbuffer.toString('base64'), - 'length': tarbuffer.length - } - - var fixed = url.resolve(registry, escaped(data.name)) - var client = this - var options = { - method: 'PUT', - body: root, - auth: auth - } - this.request(fixed, options, function (er, parsed, json, res) { - var r409 = 'must supply latest _rev to update existing package' - var r409b = 'Document update conflict.' - var conflict = res && res.statusCode === 409 - if (parsed && (parsed.reason === r409 || parsed.reason === r409b)) { - conflict = true - } - - // a 409 is typical here. GET the data and merge in. - if (er && !conflict) { - client.log.error('publish', 'Failed PUT ' + (res && res.statusCode)) - return cb(er) - } - - if (!er && !conflict) return cb(er, parsed, json, res) - - // let's see what versions are already published. 
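The comment above picks up the strategy announced at the top of `putFirst`: PUT the whole document optimistically, and on a 409 conflict fetch the current packument with `?write=true` and merge the new version into it before retrying. A condensed sketch of that merge step follows; the real `putNext` below also copies the remaining top-level fields and reattaches maintainers, and the helper name here is an assumption for the example only:

``` javascript
// Simplified conflict merge: refuse to overwrite a version that is already
// published, otherwise fold the new version, dist-tags and tarball attachment
// into the freshly fetched document.
function mergeForRepublish (current, root, newVersion) {
  if (current.versions && current.versions[newVersion]) {
    var er = new Error('cannot modify pre-existing version')
    er.code = 'EPUBLISHCONFLICT'
    throw er
  }

  current.versions = current.versions || {}
  current['dist-tags'] = current['dist-tags'] || {}
  current._attachments = current._attachments || {}

  current.versions[newVersion] = root.versions[newVersion]
  Object.assign(current['dist-tags'], root['dist-tags'])
  Object.assign(current._attachments, root._attachments)

  return current // PUT this back against the current document's _rev
}
```

The real implementation also runs the existing version keys through `semver.clean` before comparing, so loosely formatted historical versions still count as conflicts.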
- client.request(fixed + '?write=true', { auth: auth }, function (er, current) { - if (er) return cb(er) - - putNext.call(client, registry, data.version, root, current, auth, cb) - }) - }) -} - -function putNext (registry, newVersion, root, current, auth, cb) { - // already have the tardata on the root object - // just merge in existing stuff - var curVers = Object.keys(current.versions || {}).map(function (v) { - return semver.clean(v, true) - }).concat(Object.keys(current.time || {}).map(function (v) { - if (semver.valid(v, true)) return semver.clean(v, true) - }).filter(function (v) { - return v - })) - - if (curVers.indexOf(newVersion) !== -1) { - return cb(conflictError(root.name, newVersion)) - } - - current.versions[newVersion] = root.versions[newVersion] - current._attachments = current._attachments || {} - for (var i in root) { - switch (i) { - // objects that copy over the new stuffs - case 'dist-tags': - case 'versions': - case '_attachments': - for (var j in root[i]) { - current[i][j] = root[i][j] - } - break - - // ignore these - case 'maintainers': - break - - // copy - default: - current[i] = root[i] - } - } - var maint = JSON.parse(JSON.stringify(root.maintainers)) - root.versions[newVersion].maintainers = maint - - var uri = url.resolve(registry, escaped(root.name)) - var options = { - method: 'PUT', - body: current, - auth: auth - } - this.request(uri, options, cb) -} - -function conflictError (pkgid, version) { - var e = new Error('cannot modify pre-existing version') - e.code = 'EPUBLISHCONFLICT' - e.pkgid = pkgid - e.version = version - return e -} diff --git a/node_modules/npm-registry-client/lib/request.js b/node_modules/npm-registry-client/lib/request.js deleted file mode 100644 index 5987bfa6fb0e4..0000000000000 --- a/node_modules/npm-registry-client/lib/request.js +++ /dev/null @@ -1,336 +0,0 @@ -module.exports = regRequest - -// npm: means -// 1. https -// 2. send authorization -// 3. content-type is 'application/json' -- metadata -// -var assert = require('assert') -var url = require('url') -var zlib = require('zlib') -var Stream = require('stream').Stream -var STATUS_CODES = require('http').STATUS_CODES - -var request = require('request') -var once = require('once') - -function regRequest (uri, params, cb_) { - assert(typeof uri === 'string', 'must pass uri to request') - assert(params && typeof params === 'object', 'must pass params to request') - assert(typeof cb_ === 'function', 'must pass callback to request') - - params.method = params.method || 'GET' - this.log.verbose('request', 'uri', uri) - - // Since there are multiple places where an error could occur, - // don't let the cb be called more than once. 
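Because the request object, its socket, and the response handling can each report an error independently, the callback is wrapped with the `once` module so only the first outcome is delivered. A tiny standalone demonstration of that behavior:

``` javascript
var once = require('once')

function flakyOperation (cb_) {
  var cb = once(cb_)
  cb(null, 'first result')            // delivered to the caller
  cb(new Error('late socket error'))  // silently ignored; cb_ already ran
}

flakyOperation(function (er, value) {
  console.log(er, value) // printed exactly once: null 'first result'
})
```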
- var cb = once(cb_) - - if (uri.match(/^\/?favicon.ico/)) { - return cb(new Error("favicon.ico isn't a package, it's a picture.")) - } - - var adduserChange = /\/?-\/user\/org\.couchdb\.user:([^/]+)\/-rev/ - var isUserChange = uri.match(adduserChange) - var adduserNew = /\/?-\/user\/org\.couchdb\.user:([^/?]+)$/ - var isNewUser = uri.match(adduserNew) - var alwaysAuth = params.auth && params.auth.alwaysAuth - var isDelete = params.method === 'DELETE' - var isWrite = params.body || isDelete - - if (isUserChange && !isWrite) { - return cb(new Error('trying to change user document without writing(?!)')) - } - - if (params.authed == null) { - // new users can *not* use auth, because they don't *have* auth yet - if (isUserChange) { - this.log.verbose('request', 'updating existing user; sending authorization') - params.authed = true - } else if (isNewUser) { - this.log.verbose('request', "new user, so can't send auth") - params.authed = false - } else if (alwaysAuth) { - this.log.verbose('request', 'always-auth set; sending authorization') - params.authed = true - } else if (isWrite) { - this.log.verbose('request', 'sending authorization for write operation') - params.authed = true - } else { - // most of the time we don't want to auth - this.log.verbose('request', 'no auth needed') - params.authed = false - } - } - - var self = this - this.attempt(function (operation) { - makeRequest.call(self, uri, params, function (er, parsed, raw, response) { - if (response) { - self.log.verbose('headers', response.headers) - if (response.headers['npm-notice']) { - self.log.warn('notice', response.headers['npm-notice']) - } - } - - if (!er || (er.message && er.message.match(/^SSL Error/))) { - if (er) er.code = 'ESSL' - return cb(er, parsed, raw, response) - } - - // Only retry on 408, 5xx or no `response`. - var statusCode = response && response.statusCode - - var timeout = statusCode === 408 - var serverError = statusCode >= 500 - var statusRetry = !statusCode || timeout || serverError - if (er && statusRetry && operation.retry(er)) { - self.log.info('retry', 'will retry, error on last attempt: ' + er) - return undefined - } - cb.apply(null, arguments) - }) - }) -} - -function makeRequest (uri, params, cb_) { - var socket - var cb = once(function (er, parsed, raw, response) { - if (socket) { - // The socket might be returned to a pool for re-use, so don’t keep - // the 'error' listener from here attached. - socket.removeListener('error', cb) - } - - return cb_(er, parsed, raw, response) - }) - - var parsed = url.parse(uri) - var headers = {} - - // metadata should be compressed - headers['accept-encoding'] = 'gzip' - - // metadata should be minified, if the registry supports it - - var er = this.authify(params.authed, parsed, headers, params.auth) - if (er) return cb_(er) - - var useCorgi = params.fullMetadata == null ? false : !params.fullMetadata - - var opts = this.initialize( - parsed, - params.method, - useCorgi ? 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*' : 'application/json', - headers - ) - - opts.followRedirect = (typeof params.follow === 'boolean' ? params.follow : true) - opts.encoding = null // tell request let body be Buffer instance - - if (params.etag) { - this.log.verbose('etag', params.etag) - headers[params.method === 'GET' ? 
'if-none-match' : 'if-match'] = params.etag - } - - if (params.lastModified && params.method === 'GET') { - this.log.verbose('lastModified', params.lastModified) - headers['if-modified-since'] = params.lastModified - } - - // figure out wth body is - if (params.body) { - if (Buffer.isBuffer(params.body)) { - opts.body = params.body - headers['content-type'] = 'application/json' - headers['content-length'] = params.body.length - } else if (typeof params.body === 'string') { - opts.body = params.body - headers['content-type'] = 'application/json' - headers['content-length'] = Buffer.byteLength(params.body) - } else if (params.body instanceof Stream) { - headers['content-type'] = 'application/octet-stream' - if (params.body.size) headers['content-length'] = params.body.size - } else { - delete params.body._etag - delete params.body._lastModified - opts.json = params.body - } - } - - this.log.http('request', params.method, parsed.href || '/') - - var done = requestDone.call(this, params.method, uri, cb) - var req = request(opts, params.streaming ? undefined : decodeResponseBody(done)) - - req.on('error', cb) - - // This should not be necessary, as the HTTP implementation in Node - // passes errors occurring on the socket to the request itself. Being overly - // cautious comes at a low cost, though. - req.on('socket', function (s) { - socket = s - socket.on('error', cb) - }) - - if (params.streaming) { - req.on('response', function (response) { - if (response.statusCode >= 400) { - var parts = [] - response.on('data', function (data) { - parts.push(data) - }) - response.on('end', function () { - decodeResponseBody(done)(null, response, Buffer.concat(parts)) - }) - } else { - response.on('end', function () { - // don't ever re-use connections that had server errors. - // those sockets connect to the Bad Place! - if (response.socket && response.statusCode > 500) { - response.socket.destroy() - } - }) - - return cb(null, response) - } - }) - } - - if (params.body && (params.body instanceof Stream)) { - params.body.pipe(req) - } -} - -function decodeResponseBody (cb) { - return function (er, response, data) { - if (er) return cb(er, response, data) - - // don't ever re-use connections that had server errors. - // those sockets connect to the Bad Place! 
- if (response.socket && response.statusCode > 500) { - response.socket.destroy() - } - - if (response.headers['content-encoding'] !== 'gzip') { - return cb(er, response, data) - } - - zlib.gunzip(data, function (er, buf) { - if (er) return cb(er, response, data) - - cb(null, response, buf) - }) - } -} - -// cb(er, parsed, raw, response) -function requestDone (method, where, cb) { - return function (er, response, data) { - if (er) return cb(er) - - var urlObj = url.parse(where) - if (urlObj.auth) urlObj.auth = '***' - this.log.http(response.statusCode, url.format(urlObj)) - - if (Buffer.isBuffer(data)) { - data = data.toString() - } - - var parsed - if (data && typeof data === 'string' && response.statusCode !== 304) { - try { - parsed = JSON.parse(data) - } catch (ex) { - ex.message += '\n' + data - this.log.verbose('bad json', data) - this.log.error('registry', 'error parsing json') - return cb(ex, null, data, response) - } - } else if (data) { - parsed = data - data = JSON.stringify(parsed) - } - - // expect data with any error codes - if (!data && response.statusCode >= 400) { - var code = response.statusCode - return cb( - makeError(code + ' ' + STATUS_CODES[code], null, code), - null, - data, - response - ) - } - - er = null - if (parsed && response.headers.etag) { - parsed._etag = response.headers.etag - } - - if (parsed && response.headers['last-modified']) { - parsed._lastModified = response.headers['last-modified'] - } - - // for the search endpoint, the 'error' property can be an object - if ((parsed && parsed.error && typeof parsed.error !== 'object') || - response.statusCode >= 400) { - var w = url.parse(where).pathname.substr(1) - var name - if (!w.match(/^-/)) { - w = w.split('/') - var index = w.indexOf('_rewrite') - if (index === -1) { - index = w.length - 1 - } else { - index++ - } - name = decodeURIComponent(w[index]) - } - - if (!parsed.error) { - if (response.statusCode === 401 && response.headers['www-authenticate']) { - const auth = response.headers['www-authenticate'].split(/,\s*/).map(s => s.toLowerCase()) - if (auth.indexOf('ipaddress') !== -1) { - er = makeError('Login is not allowed from your IP address', name, response.statusCode, 'EAUTHIP') - } else if (auth.indexOf('otp') !== -1) { - er = makeError('OTP required for this operation', name, response.statusCode, 'EOTP') - } else { - er = makeError('Unable to authenticate, need: ' + response.headers['www-authenticate'], name, response.statusCode, 'EAUTHUNKNOWN') - } - } else { - const msg = parsed.message ? ': ' + parsed.message : '' - er = makeError( - 'Registry returned ' + response.statusCode + - ' for ' + method + - ' on ' + where + - msg, - name, - response.statusCode - ) - } - } else if (name && parsed.error === 'not_found') { - er = makeError('404 Not Found: ' + name, name, response.statusCode) - } else if (name && parsed.error === 'User not found') { - er = makeError('User not found. 
Check `npm whoami` and make sure you have a NPM account.', name, response.statusCode) - } else { - er = makeError( - parsed.error + ' ' + (parsed.reason || '') + ': ' + (name || w), - name, - response.statusCode - ) - } - } - return cb(er, parsed, data, response) - }.bind(this) -} - -function makeError (message, name, statusCode, code) { - var er = new Error(message) - if (name) er.pkgid = name - if (statusCode) { - er.statusCode = statusCode - er.code = code || 'E' + statusCode - } - return er -} diff --git a/node_modules/npm-registry-client/lib/send-anonymous-CLI-metrics.js b/node_modules/npm-registry-client/lib/send-anonymous-CLI-metrics.js deleted file mode 100644 index b5b7a1dca15e7..0000000000000 --- a/node_modules/npm-registry-client/lib/send-anonymous-CLI-metrics.js +++ /dev/null @@ -1,19 +0,0 @@ -module.exports = send - -var assert = require('assert') -var url = require('url') - -function send (registryUrl, params, cb) { - assert(typeof registryUrl === 'string', 'must pass registry URI') - assert(params && typeof params === 'object', 'must pass params') - assert(typeof cb === 'function', 'must pass callback') - - var uri = url.resolve(registryUrl, '-/npm/anon-metrics/v1/' + - encodeURIComponent(params.metricId)) - - this.request(uri, { - method: 'PUT', - body: JSON.stringify(params.metrics), - authed: false - }, cb) -} diff --git a/node_modules/npm-registry-client/lib/star.js b/node_modules/npm-registry-client/lib/star.js deleted file mode 100644 index 5c9224eaa21a7..0000000000000 --- a/node_modules/npm-registry-client/lib/star.js +++ /dev/null @@ -1,51 +0,0 @@ -module.exports = star - -var assert = require('assert') - -function star (uri, params, cb) { - assert(typeof uri === 'string', 'must pass registry URI to star') - assert(params && typeof params === 'object', 'must pass params to star') - assert(typeof cb === 'function', 'must pass callback to star') - - var starred = !!params.starred - - var auth = params.auth - assert(auth && typeof auth === 'object', 'must pass auth to star') - if (!(auth.token || (auth.password && auth.username && auth.email))) { - var er = new Error('Must be logged in to star/unstar packages') - er.code = 'ENEEDAUTH' - return cb(er) - } - - var client = this - this.request(uri + '?write=true', { auth: auth }, function (er, fullData) { - if (er) return cb(er) - - client.whoami(uri, params, function (er, username) { - if (er) return cb(er) - - var data = { - _id: fullData._id, - _rev: fullData._rev, - users: fullData.users || {} - } - - if (starred) { - client.log.info('starring', data._id) - data.users[username] = true - client.log.verbose('starring', data) - } else { - delete data.users[username] - client.log.info('unstarring', data._id) - client.log.verbose('unstarring', data) - } - - var options = { - method: 'PUT', - body: data, - auth: auth - } - return client.request(uri, options, cb) - }) - }) -} diff --git a/node_modules/npm-registry-client/lib/stars.js b/node_modules/npm-registry-client/lib/stars.js deleted file mode 100644 index ba47f2c1efc46..0000000000000 --- a/node_modules/npm-registry-client/lib/stars.js +++ /dev/null @@ -1,18 +0,0 @@ -module.exports = stars - -var assert = require('assert') -var url = require('url') - -function stars (uri, params, cb) { - assert(typeof uri === 'string', 'must pass registry URI to stars') - assert(params && typeof params === 'object', 'must pass params to stars') - assert(typeof cb === 'function', 'must pass callback to stars') - - var auth = params.auth - var name = params.username || (auth && 
auth.username) - if (!name) return cb(new Error('must pass either username or auth to stars')) - var encoded = encodeURIComponent(name) - var path = '-/_view/starredByUser?key="' + encoded + '"' - - this.request(url.resolve(uri, path), { auth: auth }, cb) -} diff --git a/node_modules/npm-registry-client/lib/tag.js b/node_modules/npm-registry-client/lib/tag.js deleted file mode 100644 index 3b6dad1df26ca..0000000000000 --- a/node_modules/npm-registry-client/lib/tag.js +++ /dev/null @@ -1,23 +0,0 @@ -module.exports = tag - -var assert = require('assert') - -function tag (uri, params, cb) { - assert(typeof uri === 'string', 'must pass registry URI to tag') - assert(params && typeof params === 'object', 'must pass params to tag') - assert(typeof cb === 'function', 'must pass callback to tag') - - assert(typeof params.version === 'string', 'must pass version to tag') - assert(typeof params.tag === 'string', 'must pass tag name to tag') - assert( - params.auth && typeof params.auth === 'object', - 'must pass auth to tag' - ) - - var options = { - method: 'PUT', - body: JSON.stringify(params.version), - auth: params.auth - } - this.request(uri + '/' + params.tag, options, cb) -} diff --git a/node_modules/npm-registry-client/lib/team.js b/node_modules/npm-registry-client/lib/team.js deleted file mode 100644 index 327fa9cd5abf1..0000000000000 --- a/node_modules/npm-registry-client/lib/team.js +++ /dev/null @@ -1,105 +0,0 @@ -module.exports = team - -var assert = require('assert') -var url = require('url') - -var subcommands = {} - -function team (sub, uri, params, cb) { - teamAssertions(sub, uri, params, cb) - return subcommands[sub].call(this, uri, params, cb) -} - -subcommands.create = function (uri, params, cb) { - return this.request(apiUri(uri, 'org', params.scope, 'team'), { - method: 'PUT', - auth: params.auth, - body: JSON.stringify({ - name: params.team - }) - }, cb) -} - -subcommands.destroy = function (uri, params, cb) { - return this.request(apiUri(uri, 'team', params.scope, params.team), { - method: 'DELETE', - auth: params.auth - }, cb) -} - -subcommands.add = function (uri, params, cb) { - return this.request(apiUri(uri, 'team', params.scope, params.team, 'user'), { - method: 'PUT', - auth: params.auth, - body: JSON.stringify({ - user: params.user - }) - }, cb) -} - -subcommands.rm = function (uri, params, cb) { - return this.request(apiUri(uri, 'team', params.scope, params.team, 'user'), { - method: 'DELETE', - auth: params.auth, - body: JSON.stringify({ - user: params.user - }) - }, cb) -} - -subcommands.ls = function (uri, params, cb) { - var uriParams = '?format=cli' - if (params.team) { - var reqUri = apiUri( - uri, 'team', params.scope, params.team, 'user') + uriParams - return this.request(reqUri, { - method: 'GET', - auth: params.auth - }, cb) - } else { - return this.request(apiUri(uri, 'org', params.scope, 'team') + uriParams, { - method: 'GET', - auth: params.auth - }, cb) - } -} - -// TODO - we punted this to v2 -// subcommands.edit = function (uri, params, cb) { -// return this.request(apiUri(uri, 'team', params.scope, params.team, 'user'), { -// method: 'POST', -// auth: params.auth, -// body: JSON.stringify({ -// users: params.users -// }) -// }, cb) -// } - -function apiUri (registryUri) { - var path = Array.prototype.slice.call(arguments, 1) - .map(encodeURIComponent) - .join('/') - return url.resolve(registryUri, '-/' + path) -} - -function teamAssertions (subcommand, uri, params, cb) { - assert(subcommand, 'subcommand is required') - 
assert(subcommands.hasOwnProperty(subcommand), - 'team subcommand must be one of ' + Object.keys(subcommands)) - assert(typeof uri === 'string', 'registry URI is required') - assert(typeof params === 'object', 'params are required') - assert(typeof params.auth === 'object', 'auth is required') - assert(typeof params.scope === 'string', 'scope is required') - assert(!cb || typeof cb === 'function', 'callback must be a function') - if (subcommand !== 'ls') { - assert(typeof params.team === 'string', 'team name is required') - } - if (subcommand === 'rm' || subcommand === 'add') { - assert(typeof params.user === 'string', 'user is required') - } - if (subcommand === 'edit') { - assert(typeof params.users === 'object' && - params.users.length != null, - 'users is required') - } -} diff --git a/node_modules/npm-registry-client/lib/unpublish.js b/node_modules/npm-registry-client/lib/unpublish.js deleted file mode 100644 index 05c5a4b611006..0000000000000 --- a/node_modules/npm-registry-client/lib/unpublish.js +++ /dev/null @@ -1,120 +0,0 @@ -module.exports = unpublish - -// fetch the data -// modify to remove the version in question -// If no versions remaining, then DELETE -// else, PUT the modified data -// delete the tarball - -var semver = require('semver') -var url = require('url') -var chain = require('slide').chain -var assert = require('assert') - -function unpublish (uri, params, cb) { - assert(typeof uri === 'string', 'must pass registry URI to unpublish') - assert(params && typeof params === 'object', 'must pass params to unpublish') - assert(typeof cb === 'function', 'must pass callback to unpublish') - - var ver = params.version - var auth = params.auth - assert(auth && typeof auth === 'object', 'must pass auth to unpublish') - - var options = { - timeout: -1, - follow: false, - auth: auth - } - this.get(uri + '?write=true', options, function (er, data) { - if (er) { - this.log.info('unpublish', uri + ' not published') - return cb() - } - // remove all if no version specified - if (!ver) { - this.log.info('unpublish', 'No version specified, removing all') - return this.request(uri + '/-rev/' + data._rev, { method: 'DELETE', auth: auth }, cb) - } - - var versions = data.versions || {} - var versionPublic = versions.hasOwnProperty(ver) - - var dist - if (!versionPublic) { - this.log.info('unpublish', uri + '@' + ver + ' not published') - } else { - dist = versions[ver].dist - this.log.verbose('unpublish', 'removing attachments', dist) - } - - delete versions[ver] - // if it was the only version, then delete the whole package. 
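The comments in this function lay out the whole decision tree: with no version given, delete the entire document; if removing the requested version would leave nothing behind, delete the whole package as well; otherwise PUT the trimmed document and then detach the orphaned tarball. A condensed sketch of that decision, with the function name and return shape assumed purely for illustration:

``` javascript
function planUnpublish (doc, version) {
  // no version given: remove the whole package document
  if (!version) return { action: 'delete-package' }

  var versions = Object.assign({}, doc.versions)
  var wasPublished = Object.prototype.hasOwnProperty.call(versions, version)
  delete versions[version]

  // that was the only version on record: also remove the whole package
  if (Object.keys(versions).length === 0) return { action: 'delete-package' }

  // the requested version was never published: nothing left to do
  if (!wasPublished) return { action: 'noop' }

  // otherwise update the document, then delete the now-orphaned tarball
  return { action: 'update-then-detach', remaining: Object.keys(versions) }
}

console.log(planUnpublish({ versions: { '1.0.0': {}, '1.0.1': {} } }, '1.0.1'))
// => { action: 'update-then-detach', remaining: [ '1.0.0' ] }
```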
- if (!Object.keys(versions).length) { - this.log.info('unpublish', 'No versions remain, removing entire package') - return this.request(uri + '/-rev/' + data._rev, { method: 'DELETE', auth: auth }, cb) - } - - if (!versionPublic) return cb() - - var latestVer = data['dist-tags'].latest - for (var tag in data['dist-tags']) { - if (data['dist-tags'][tag] === ver) delete data['dist-tags'][tag] - } - - if (latestVer === ver) { - data['dist-tags'].latest = - Object.getOwnPropertyNames(versions).sort(semver.compareLoose).pop() - } - - var rev = data._rev - delete data._revisions - delete data._attachments - var cb_ = detacher.call(this, uri, data, dist, auth, cb) - - this.request(uri + '/-rev/' + rev, { method: 'PUT', body: data, auth: auth }, function (er) { - if (er) { - this.log.error('unpublish', 'Failed to update data') - } - cb_(er) - }.bind(this)) - }.bind(this)) -} - -function detacher (uri, data, dist, credentials, cb) { - return function (er) { - if (er) return cb(er) - this.get(escape(uri, data.name), { auth: credentials }, function (er, data) { - if (er) return cb(er) - - var tb = url.parse(dist.tarball) - - detach.call(this, uri, data, tb.pathname, data._rev, credentials, function (er) { - if (er || !dist.bin) return cb(er) - chain(Object.keys(dist.bin).map(function (bt) { - return function (cb) { - var d = dist.bin[bt] - detach.call(this, uri, data, url.parse(d.tarball).pathname, null, credentials, cb) - }.bind(this) - }, this), cb) - }.bind(this)) - }.bind(this)) - }.bind(this) -} - -function detach (uri, data, path, rev, credentials, cb) { - if (rev) { - path += '/-rev/' + rev - this.log.info('detach', path) - return this.request(url.resolve(uri, path), { method: 'DELETE', auth: credentials }, cb) - } - this.get(escape(uri, data.name), { auth: credentials }, function (er, data) { - rev = data._rev - if (!rev) return cb(new Error('No _rev found in ' + data._id)) - detach.call(this, data, path, rev, cb) - }.bind(this)) -} - -function escape (base, name) { - var escaped = name.replace(/\//, '%2f') - return url.resolve(base, escaped) -} diff --git a/node_modules/npm-registry-client/lib/whoami.js b/node_modules/npm-registry-client/lib/whoami.js deleted file mode 100644 index 68db49e59a402..0000000000000 --- a/node_modules/npm-registry-client/lib/whoami.js +++ /dev/null @@ -1,21 +0,0 @@ -module.exports = whoami - -var url = require('url') -var assert = require('assert') - -function whoami (uri, params, cb) { - assert(typeof uri === 'string', 'must pass registry URI to whoami') - assert(params && typeof params === 'object', 'must pass params to whoami') - assert(typeof cb === 'function', 'must pass callback to whoami') - - var auth = params.auth - assert(auth && typeof auth === 'object', 'must pass auth to whoami') - - if (auth.username) return process.nextTick(cb.bind(this, null, auth.username)) - - this.request(url.resolve(uri, '-/whoami'), { auth: auth }, function (er, userdata) { - if (er) return cb(er) - - cb(null, userdata.username) - }) -} diff --git a/node_modules/npm-registry-client/node_modules/retry/.npmignore b/node_modules/npm-registry-client/node_modules/retry/.npmignore deleted file mode 100644 index e7726a071b7f3..0000000000000 --- a/node_modules/npm-registry-client/node_modules/retry/.npmignore +++ /dev/null @@ -1,2 +0,0 @@ -/node_modules/* -npm-debug.log diff --git a/node_modules/npm-registry-client/node_modules/retry/License b/node_modules/npm-registry-client/node_modules/retry/License deleted file mode 100644 index 0b58de379fb30..0000000000000 --- 
a/node_modules/npm-registry-client/node_modules/retry/License +++ /dev/null @@ -1,21 +0,0 @@ -Copyright (c) 2011: -Tim Koschützki (tim@debuggable.com) -Felix Geisendörfer (felix@debuggable.com) - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in - all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN - THE SOFTWARE. diff --git a/node_modules/npm-registry-client/node_modules/retry/Makefile b/node_modules/npm-registry-client/node_modules/retry/Makefile deleted file mode 100644 index eee21a99dfc9e..0000000000000 --- a/node_modules/npm-registry-client/node_modules/retry/Makefile +++ /dev/null @@ -1,22 +0,0 @@ -SHELL := /bin/bash - -test: - @node test/runner.js - -release-major: test - npm version major -m "Release %s" - git push - npm publish - -release-minor: test - npm version minor -m "Release %s" - git push - npm publish - -release-patch: test - npm version patch -m "Release %s" - git push - npm publish - -.PHONY: test - diff --git a/node_modules/npm-registry-client/node_modules/retry/README.md b/node_modules/npm-registry-client/node_modules/retry/README.md deleted file mode 100644 index eee05f7bb6153..0000000000000 --- a/node_modules/npm-registry-client/node_modules/retry/README.md +++ /dev/null @@ -1,215 +0,0 @@ -# retry - -Abstraction for exponential and custom retry strategies for failed operations. - -## Installation - - npm install retry - -## Current Status - -This module has been tested and is ready to be used. - -## Tutorial - -The example below will retry a potentially failing `dns.resolve` operation -`10` times using an exponential backoff strategy. With the default settings, this -means the last attempt is made after `17 minutes and 3 seconds`. - -``` javascript -var dns = require('dns'); -var retry = require('retry'); - -function faultTolerantResolve(address, cb) { - var operation = retry.operation(); - - operation.attempt(function(currentAttempt) { - dns.resolve(address, function(err, addresses) { - if (operation.retry(err)) { - return; - } - - cb(err ? operation.mainError() : null, addresses); - }); - }); -} - -faultTolerantResolve('nodejs.org', function(err, addresses) { - console.log(err, addresses); -}); -``` - -Of course you can also configure the factors that go into the exponential -backoff. See the API documentation below for all available settings. -currentAttempt is an int representing the number of attempts so far. 
- -``` javascript -var operation = retry.operation({ - retries: 5, - factor: 3, - minTimeout: 1 * 1000, - maxTimeout: 60 * 1000, - randomize: true, -}); -``` - -## API - -### retry.operation([options]) - -Creates a new `RetryOperation` object. `options` is the same as `retry.timeouts()`'s `options`, with two additions: - -* `forever`: Whether to retry forever, defaults to `false`. -* `unref`: Wether to [unref](https://nodejs.org/api/timers.html#timers_unref) the setTimeout's, defaults to `false`. - -### retry.timeouts([options]) - -Returns an array of timeouts. All time `options` and return values are in -milliseconds. If `options` is an array, a copy of that array is returned. - -`options` is a JS object that can contain any of the following keys: - -* `retries`: The maximum amount of times to retry the operation. Default is `10`. -* `factor`: The exponential factor to use. Default is `2`. -* `minTimeout`: The number of milliseconds before starting the first retry. Default is `1000`. -* `maxTimeout`: The maximum number of milliseconds between two retries. Default is `Infinity`. -* `randomize`: Randomizes the timeouts by multiplying with a factor between `1` to `2`. Default is `false`. - -The formula used to calculate the individual timeouts is: - -``` -Math.min(random * minTimeout * Math.pow(factor, attempt), maxTimeout) -``` - -Have a look at [this article][article] for a better explanation of approach. - -If you want to tune your `factor` / `times` settings to attempt the last retry -after a certain amount of time, you can use wolfram alpha. For example in order -to tune for `10` attempts in `5 minutes`, you can use this equation: - -![screenshot](https://github.com/tim-kos/node-retry/raw/master/equation.gif) - -Explaining the various values from left to right: - -* `k = 0 ... 9`: The `retries` value (10) -* `1000`: The `minTimeout` value in ms (1000) -* `x^k`: No need to change this, `x` will be your resulting factor -* `5 * 60 * 1000`: The desired total amount of time for retrying in ms (5 minutes) - -To make this a little easier for you, use wolfram alpha to do the calculations: - - - -[article]: http://dthain.blogspot.com/2009/02/exponential-backoff-in-distributed.html - -### retry.createTimeout(attempt, opts) - -Returns a new `timeout` (integer in milliseconds) based on the given parameters. - -`attempt` is an integer representing for which retry the timeout should be calculated. If your retry operation was executed 4 times you had one attempt and 3 retries. If you then want to calculate a new timeout, you should set `attempt` to 4 (attempts are zero-indexed). - -`opts` can include `factor`, `minTimeout`, `randomize` (boolean) and `maxTimeout`. They are documented above. - -`retry.createTimeout()` is used internally by `retry.timeouts()` and is public for you to be able to create your own timeouts for reinserting an item, see [issue #13](https://github.com/tim-kos/node-retry/issues/13). - -### retry.wrap(obj, [options], [methodNames]) - -Wrap all functions of the `obj` with retry. Optionally you can pass operation options and -an array of method names which need to be wrapped. - -``` -retry.wrap(obj) - -retry.wrap(obj, ['method1', 'method2']) - -retry.wrap(obj, {retries: 3}) - -retry.wrap(obj, {retries: 3}, ['method1', 'method2']) -``` -The `options` object can take any options that the usual call to `retry.operation` can take. 
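As a quick sanity check of the formula and defaults documented above (10 retries, factor 2, 1000 ms minimum timeout, no randomization), the schedule and the tutorial's "17 minutes and 3 seconds" figure can be reproduced directly against this package:

``` javascript
var retry = require('retry')

// With the defaults the delays double from 1s up to 512s between attempts.
var timeouts = retry.timeouts({ retries: 10, factor: 2, minTimeout: 1000, randomize: false })
console.log(timeouts)
// => [ 1000, 2000, 4000, 8000, 16000, 32000, 64000, 128000, 256000, 512000 ]

// Total delay before the 11th and final attempt:
var totalMs = timeouts.reduce(function (sum, t) { return sum + t }, 0)
console.log(totalMs) // 1023000 ms, i.e. 17 minutes and 3 seconds

// The same last delay via the public helper (attempts are zero-indexed):
console.log(retry.createTimeout(9, { minTimeout: 1000, factor: 2, maxTimeout: Infinity })) // 512000
```

With `randomize: true` each delay is multiplied by a factor between 1 and 2, so the deterministic values above become lower bounds rather than exact figures.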
- -### new RetryOperation(timeouts, [options]) - -Creates a new `RetryOperation` where `timeouts` is an array where each value is -a timeout given in milliseconds. - -Available options: -* `forever`: Whether to retry forever, defaults to `false`. -* `unref`: Wether to [unref](https://nodejs.org/api/timers.html#timers_unref) the setTimeout's, defaults to `false`. - -If `forever` is true, the following changes happen: -* `RetryOperation.errors()` will only output an array of one item: the last error. -* `RetryOperation` will repeatedly use the `timeouts` array. Once all of its timeouts have been used up, it restarts with the first timeout, then uses the second and so on. - -#### retryOperation.errors() - -Returns an array of all errors that have been passed to -`retryOperation.retry()` so far. - -#### retryOperation.mainError() - -A reference to the error object that occured most frequently. Errors are -compared using the `error.message` property. - -If multiple error messages occured the same amount of time, the last error -object with that message is returned. - -If no errors occured so far, the value is `null`. - -#### retryOperation.attempt(fn, timeoutOps) - -Defines the function `fn` that is to be retried and executes it for the first -time right away. The `fn` function can receive an optional `currentAttempt` callback that represents the number of attempts to execute `fn` so far. - -Optionally defines `timeoutOps` which is an object having a property `timeout` in miliseconds and a property `cb` callback function. -Whenever your retry operation takes longer than `timeout` to execute, the timeout callback function `cb` is called. - - -#### retryOperation.try(fn) - -This is an alias for `retryOperation.attempt(fn)`. This is deprecated. Please use `retryOperation.attempt(fn)` instead. - -#### retryOperation.start(fn) - -This is an alias for `retryOperation.attempt(fn)`. This is deprecated. Please use `retryOperation.attempt(fn)` instead. - -#### retryOperation.retry(error) - -Returns `false` when no `error` value is given, or the maximum amount of retries -has been reached. - -Otherwise it returns `true`, and retries the operation after the timeout for -the current attempt number. - -#### retryOperation.stop() - -Allows you to stop the operation being retried. Useful for aborting the operation on a fatal error etc. - -#### retryOperation.attempts() - -Returns an int representing the number of attempts it took to call `fn` before it was successful. - -## License - -retry is licensed under the MIT license. - - -# Changelog - -0.10.0 Adding `stop` functionality, thanks to @maxnachlinger. - -0.9.0 Adding `unref` functionality, thanks to @satazor. - -0.8.0 Implementing retry.wrap. - -0.7.0 Some bug fixes and made retry.createTimeout() public. Fixed issues [#10](https://github.com/tim-kos/node-retry/issues/10), [#12](https://github.com/tim-kos/node-retry/issues/12), and [#13](https://github.com/tim-kos/node-retry/issues/13). - -0.6.0 Introduced optional timeOps parameter for the attempt() function which is an object having a property timeout in milliseconds and a property cb callback function. Whenever your retry operation takes longer than timeout to execute, the timeout callback function cb is called. - -0.5.0 Some minor refactoring. - -0.4.0 Changed retryOperation.try() to retryOperation.attempt(). Deprecated the aliases start() and try() for it. - -0.3.0 Added retryOperation.start() which is an alias for retryOperation.try(). 
- -0.2.0 Added attempts() function and parameter to retryOperation.try() representing the number of attempts it took to call fn(). diff --git a/node_modules/npm-registry-client/node_modules/retry/equation.gif b/node_modules/npm-registry-client/node_modules/retry/equation.gif deleted file mode 100644 index 97107237ba19f..0000000000000 Binary files a/node_modules/npm-registry-client/node_modules/retry/equation.gif and /dev/null differ diff --git a/node_modules/npm-registry-client/node_modules/retry/example/dns.js b/node_modules/npm-registry-client/node_modules/retry/example/dns.js deleted file mode 100644 index 446729b6f9af6..0000000000000 --- a/node_modules/npm-registry-client/node_modules/retry/example/dns.js +++ /dev/null @@ -1,31 +0,0 @@ -var dns = require('dns'); -var retry = require('../lib/retry'); - -function faultTolerantResolve(address, cb) { - var opts = { - retries: 2, - factor: 2, - minTimeout: 1 * 1000, - maxTimeout: 2 * 1000, - randomize: true - }; - var operation = retry.operation(opts); - - operation.attempt(function(currentAttempt) { - dns.resolve(address, function(err, addresses) { - if (operation.retry(err)) { - return; - } - - cb(operation.mainError(), operation.errors(), addresses); - }); - }); -} - -faultTolerantResolve('nodejs.org', function(err, errors, addresses) { - console.warn('err:'); - console.log(err); - - console.warn('addresses:'); - console.log(addresses); -}); \ No newline at end of file diff --git a/node_modules/npm-registry-client/node_modules/retry/example/stop.js b/node_modules/npm-registry-client/node_modules/retry/example/stop.js deleted file mode 100644 index e1ceafeebafc5..0000000000000 --- a/node_modules/npm-registry-client/node_modules/retry/example/stop.js +++ /dev/null @@ -1,40 +0,0 @@ -var retry = require('../lib/retry'); - -function attemptAsyncOperation(someInput, cb) { - var opts = { - retries: 2, - factor: 2, - minTimeout: 1 * 1000, - maxTimeout: 2 * 1000, - randomize: true - }; - var operation = retry.operation(opts); - - operation.attempt(function(currentAttempt) { - failingAsyncOperation(someInput, function(err, result) { - - if (err && err.message === 'A fatal error') { - operation.stop(); - return cb(err); - } - - if (operation.retry(err)) { - return; - } - - cb(operation.mainError(), operation.errors(), result); - }); - }); -} - -attemptAsyncOperation('test input', function(err, errors, result) { - console.warn('err:'); - console.log(err); - - console.warn('result:'); - console.log(result); -}); - -function failingAsyncOperation(input, cb) { - return setImmediate(cb.bind(null, new Error('A fatal error'))); -} diff --git a/node_modules/npm-registry-client/node_modules/retry/index.js b/node_modules/npm-registry-client/node_modules/retry/index.js deleted file mode 100644 index ee62f3a112c28..0000000000000 --- a/node_modules/npm-registry-client/node_modules/retry/index.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('./lib/retry'); \ No newline at end of file diff --git a/node_modules/npm-registry-client/node_modules/retry/lib/retry.js b/node_modules/npm-registry-client/node_modules/retry/lib/retry.js deleted file mode 100644 index 77428cfd0006f..0000000000000 --- a/node_modules/npm-registry-client/node_modules/retry/lib/retry.js +++ /dev/null @@ -1,99 +0,0 @@ -var RetryOperation = require('./retry_operation'); - -exports.operation = function(options) { - var timeouts = exports.timeouts(options); - return new RetryOperation(timeouts, { - forever: options && options.forever, - unref: options && options.unref - }); -}; - 
-exports.timeouts = function(options) { - if (options instanceof Array) { - return [].concat(options); - } - - var opts = { - retries: 10, - factor: 2, - minTimeout: 1 * 1000, - maxTimeout: Infinity, - randomize: false - }; - for (var key in options) { - opts[key] = options[key]; - } - - if (opts.minTimeout > opts.maxTimeout) { - throw new Error('minTimeout is greater than maxTimeout'); - } - - var timeouts = []; - for (var i = 0; i < opts.retries; i++) { - timeouts.push(this.createTimeout(i, opts)); - } - - if (options && options.forever && !timeouts.length) { - timeouts.push(this.createTimeout(i, opts)); - } - - // sort the array numerically ascending - timeouts.sort(function(a,b) { - return a - b; - }); - - return timeouts; -}; - -exports.createTimeout = function(attempt, opts) { - var random = (opts.randomize) - ? (Math.random() + 1) - : 1; - - var timeout = Math.round(random * opts.minTimeout * Math.pow(opts.factor, attempt)); - timeout = Math.min(timeout, opts.maxTimeout); - - return timeout; -}; - -exports.wrap = function(obj, options, methods) { - if (options instanceof Array) { - methods = options; - options = null; - } - - if (!methods) { - methods = []; - for (var key in obj) { - if (typeof obj[key] === 'function') { - methods.push(key); - } - } - } - - for (var i = 0; i < methods.length; i++) { - var method = methods[i]; - var original = obj[method]; - - obj[method] = function retryWrapper() { - var op = exports.operation(options); - var args = Array.prototype.slice.call(arguments); - var callback = args.pop(); - - args.push(function(err) { - if (op.retry(err)) { - return; - } - if (err) { - arguments[0] = op.mainError(); - } - callback.apply(this, arguments); - }); - - op.attempt(function() { - original.apply(obj, args); - }); - }; - obj[method].options = options; - } -}; diff --git a/node_modules/npm-registry-client/node_modules/retry/lib/retry_operation.js b/node_modules/npm-registry-client/node_modules/retry/lib/retry_operation.js deleted file mode 100644 index 2b3db8e177697..0000000000000 --- a/node_modules/npm-registry-client/node_modules/retry/lib/retry_operation.js +++ /dev/null @@ -1,143 +0,0 @@ -function RetryOperation(timeouts, options) { - // Compatibility for the old (timeouts, retryForever) signature - if (typeof options === 'boolean') { - options = { forever: options }; - } - - this._timeouts = timeouts; - this._options = options || {}; - this._fn = null; - this._errors = []; - this._attempts = 1; - this._operationTimeout = null; - this._operationTimeoutCb = null; - this._timeout = null; - - if (this._options.forever) { - this._cachedTimeouts = this._timeouts.slice(0); - } -} -module.exports = RetryOperation; - -RetryOperation.prototype.stop = function() { - if (this._timeout) { - clearTimeout(this._timeout); - } - - this._timeouts = []; - this._cachedTimeouts = null; -}; - -RetryOperation.prototype.retry = function(err) { - if (this._timeout) { - clearTimeout(this._timeout); - } - - if (!err) { - return false; - } - - this._errors.push(err); - - var timeout = this._timeouts.shift(); - if (timeout === undefined) { - if (this._cachedTimeouts) { - // retry forever, only keep last error - this._errors.splice(this._errors.length - 1, this._errors.length); - this._timeouts = this._cachedTimeouts.slice(0); - timeout = this._timeouts.shift(); - } else { - return false; - } - } - - var self = this; - var timer = setTimeout(function() { - self._attempts++; - - if (self._operationTimeoutCb) { - self._timeout = setTimeout(function() { - 
self._operationTimeoutCb(self._attempts); - }, self._operationTimeout); - - if (this._options.unref) { - self._timeout.unref(); - } - } - - self._fn(self._attempts); - }, timeout); - - if (this._options.unref) { - timer.unref(); - } - - return true; -}; - -RetryOperation.prototype.attempt = function(fn, timeoutOps) { - this._fn = fn; - - if (timeoutOps) { - if (timeoutOps.timeout) { - this._operationTimeout = timeoutOps.timeout; - } - if (timeoutOps.cb) { - this._operationTimeoutCb = timeoutOps.cb; - } - } - - var self = this; - if (this._operationTimeoutCb) { - this._timeout = setTimeout(function() { - self._operationTimeoutCb(); - }, self._operationTimeout); - } - - this._fn(this._attempts); -}; - -RetryOperation.prototype.try = function(fn) { - console.log('Using RetryOperation.try() is deprecated'); - this.attempt(fn); -}; - -RetryOperation.prototype.start = function(fn) { - console.log('Using RetryOperation.start() is deprecated'); - this.attempt(fn); -}; - -RetryOperation.prototype.start = RetryOperation.prototype.try; - -RetryOperation.prototype.errors = function() { - return this._errors; -}; - -RetryOperation.prototype.attempts = function() { - return this._attempts; -}; - -RetryOperation.prototype.mainError = function() { - if (this._errors.length === 0) { - return null; - } - - var counts = {}; - var mainError = null; - var mainErrorCount = 0; - - for (var i = 0; i < this._errors.length; i++) { - var error = this._errors[i]; - var message = error.message; - var count = (counts[message] || 0) + 1; - - counts[message] = count; - - if (count >= mainErrorCount) { - mainError = error; - mainErrorCount = count; - } - } - - return mainError; -}; diff --git a/node_modules/npm-registry-client/node_modules/retry/package.json b/node_modules/npm-registry-client/node_modules/retry/package.json deleted file mode 100644 index 26f1daa8ca136..0000000000000 --- a/node_modules/npm-registry-client/node_modules/retry/package.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "_from": "retry@^0.10.0", - "_id": "retry@0.10.1", - "_inBundle": false, - "_integrity": "sha1-52OI0heZLCUnUCQdPTlW/tmNj/Q=", - "_location": "/npm-registry-client/retry", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "retry@^0.10.0", - "name": "retry", - "escapedName": "retry", - "rawSpec": "^0.10.0", - "saveSpec": null, - "fetchSpec": "^0.10.0" - }, - "_requiredBy": [ - "/npm-registry-client" - ], - "_resolved": "https://registry.npmjs.org/retry/-/retry-0.10.1.tgz", - "_shasum": "e76388d217992c252750241d3d3956fed98d8ff4", - "_spec": "retry@^0.10.0", - "_where": "/Users/rebecca/code/npm/node_modules/npm-registry-client", - "author": { - "name": "Tim Koschützki", - "email": "tim@debuggable.com", - "url": "http://debuggable.com/" - }, - "bugs": { - "url": "https://github.com/tim-kos/node-retry/issues" - }, - "bundleDependencies": false, - "dependencies": {}, - "deprecated": false, - "description": "Abstraction for exponential and custom retry strategies for failed operations.", - "devDependencies": { - "fake": "0.2.0", - "far": "0.0.1" - }, - "directories": { - "lib": "./lib" - }, - "engines": { - "node": "*" - }, - "homepage": "https://github.com/tim-kos/node-retry", - "license": "MIT", - "main": "index", - "name": "retry", - "repository": { - "type": "git", - "url": "git://github.com/tim-kos/node-retry.git" - }, - "version": "0.10.1" -} diff --git a/node_modules/npm-registry-client/node_modules/retry/test/common.js b/node_modules/npm-registry-client/node_modules/retry/test/common.js deleted 
file mode 100644 index 224720696ebac..0000000000000 --- a/node_modules/npm-registry-client/node_modules/retry/test/common.js +++ /dev/null @@ -1,10 +0,0 @@ -var common = module.exports; -var path = require('path'); - -var rootDir = path.join(__dirname, '..'); -common.dir = { - lib: rootDir + '/lib' -}; - -common.assert = require('assert'); -common.fake = require('fake'); \ No newline at end of file diff --git a/node_modules/npm-registry-client/node_modules/retry/test/integration/test-forever.js b/node_modules/npm-registry-client/node_modules/retry/test/integration/test-forever.js deleted file mode 100644 index b41307cb529f1..0000000000000 --- a/node_modules/npm-registry-client/node_modules/retry/test/integration/test-forever.js +++ /dev/null @@ -1,24 +0,0 @@ -var common = require('../common'); -var assert = common.assert; -var retry = require(common.dir.lib + '/retry'); - -(function testForeverUsesFirstTimeout() { - var operation = retry.operation({ - retries: 0, - minTimeout: 100, - maxTimeout: 100, - forever: true - }); - - operation.attempt(function(numAttempt) { - console.log('>numAttempt', numAttempt); - var err = new Error("foo"); - if (numAttempt == 10) { - operation.stop(); - } - - if (operation.retry(err)) { - return; - } - }); -})(); diff --git a/node_modules/npm-registry-client/node_modules/retry/test/integration/test-retry-operation.js b/node_modules/npm-registry-client/node_modules/retry/test/integration/test-retry-operation.js deleted file mode 100644 index 916936424f073..0000000000000 --- a/node_modules/npm-registry-client/node_modules/retry/test/integration/test-retry-operation.js +++ /dev/null @@ -1,176 +0,0 @@ -var common = require('../common'); -var assert = common.assert; -var fake = common.fake.create(); -var retry = require(common.dir.lib + '/retry'); - -(function testErrors() { - var operation = retry.operation(); - - var error = new Error('some error'); - var error2 = new Error('some other error'); - operation._errors.push(error); - operation._errors.push(error2); - - assert.deepEqual(operation.errors(), [error, error2]); -})(); - -(function testMainErrorReturnsMostFrequentError() { - var operation = retry.operation(); - var error = new Error('some error'); - var error2 = new Error('some other error'); - - operation._errors.push(error); - operation._errors.push(error2); - operation._errors.push(error); - - assert.strictEqual(operation.mainError(), error); -})(); - -(function testMainErrorReturnsLastErrorOnEqualCount() { - var operation = retry.operation(); - var error = new Error('some error'); - var error2 = new Error('some other error'); - - operation._errors.push(error); - operation._errors.push(error2); - - assert.strictEqual(operation.mainError(), error2); -})(); - -(function testAttempt() { - var operation = retry.operation(); - var fn = new Function(); - - var timeoutOpts = { - timeout: 1, - cb: function() {} - }; - operation.attempt(fn, timeoutOpts); - - assert.strictEqual(fn, operation._fn); - assert.strictEqual(timeoutOpts.timeout, operation._operationTimeout); - assert.strictEqual(timeoutOpts.cb, operation._operationTimeoutCb); -})(); - -(function testRetry() { - var times = 3; - var error = new Error('some error'); - var operation = retry.operation([1, 2, 3]); - var attempts = 0; - - var finalCallback = fake.callback('finalCallback'); - fake.expectAnytime(finalCallback); - - var fn = function() { - operation.attempt(function(currentAttempt) { - attempts++; - assert.equal(currentAttempt, attempts); - if (operation.retry(error)) { - return; - } - - 
assert.strictEqual(attempts, 4); - assert.strictEqual(operation.attempts(), attempts); - assert.strictEqual(operation.mainError(), error); - finalCallback(); - }); - }; - - fn(); -})(); - -(function testRetryForever() { - var error = new Error('some error'); - var operation = retry.operation({ retries: 3, forever: true }); - var attempts = 0; - - var finalCallback = fake.callback('finalCallback'); - fake.expectAnytime(finalCallback); - - var fn = function() { - operation.attempt(function(currentAttempt) { - attempts++; - assert.equal(currentAttempt, attempts); - if (attempts !== 6 && operation.retry(error)) { - return; - } - - assert.strictEqual(attempts, 6); - assert.strictEqual(operation.attempts(), attempts); - assert.strictEqual(operation.mainError(), error); - finalCallback(); - }); - }; - - fn(); -})(); - -(function testRetryForeverNoRetries() { - var error = new Error('some error'); - var delay = 50 - var operation = retry.operation({ - retries: null, - forever: true, - minTimeout: delay, - maxTimeout: delay - }); - - var attempts = 0; - var startTime = new Date().getTime(); - - var finalCallback = fake.callback('finalCallback'); - fake.expectAnytime(finalCallback); - - var fn = function() { - operation.attempt(function(currentAttempt) { - attempts++; - assert.equal(currentAttempt, attempts); - if (attempts !== 4 && operation.retry(error)) { - return; - } - - var endTime = new Date().getTime(); - var minTime = startTime + (delay * 3); - var maxTime = minTime + 20 // add a little headroom for code execution time - assert(endTime > minTime) - assert(endTime < maxTime) - assert.strictEqual(attempts, 4); - assert.strictEqual(operation.attempts(), attempts); - assert.strictEqual(operation.mainError(), error); - finalCallback(); - }); - }; - - fn(); -})(); - -(function testStop() { - var error = new Error('some error'); - var operation = retry.operation([1, 2, 3]); - var attempts = 0; - - var finalCallback = fake.callback('finalCallback'); - fake.expectAnytime(finalCallback); - - var fn = function() { - operation.attempt(function(currentAttempt) { - attempts++; - assert.equal(currentAttempt, attempts); - - if (attempts === 2) { - operation.stop(); - - assert.strictEqual(attempts, 2); - assert.strictEqual(operation.attempts(), attempts); - assert.strictEqual(operation.mainError(), error); - finalCallback(); - } - - if (operation.retry(error)) { - return; - } - }); - }; - - fn(); -})(); diff --git a/node_modules/npm-registry-client/node_modules/retry/test/integration/test-retry-wrap.js b/node_modules/npm-registry-client/node_modules/retry/test/integration/test-retry-wrap.js deleted file mode 100644 index 7ca8bc7eb596b..0000000000000 --- a/node_modules/npm-registry-client/node_modules/retry/test/integration/test-retry-wrap.js +++ /dev/null @@ -1,77 +0,0 @@ -var common = require('../common'); -var assert = common.assert; -var fake = common.fake.create(); -var retry = require(common.dir.lib + '/retry'); - -function getLib() { - return { - fn1: function() {}, - fn2: function() {}, - fn3: function() {} - }; -} - -(function wrapAll() { - var lib = getLib(); - retry.wrap(lib); - assert.equal(lib.fn1.name, 'retryWrapper'); - assert.equal(lib.fn2.name, 'retryWrapper'); - assert.equal(lib.fn3.name, 'retryWrapper'); -}()); - -(function wrapAllPassOptions() { - var lib = getLib(); - retry.wrap(lib, {retries: 2}); - assert.equal(lib.fn1.name, 'retryWrapper'); - assert.equal(lib.fn2.name, 'retryWrapper'); - assert.equal(lib.fn3.name, 'retryWrapper'); - assert.equal(lib.fn1.options.retries, 2); - 
assert.equal(lib.fn2.options.retries, 2); - assert.equal(lib.fn3.options.retries, 2); -}()); - -(function wrapDefined() { - var lib = getLib(); - retry.wrap(lib, ['fn2', 'fn3']); - assert.notEqual(lib.fn1.name, 'retryWrapper'); - assert.equal(lib.fn2.name, 'retryWrapper'); - assert.equal(lib.fn3.name, 'retryWrapper'); -}()); - -(function wrapDefinedAndPassOptions() { - var lib = getLib(); - retry.wrap(lib, {retries: 2}, ['fn2', 'fn3']); - assert.notEqual(lib.fn1.name, 'retryWrapper'); - assert.equal(lib.fn2.name, 'retryWrapper'); - assert.equal(lib.fn3.name, 'retryWrapper'); - assert.equal(lib.fn2.options.retries, 2); - assert.equal(lib.fn3.options.retries, 2); -}()); - -(function runWrappedWithoutError() { - var callbackCalled; - var lib = {method: function(a, b, callback) { - assert.equal(a, 1); - assert.equal(b, 2); - assert.equal(typeof callback, 'function'); - callback(); - }}; - retry.wrap(lib); - lib.method(1, 2, function() { - callbackCalled = true; - }); - assert.ok(callbackCalled); -}()); - -(function runWrappedWithError() { - var callbackCalled; - var lib = {method: function(callback) { - callback(new Error('Some error')); - }}; - retry.wrap(lib, {retries: 1}); - lib.method(function(err) { - callbackCalled = true; - assert.ok(err instanceof Error); - }); - assert.ok(!callbackCalled); -}()); diff --git a/node_modules/npm-registry-client/node_modules/retry/test/integration/test-timeouts.js b/node_modules/npm-registry-client/node_modules/retry/test/integration/test-timeouts.js deleted file mode 100644 index 7206b0fb0b01d..0000000000000 --- a/node_modules/npm-registry-client/node_modules/retry/test/integration/test-timeouts.js +++ /dev/null @@ -1,69 +0,0 @@ -var common = require('../common'); -var assert = common.assert; -var retry = require(common.dir.lib + '/retry'); - -(function testDefaultValues() { - var timeouts = retry.timeouts(); - - assert.equal(timeouts.length, 10); - assert.equal(timeouts[0], 1000); - assert.equal(timeouts[1], 2000); - assert.equal(timeouts[2], 4000); -})(); - -(function testDefaultValuesWithRandomize() { - var minTimeout = 5000; - var timeouts = retry.timeouts({ - minTimeout: minTimeout, - randomize: true - }); - - assert.equal(timeouts.length, 10); - assert.ok(timeouts[0] > minTimeout); - assert.ok(timeouts[1] > timeouts[0]); - assert.ok(timeouts[2] > timeouts[1]); -})(); - -(function testPassedTimeoutsAreUsed() { - var timeoutsArray = [1000, 2000, 3000]; - var timeouts = retry.timeouts(timeoutsArray); - assert.deepEqual(timeouts, timeoutsArray); - assert.notStrictEqual(timeouts, timeoutsArray); -})(); - -(function testTimeoutsAreWithinBoundaries() { - var minTimeout = 1000; - var maxTimeout = 10000; - var timeouts = retry.timeouts({ - minTimeout: minTimeout, - maxTimeout: maxTimeout - }); - for (var i = 0; i < timeouts; i++) { - assert.ok(timeouts[i] >= minTimeout); - assert.ok(timeouts[i] <= maxTimeout); - } -})(); - -(function testTimeoutsAreIncremental() { - var timeouts = retry.timeouts(); - var lastTimeout = timeouts[0]; - for (var i = 0; i < timeouts; i++) { - assert.ok(timeouts[i] > lastTimeout); - lastTimeout = timeouts[i]; - } -})(); - -(function testTimeoutsAreIncrementalForFactorsLessThanOne() { - var timeouts = retry.timeouts({ - retries: 3, - factor: 0.5 - }); - - var expected = [250, 500, 1000]; - assert.deepEqual(expected, timeouts); -})(); - -(function testRetries() { - var timeouts = retry.timeouts({retries: 2}); - assert.strictEqual(timeouts.length, 2); -})(); diff --git 
a/node_modules/npm-registry-client/node_modules/retry/test/runner.js b/node_modules/npm-registry-client/node_modules/retry/test/runner.js deleted file mode 100644 index e0ee2f570fe3c..0000000000000 --- a/node_modules/npm-registry-client/node_modules/retry/test/runner.js +++ /dev/null @@ -1,5 +0,0 @@ -var far = require('far').create(); - -far.add(__dirname); -far.include(/\/test-.*\.js$/); -far.execute(); diff --git a/node_modules/npm-registry-client/node_modules/ssri/CHANGELOG.md b/node_modules/npm-registry-client/node_modules/ssri/CHANGELOG.md deleted file mode 100644 index 5c068948814ed..0000000000000 --- a/node_modules/npm-registry-client/node_modules/ssri/CHANGELOG.md +++ /dev/null @@ -1,256 +0,0 @@ -# Change Log - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - - -# [5.3.0](https://github.com/zkat/ssri/compare/v5.2.4...v5.3.0) (2018-03-13) - - -### Features - -* **checkData:** optionally throw when checkData fails ([bf26b84](https://github.com/zkat/ssri/commit/bf26b84)) - - - - -## [5.2.4](https://github.com/zkat/ssri/compare/v5.2.3...v5.2.4) (2018-02-16) - - - - -## [5.2.3](https://github.com/zkat/ssri/compare/v5.2.2...v5.2.3) (2018-02-16) - - -### Bug Fixes - -* **hashes:** filter hash priority list by available hashes ([2fa30b8](https://github.com/zkat/ssri/commit/2fa30b8)) -* **integrityStream:** dedupe algorithms to generate ([d56c654](https://github.com/zkat/ssri/commit/d56c654)) - - - - -## [5.2.2](https://github.com/zkat/ssri/compare/v5.2.1...v5.2.2) (2018-02-14) - - -### Bug Fixes - -* **security:** tweak strict SRI regex ([#10](https://github.com/zkat/ssri/issues/10)) ([d0ebcdc](https://github.com/zkat/ssri/commit/d0ebcdc)) - - - - -## [5.2.1](https://github.com/zkat/ssri/compare/v5.2.0...v5.2.1) (2018-02-06) - - - - -# [5.2.0](https://github.com/zkat/ssri/compare/v5.1.0...v5.2.0) (2018-02-06) - - -### Features - -* **match:** add integrity.match() ([3c49cc4](https://github.com/zkat/ssri/commit/3c49cc4)) - - - - -# [5.1.0](https://github.com/zkat/ssri/compare/v5.0.0...v5.1.0) (2018-01-18) - - -### Bug Fixes - -* **checkStream:** integrityStream now takes opts.integrity algos into account ([d262910](https://github.com/zkat/ssri/commit/d262910)) - - -### Features - -* **sha3:** do some guesswork about upcoming sha3 ([7fdd9df](https://github.com/zkat/ssri/commit/7fdd9df)) - - - - -# [5.0.0](https://github.com/zkat/ssri/compare/v4.1.6...v5.0.0) (2017-10-23) - - -### Features - -* **license:** relicense to ISC (#9) ([c82983a](https://github.com/zkat/ssri/commit/c82983a)) - - -### BREAKING CHANGES - -* **license:** the license has been changed from CC0-1.0 to ISC. 
- - - - -## [4.1.6](https://github.com/zkat/ssri/compare/v4.1.5...v4.1.6) (2017-06-07) - - -### Bug Fixes - -* **checkStream:** make sure to pass all opts through ([0b1bcbe](https://github.com/zkat/ssri/commit/0b1bcbe)) - - - - -## [4.1.5](https://github.com/zkat/ssri/compare/v4.1.4...v4.1.5) (2017-06-05) - - -### Bug Fixes - -* **integrityStream:** stop crashing if opts.algorithms and opts.integrity have an algo mismatch ([fb1293e](https://github.com/zkat/ssri/commit/fb1293e)) - - - - -## [4.1.4](https://github.com/zkat/ssri/compare/v4.1.3...v4.1.4) (2017-05-31) - - -### Bug Fixes - -* **node:** older versions of node[@4](https://github.com/4) do not support base64buffer string parsing ([513df4e](https://github.com/zkat/ssri/commit/513df4e)) - - - - -## [4.1.3](https://github.com/zkat/ssri/compare/v4.1.2...v4.1.3) (2017-05-24) - - -### Bug Fixes - -* **check:** handle various bad hash corner cases better ([c2c262b](https://github.com/zkat/ssri/commit/c2c262b)) - - - - -## [4.1.2](https://github.com/zkat/ssri/compare/v4.1.1...v4.1.2) (2017-04-18) - - -### Bug Fixes - -* **stream:** _flush can be called multiple times. use on("end") ([b1c4805](https://github.com/zkat/ssri/commit/b1c4805)) - - - - -## [4.1.1](https://github.com/zkat/ssri/compare/v4.1.0...v4.1.1) (2017-04-12) - - -### Bug Fixes - -* **pickAlgorithm:** error if pickAlgorithm() is used in an empty Integrity ([fab470e](https://github.com/zkat/ssri/commit/fab470e)) - - - - -# [4.1.0](https://github.com/zkat/ssri/compare/v4.0.0...v4.1.0) (2017-04-07) - - -### Features - -* adding ssri.create for a crypto style interface (#2) ([96f52ad](https://github.com/zkat/ssri/commit/96f52ad)) - - - - -# [4.0.0](https://github.com/zkat/ssri/compare/v3.0.2...v4.0.0) (2017-04-03) - - -### Bug Fixes - -* **integrity:** should have changed the error code before. 
oops ([8381afa](https://github.com/zkat/ssri/commit/8381afa)) - - -### BREAKING CHANGES - -* **integrity:** EBADCHECKSUM -> EINTEGRITY for verification errors - - - - -## [3.0.2](https://github.com/zkat/ssri/compare/v3.0.1...v3.0.2) (2017-04-03) - - - - -## [3.0.1](https://github.com/zkat/ssri/compare/v3.0.0...v3.0.1) (2017-04-03) - - -### Bug Fixes - -* **package.json:** really should have these in the keywords because search ([a6ac6d0](https://github.com/zkat/ssri/commit/a6ac6d0)) - - - - -# [3.0.0](https://github.com/zkat/ssri/compare/v2.0.0...v3.0.0) (2017-04-03) - - -### Bug Fixes - -* **hashes:** IntegrityMetadata -> Hash ([d04aa1f](https://github.com/zkat/ssri/commit/d04aa1f)) - - -### Features - -* **check:** return IntegrityMetadata on check success ([2301e74](https://github.com/zkat/ssri/commit/2301e74)) -* **fromHex:** ssri.fromHex to make it easier to generate them from hex valus ([049b89e](https://github.com/zkat/ssri/commit/049b89e)) -* **hex:** utility function for getting hex version of digest ([a9f021c](https://github.com/zkat/ssri/commit/a9f021c)) -* **hexDigest:** added hexDigest method to Integrity objects too ([85208ba](https://github.com/zkat/ssri/commit/85208ba)) -* **integrity:** add .isIntegrity and .isIntegrityMetadata ([1b29e6f](https://github.com/zkat/ssri/commit/1b29e6f)) -* **integrityStream:** new stream that can both generate and check streamed data ([fd23e1b](https://github.com/zkat/ssri/commit/fd23e1b)) -* **parse:** allow parsing straight into a single IntegrityMetadata object ([c8ddf48](https://github.com/zkat/ssri/commit/c8ddf48)) -* **pickAlgorithm:** Intergrity#pickAlgorithm() added ([b97a796](https://github.com/zkat/ssri/commit/b97a796)) -* **size:** calculate and update stream sizes ([02ed1ad](https://github.com/zkat/ssri/commit/02ed1ad)) - - -### BREAKING CHANGES - -* **hashes:** `.isIntegrityMetadata` is now `.isHash`. Also, any references to `IntegrityMetadata` now refer to `Hash`. -* **integrityStream:** createCheckerStream has been removed and replaced with a general-purpose integrityStream. - -To convert existing createCheckerStream code, move the `sri` argument into `opts.integrity` in integrityStream. All other options should be the same. -* **check:** `checkData`, `checkStream`, and `createCheckerStream` now yield a whole IntegrityMetadata instance representing the first successful hash match. - - - - -# [2.0.0](https://github.com/zkat/ssri/compare/v1.0.0...v2.0.0) (2017-03-24) - - -### Bug Fixes - -* **strict-mode:** make regexes more rigid ([122a32c](https://github.com/zkat/ssri/commit/122a32c)) - - -### Features - -* **api:** added serialize alias for unparse ([999b421](https://github.com/zkat/ssri/commit/999b421)) -* **concat:** add Integrity#concat() ([cae12c7](https://github.com/zkat/ssri/commit/cae12c7)) -* **pickAlgo:** pick the strongest algorithm provided, by default ([58c18f7](https://github.com/zkat/ssri/commit/58c18f7)) -* **strict-mode:** strict SRI support ([3f0b64c](https://github.com/zkat/ssri/commit/3f0b64c)) -* **stringify:** replaced unparse/serialize with stringify ([4acad30](https://github.com/zkat/ssri/commit/4acad30)) -* **verification:** add opts.pickAlgorithm ([f72e658](https://github.com/zkat/ssri/commit/f72e658)) - - -### BREAKING CHANGES - -* **pickAlgo:** ssri will prioritize specific hashes now -* **stringify:** serialize and unparse have been removed. Use ssri.stringify instead. -* **strict-mode:** functions that accepted an optional `sep` argument now expect `opts.sep`. 
- - - - -# 1.0.0 (2017-03-23) - - -### Features - -* **api:** implemented initial api ([4fbb16b](https://github.com/zkat/ssri/commit/4fbb16b)) - - -### BREAKING CHANGES - -* **api:** Initial API established. diff --git a/node_modules/npm-registry-client/node_modules/ssri/LICENSE.md b/node_modules/npm-registry-client/node_modules/ssri/LICENSE.md deleted file mode 100644 index 8d28acf866d93..0000000000000 --- a/node_modules/npm-registry-client/node_modules/ssri/LICENSE.md +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-registry-client/node_modules/ssri/README.md b/node_modules/npm-registry-client/node_modules/ssri/README.md deleted file mode 100644 index a6c07e7409b81..0000000000000 --- a/node_modules/npm-registry-client/node_modules/ssri/README.md +++ /dev/null @@ -1,488 +0,0 @@ -# ssri [![npm version](https://img.shields.io/npm/v/ssri.svg)](https://npm.im/ssri) [![license](https://img.shields.io/npm/l/ssri.svg)](https://npm.im/ssri) [![Travis](https://img.shields.io/travis/zkat/ssri.svg)](https://travis-ci.org/zkat/ssri) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/ssri?svg=true)](https://ci.appveyor.com/project/zkat/ssri) [![Coverage Status](https://coveralls.io/repos/github/zkat/ssri/badge.svg?branch=latest)](https://coveralls.io/github/zkat/ssri?branch=latest) - -[`ssri`](https://github.com/zkat/ssri), short for Standard Subresource -Integrity, is a Node.js utility for parsing, manipulating, serializing, -generating, and verifying [Subresource -Integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) hashes. 
- -## Install - -`$ npm install --save ssri` - -## Table of Contents - -* [Example](#example) -* [Features](#features) -* [Contributing](#contributing) -* [API](#api) - * Parsing & Serializing - * [`parse`](#parse) - * [`stringify`](#stringify) - * [`Integrity#concat`](#integrity-concat) - * [`Integrity#toString`](#integrity-to-string) - * [`Integrity#toJSON`](#integrity-to-json) - * [`Integrity#match`](#integrity-match) - * [`Integrity#pickAlgorithm`](#integrity-pick-algorithm) - * [`Integrity#hexDigest`](#integrity-hex-digest) - * Integrity Generation - * [`fromHex`](#from-hex) - * [`fromData`](#from-data) - * [`fromStream`](#from-stream) - * [`create`](#create) - * Integrity Verification - * [`checkData`](#check-data) - * [`checkStream`](#check-stream) - * [`integrityStream`](#integrity-stream) - -### Example - -```javascript -const ssri = require('ssri') - -const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo' - -// Parsing and serializing -const parsed = ssri.parse(integrity) -ssri.stringify(parsed) // === integrity (works on non-Integrity objects) -parsed.toString() // === integrity - -// Async stream functions -ssri.checkStream(fs.createReadStream('./my-file'), integrity).then(...) -ssri.fromStream(fs.createReadStream('./my-file')).then(sri => { - sri.toString() === integrity -}) -fs.createReadStream('./my-file').pipe(ssri.createCheckerStream(sri)) - -// Sync data functions -ssri.fromData(fs.readFileSync('./my-file')) // === parsed -ssri.checkData(fs.readFileSync('./my-file'), integrity) // => 'sha512' -``` - -### Features - -* Parses and stringifies SRI strings. -* Generates SRI strings from raw data or Streams. -* Strict standard compliance. -* `?foo` metadata option support. -* Multiple entries for the same algorithm. -* Object-based integrity hash manipulation. -* Small footprint: no dependencies, concise implementation. -* Full test coverage. -* Customizable algorithm picker. - -### Contributing - -The ssri team enthusiastically welcomes contributions and project participation! -There's a bunch of things you can do if you want to contribute! The [Contributor -Guide](CONTRIBUTING.md) has all the information you need for everything from -reporting bugs to contributing entire new features. Please don't hesitate to -jump in if you'd like to, or even ask us questions if something isn't clear. - -### API - -#### `> ssri.parse(sri, [opts]) -> Integrity` - -Parses `sri` into an `Integrity` data structure. `sri` can be an integrity -string, an `Hash`-like with `digest` and `algorithm` fields and an optional -`options` field, or an `Integrity`-like object. The resulting object will be an -`Integrity` instance that has this shape: - -```javascript -{ - 'sha1': [{algorithm: 'sha1', digest: 'deadbeef', options: []}], - 'sha512': [ - {algorithm: 'sha512', digest: 'c0ffee', options: []}, - {algorithm: 'sha512', digest: 'bad1dea', options: ['foo']} - ], -} -``` - -If `opts.single` is truthy, a single `Hash` object will be returned. That is, a -single object that looks like `{algorithm, digest, options}`, as opposed to a -larger object with multiple of these. - -If `opts.strict` is truthy, the resulting object will be filtered such that -it strictly follows the Subresource Integrity spec, throwing away any entries -with any invalid components. This also means a restricted set of algorithms -will be used -- the spec limits them to `sha256`, `sha384`, and `sha512`. 
- -Strict mode is recommended if the integrity strings are intended for use in -browsers, or in other situations where strict adherence to the spec is needed. - -##### Example - -```javascript -ssri.parse('sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo') // -> Integrity object -``` - -#### `> ssri.stringify(sri, [opts]) -> String` - -This function is identical to [`Integrity#toString()`](#integrity-to-string), -except it can be used on _any_ object that [`parse`](#parse) can handle -- that -is, a string, an `Hash`-like, or an `Integrity`-like. - -The `opts.sep` option defines the string to use when joining multiple entries -together. To be spec-compliant, this _must_ be whitespace. The default is a -single space (`' '`). - -If `opts.strict` is true, the integrity string will be created using strict -parsing rules. See [`ssri.parse`](#parse). - -##### Example - -```javascript -// Useful for cleaning up input SRI strings: -ssri.stringify('\n\rsha512-foo\n\t\tsha384-bar') -// -> 'sha512-foo sha384-bar' - -// Hash-like: only a single entry. -ssri.stringify({ - algorithm: 'sha512', - digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==', - options: ['foo'] -}) -// -> -// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo' - -// Integrity-like: full multi-entry syntax. Similar to output of `ssri.parse` -ssri.stringify({ - 'sha512': [ - { - algorithm: 'sha512', - digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==', - options: ['foo'] - } - ] -}) -// -> -// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo' -``` - -#### `> Integrity#concat(otherIntegrity, [opts]) -> Integrity` - -Concatenates an `Integrity` object with another IntegrityLike, or an integrity -string. - -This is functionally equivalent to concatenating the string format of both -integrity arguments, and calling [`ssri.parse`](#ssri-parse) on the new string. - -If `opts.strict` is true, the new `Integrity` will be created using strict -parsing rules. See [`ssri.parse`](#parse). - -##### Example - -```javascript -// This will combine the integrity checks for two different versions of -// your index.js file so you can use a single integrity string and serve -// either of these to clients, from a single ` - -``` - -For version 3 uuids: - -```html - - -``` - -For version 4 uuids: - -```html - - -``` - -For version 5 uuids: - -```html - - -``` - -## API - -### Version 1 - -```javascript -const uuidv1 = require('uuid/v1'); - -// Incantations -uuidv1(); -uuidv1(options); -uuidv1(options, buffer, offset); -``` - -Generate and return a RFC4122 v1 (timestamp-based) UUID. - -* `options` - (Object) Optional uuid state to apply. Properties may include: - - * `node` - (Array) Node id as Array of 6 bytes (per 4.1.6). Default: Randomly generated ID. See note 1. - * `clockseq` - (Number between 0 - 0x3fff) RFC clock sequence. Default: An internally maintained clockseq is used. - * `msecs` - (Number) Time in milliseconds since unix Epoch. Default: The current time is used. - * `nsecs` - (Number between 0-9999) additional time, in 100-nanosecond units. Ignored if `msecs` is unspecified. Default: internal uuid counter is used, as per 4.2.1.2. - -* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. -* `offset` - (Number) Starting index in `buffer` at which to begin writing. 
- -Returns `buffer`, if specified, otherwise the string form of the UUID - -Note: The id is generated guaranteed to stay constant for the lifetime of the current JS runtime. (Future versions of this module may use persistent storage mechanisms to extend this guarantee.) - -Example: Generate string UUID with fully-specified options - -```javascript --run v1 -const v1options = { - node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], - clockseq: 0x1234, - msecs: new Date('2011-11-01').getTime(), - nsecs: 5678 -}; -uuidv1(v1options); // RESULT -``` - -Example: In-place generation of two binary IDs - -```javascript --run v1 -// Generate two ids in an array -const arr = new Array(); -uuidv1(null, arr, 0); // RESULT -uuidv1(null, arr, 16); // RESULT -``` - -### Version 3 - -```javascript -const uuidv3 = require('uuid/v3'); - -// Incantations -uuidv3(name, namespace); -uuidv3(name, namespace, buffer); -uuidv3(name, namespace, buffer, offset); -``` - -Generate and return a RFC4122 v3 UUID. - -* `name` - (String | Array[]) "name" to create UUID with -* `namespace` - (String | Array[]) "namespace" UUID either as a String or Array[16] of byte values -* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. -* `offset` - (Number) Starting index in `buffer` at which to begin writing. Default = 0 - -Returns `buffer`, if specified, otherwise the string form of the UUID - -Example: - -```javascript --run v3 -uuidv3('hello world', MY_NAMESPACE); // RESULT -``` - -### Version 4 - -```javascript -const uuidv4 = require('uuid/v4') - -// Incantations -uuidv4(); -uuidv4(options); -uuidv4(options, buffer, offset); -``` - -Generate and return a RFC4122 v4 UUID. - -* `options` - (Object) Optional uuid state to apply. Properties may include: - * `random` - (Number[16]) Array of 16 numbers (0-255) to use in place of randomly generated values - * `rng` - (Function) Random # generator function that returns an Array[16] of byte values (0-255) -* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. -* `offset` - (Number) Starting index in `buffer` at which to begin writing. - -Returns `buffer`, if specified, otherwise the string form of the UUID - -Example: Generate string UUID with predefined `random` values - -```javascript --run v4 -const v4options = { - random: [ - 0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea, - 0x71, 0xb4, 0xef, 0xe1, 0x67, 0x1c, 0x58, 0x36 - ] -}; -uuidv4(v4options); // RESULT -``` - -Example: Generate two IDs in a single buffer - -```javascript --run v4 -const buffer = new Array(); -uuidv4(null, buffer, 0); // RESULT -uuidv4(null, buffer, 16); // RESULT -``` - -### Version 5 - -```javascript -const uuidv5 = require('uuid/v5'); - -// Incantations -uuidv5(name, namespace); -uuidv5(name, namespace, buffer); -uuidv5(name, namespace, buffer, offset); -``` - -Generate and return a RFC4122 v5 UUID. - -* `name` - (String | Array[]) "name" to create UUID with -* `namespace` - (String | Array[]) "namespace" UUID either as a String or Array[16] of byte values -* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. -* `offset` - (Number) Starting index in `buffer` at which to begin writing. Default = 0 - -Returns `buffer`, if specified, otherwise the string form of the UUID - -Example: - -```javascript --run v5 -uuidv5('hello world', MY_NAMESPACE); // RESULT -``` - -## Command Line - -UUIDs can be generated from the command line with the `uuid` command. 
- -```shell -$ uuid -ddeb27fb-d9a0-4624-be4d-4615062daed4 - -$ uuid v1 -02d37060-d446-11e7-a9fa-7bdae751ebe1 -``` - -Type `uuid --help` for usage details - -## Testing - -```shell -npm test -``` diff --git a/node_modules/uuid/package.json b/node_modules/uuid/package.json index 81ea5a2682f68..2ffe04e3d7ea5 100644 --- a/node_modules/uuid/package.json +++ b/node_modules/uuid/package.json @@ -1,29 +1,30 @@ { - "_from": "uuid@3.3.2", - "_id": "uuid@3.3.2", + "_from": "uuid@3.3.3", + "_id": "uuid@3.3.3", "_inBundle": false, - "_integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==", + "_integrity": "sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ==", "_location": "/uuid", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "uuid@3.3.2", + "raw": "uuid@3.3.3", "name": "uuid", "escapedName": "uuid", - "rawSpec": "3.3.2", + "rawSpec": "3.3.3", "saveSpec": null, - "fetchSpec": "3.3.2" + "fetchSpec": "3.3.3" }, "_requiredBy": [ "#USER", "/", + "/nyc", "/request" ], - "_resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "_shasum": "1b4af4955eb3077c501c23872fc6513811587131", - "_spec": "uuid@3.3.2", - "_where": "/Users/zkat/Documents/code/work/npm", + "_resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.3.tgz", + "_shasum": "4568f0216e78760ee1dbf3a4d2cf53e224112866", + "_spec": "uuid@3.3.3", + "_where": "/Users/mperrotte/npminc/cli", "bin": { "uuid": "./bin/uuid" }, @@ -66,13 +67,13 @@ "deprecated": false, "description": "RFC4122 (v1, v4, and v5) UUIDs", "devDependencies": { - "@commitlint/cli": "7.0.0", - "@commitlint/config-conventional": "7.0.1", - "eslint": "4.19.1", - "husky": "0.14.3", - "mocha": "5.2.0", - "runmd": "1.0.1", - "standard-version": "4.4.0" + "@commitlint/cli": "8.1.0", + "@commitlint/config-conventional": "8.1.0", + "eslint": "6.2.0", + "husky": "3.0.4", + "mocha": "6.2.0", + "runmd": "1.2.1", + "standard-version": "7.0.0" }, "homepage": "https://github.com/kelektiv/node-uuid#readme", "keywords": [ @@ -87,11 +88,11 @@ "url": "git+https://github.com/kelektiv/node-uuid.git" }, "scripts": { - "commitmsg": "commitlint -E GIT_PARAMS", + "commitmsg": "commitlint -E HUSKY_GIT_PARAMS", "md": "runmd --watch --output=README.md README_js.md", "prepare": "runmd --output=README.md README_js.md", "release": "standard-version", "test": "mocha test/test.js" }, - "version": "3.3.2" + "version": "3.3.3" } diff --git a/node_modules/worker-farm/.travis.yml b/node_modules/worker-farm/.travis.yml index 1c9e2b559d5da..7af56429c010b 100644 --- a/node_modules/worker-farm/.travis.yml +++ b/node_modules/worker-farm/.travis.yml @@ -1,9 +1,9 @@ language: node_js node_js: - - 4 - 6 - 8 - - 9 + - 10 + - 12 branches: only: - master diff --git a/node_modules/worker-farm/README.md b/node_modules/worker-farm/README.md index 982b37cb5dfb9..c52689ed9fc58 100644 --- a/node_modules/worker-farm/README.md +++ b/node_modules/worker-farm/README.md @@ -1,4 +1,4 @@ -# Worker Farm [![Build Status](https://secure.travis-ci.org/rvagg/node-worker-farm.png)](http://travis-ci.org/rvagg/node-worker-farm) +# Worker Farm [![Build Status](https://secure.travis-ci.org/rvagg/node-worker-farm.svg)](http://travis-ci.org/rvagg/node-worker-farm) [![NPM](https://nodei.co/npm/worker-farm.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/worker-farm/) [![NPM](https://nodei.co/npm-dl/worker-farm.png?months=6&height=3)](https://nodei.co/npm/worker-farm/) @@ -111,6 +111,7 @@ 
If you don't provide an `options` object then the following defaults will be use , maxCallTime : Infinity , maxRetries : Infinity , autoStart : false + , onChild : function() {} } ``` @@ -130,6 +131,8 @@ If you don't provide an `options` object then the following defaults will be use * **autoStart** when set to `true` will start the workers as early as possible. Use this when your workers have to do expensive initialization. That way they'll be ready when the first request comes through. + * **onChild** when new child process starts this callback will be called with subprocess object as an argument. Use this when you need to add some custom communication with child processes. + ### workerFarm.end(farm) Child processes stay alive waiting for jobs indefinitely and your farm manager will stay alive managing its workers, so if you need it to stop then you have to do so explicitly. If you send your farm API to `workerFarm.end()` then it'll cleanly end your worker processes. Note though that it's a *soft* ending so it'll wait for child processes to finish what they are working on before asking them to die. diff --git a/node_modules/worker-farm/index.d.ts b/node_modules/worker-farm/index.d.ts index 682c21f410c51..310e91503fda4 100644 --- a/node_modules/worker-farm/index.d.ts +++ b/node_modules/worker-farm/index.d.ts @@ -1,17 +1,15 @@ -interface Workers { - (callback: WorkerCallback): void; - (arg1: any, callback: WorkerCallback): void; - (arg1: any, arg2: any, callback: WorkerCallback): void; - (arg1: any, arg2: any, arg3: any, callback: WorkerCallback): void; - (arg1: any, arg2: any, arg3: any, arg4: any, callback: WorkerCallback): void; -} +import { ForkOptions } from "child_process"; + +export = Farm; -type WorkerCallback = - | WorkerCallback0 - | WorkerCallback1 - | WorkerCallback2 - | WorkerCallback3 - | WorkerCallback4; +declare function Farm(name: string): Farm.Workers; +declare function Farm(name: string, exportedMethods: string[]): Farm.Workers; +declare function Farm(options: Farm.FarmOptions, name: string): Farm.Workers; +declare function Farm( + options: Farm.FarmOptions, + name: string, + exportedMethods: string[], +): Farm.Workers; type WorkerCallback0 = () => void; type WorkerCallback1 = (arg1: any) => void; @@ -19,26 +17,39 @@ type WorkerCallback2 = (arg1: any, arg2: any) => void; type WorkerCallback3 = (arg1: any, arg2: any, arg3: any) => void; type WorkerCallback4 = (arg1: any, arg2: any, arg3: any, arg4: any) => void; -interface FarmOptions { - maxCallsPerWorker?: number - maxConcurrentWorkers?: number - maxConcurrentCallsPerWorker?: number - maxConcurrentCalls?: number - maxCallTime?: number - maxRetries?: number - autoStart?: boolean -} +declare namespace Farm { + export function end(workers: Workers, callback?: Function): void; -interface WorkerFarm { - (name: string): Workers; - (name: string, exportedMethods: string[]): Workers; - (options: FarmOptions, name: string): Workers; - (options: FarmOptions, name: string, exportedMethods: string[]): Workers; + export interface Workers { + [x: string]: Workers, + (callback: WorkerCallback): void; + (arg1: any, callback: WorkerCallback): void; + (arg1: any, arg2: any, callback: WorkerCallback): void; + (arg1: any, arg2: any, arg3: any, callback: WorkerCallback): void; + ( + arg1: any, + arg2: any, + arg3: any, + arg4: any, + callback: WorkerCallback, + ): void; + } - end: (workers: Workers) => void; -} + export interface FarmOptions { + maxCallsPerWorker?: number; + maxConcurrentWorkers?: number; + maxConcurrentCallsPerWorker?: 
number; + maxConcurrentCalls?: number; + maxCallTime?: number; + maxRetries?: number; + autoStart?: boolean; + workerOptions?: ForkOptions; + } -declare module "worker-farm" { - const workerFarm: WorkerFarm; - export = workerFarm; + export type WorkerCallback = + | WorkerCallback0 + | WorkerCallback1 + | WorkerCallback2 + | WorkerCallback3 + | WorkerCallback4; } diff --git a/node_modules/worker-farm/lib/child/index.js b/node_modules/worker-farm/lib/child/index.js index f91e08433ab1f..78f63371392af 100644 --- a/node_modules/worker-farm/lib/child/index.js +++ b/node_modules/worker-farm/lib/child/index.js @@ -29,7 +29,7 @@ function handle (data) { _args[0][key] = e[key] }) } - process.send({ idx: idx, child: child, args: _args }) + process.send({ owner: 'farm', idx: idx, child: child, args: _args }) } , exec @@ -46,7 +46,11 @@ function handle (data) { process.on('message', function (data) { + if (data.owner !== 'farm') { + return; + } + if (!$module) return $module = require(data.module) - if (data == 'die') return process.exit(0) + if (data.event == 'die') return process.exit(0) handle(data) }) diff --git a/node_modules/worker-farm/lib/farm.js b/node_modules/worker-farm/lib/farm.js index ef0ab0e1052c7..60720dc561f6c 100644 --- a/node_modules/worker-farm/lib/farm.js +++ b/node_modules/worker-farm/lib/farm.js @@ -10,6 +10,7 @@ const DEFAULT_OPTIONS = { , maxRetries : Infinity , forcedKillTime : 100 , autoStart : false + , onChild : function() {} } const fork = require('./fork') @@ -29,8 +30,8 @@ function Farm (options, path) { Farm.prototype.mkhandle = function (method) { return function () { let args = Array.prototype.slice.call(arguments) - if (this.activeCalls >= this.options.maxConcurrentCalls) { - let err = new MaxConcurrentCallsError('Too many concurrent calls (' + this.activeCalls + ')') + if (this.activeCalls + this.callQueue.length >= this.options.maxConcurrentCalls) { + let err = new MaxConcurrentCallsError('Too many concurrent calls (active: ' + this.activeCalls + ', queued: ' + this.callQueue.length + ')') if (typeof args[args.length - 1] == 'function') return process.nextTick(args[args.length - 1].bind(null, err)) throw err @@ -113,7 +114,14 @@ Farm.prototype.startChild = function () { , exitCode : null } - forked.child.on('message', this.receive.bind(this)) + this.options.onChild(forked.child); + + forked.child.on('message', function(data) { + if (data.owner !== 'farm') { + return; + } + this.receive(data); + }.bind(this)) forked.child.once('exit', function (code) { c.exitCode = code this.onExit(id) @@ -128,7 +136,7 @@ Farm.prototype.startChild = function () { Farm.prototype.stopChild = function (childId) { let child = this.children[childId] if (child) { - child.send('die') + child.send({owner: 'farm', event: 'die'}) setTimeout(function () { if (child.exitCode === null) child.child.kill('SIGKILL') @@ -234,7 +242,8 @@ Farm.prototype.send = function (childId, call) { this.activeCalls++ child.send({ - idx : idx + owner : 'farm' + , idx : idx , child : childId , method : call.method , args : call.args diff --git a/node_modules/worker-farm/lib/fork.js b/node_modules/worker-farm/lib/fork.js index 2843df48474c5..5a035d9749e88 100644 --- a/node_modules/worker-farm/lib/fork.js +++ b/node_modules/worker-farm/lib/fork.js @@ -20,7 +20,7 @@ function fork (forkModule, workerOptions) { // this *should* be picked up by onExit and the operation requeued }) - child.send({ module: forkModule }) + child.send({ owner: 'farm', module: forkModule }) // return a send() function for this child return { 
diff --git a/node_modules/worker-farm/lib/index.js b/node_modules/worker-farm/lib/index.js index fe574e59b52f7..8c142227355a7 100644 --- a/node_modules/worker-farm/lib/index.js +++ b/node_modules/worker-farm/lib/index.js @@ -26,7 +26,7 @@ function end (api, callback) { for (let i = 0; i < farms.length; i++) if (farms[i] && farms[i].api === api) return farms[i].farm.end(callback) - process.nextTick(callback.bind(null, 'Worker farm not found!')) + process.nextTick(callback.bind(null, new Error('Worker farm not found!'))) } diff --git a/node_modules/worker-farm/package.json b/node_modules/worker-farm/package.json index ba43c44d4bea4..3d6181ae95783 100644 --- a/node_modules/worker-farm/package.json +++ b/node_modules/worker-farm/package.json @@ -1,45 +1,43 @@ { - "_args": [ - [ - "worker-farm@1.6.0", - "/Users/rebecca/code/npm" - ] - ], - "_from": "worker-farm@1.6.0", - "_id": "worker-farm@1.6.0", + "_from": "worker-farm@1.7.0", + "_id": "worker-farm@1.7.0", "_inBundle": false, - "_integrity": "sha512-6w+3tHbM87WnSWnENBUvA2pxJPLhQUg5LKwUQHq3r+XPhIM+Gh2R5ycbwPCyuGbNg+lPgdcnQUhuC02kJCvffQ==", + "_integrity": "sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw==", "_location": "/worker-farm", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "worker-farm@1.6.0", + "raw": "worker-farm@1.7.0", "name": "worker-farm", "escapedName": "worker-farm", - "rawSpec": "1.6.0", + "rawSpec": "1.7.0", "saveSpec": null, - "fetchSpec": "1.6.0" + "fetchSpec": "1.7.0" }, "_requiredBy": [ + "#USER", "/", "/libcipm" ], - "_resolved": "https://registry.npmjs.org/worker-farm/-/worker-farm-1.6.0.tgz", - "_spec": "1.6.0", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/worker-farm/-/worker-farm-1.7.0.tgz", + "_shasum": "26a94c5391bbca926152002f69b84a4bf772e5a8", + "_spec": "worker-farm@1.7.0", + "_where": "/Users/isaacs/dev/npm/cli", "authors": [ "Rod Vagg @rvagg (https://github.com/rvagg)" ], "bugs": { "url": "https://github.com/rvagg/node-worker-farm/issues" }, + "bundleDependencies": false, "dependencies": { "errno": "~0.1.7" }, + "deprecated": false, "description": "Distribute processing tasks to child processes with an über-simple API and baked-in durability & custom concurrency options.", "devDependencies": { - "tape": "~4.9.0" + "tape": "~4.10.1" }, "homepage": "https://github.com/rvagg/node-worker-farm", "keywords": [ @@ -59,5 +57,5 @@ "test": "node ./tests/" }, "types": "./index.d.ts", - "version": "1.6.0" + "version": "1.7.0" } diff --git a/node_modules/worker-farm/tests/index.js b/node_modules/worker-farm/tests/index.js index ec9deabedda6c..e6be7bb7aabd6 100644 --- a/node_modules/worker-farm/tests/index.js +++ b/node_modules/worker-farm/tests/index.js @@ -51,6 +51,22 @@ tape('simple, exports.fn test', function (t) { }) +tape('on child', function (t) { + t.plan(2) + + let child = workerFarm({ onChild: function(subprocess) { childPid = subprocess.pid } }, childPath) + , childPid = null; + + child(0, function(err, pid) { + t.equal(childPid, pid) + }) + + workerFarm.end(child, function () { + t.ok(true, 'workerFarm ended') + }) +}) + + // use the returned pids to check that we're using a single child process // when maxConcurrentWorkers = 1 tape('single worker', function (t) { @@ -303,10 +319,12 @@ tape('multiple concurrent calls', function (t) { child(defer, function () { if (++cbc == count) { let time = Date.now() - start - // (defer * (count / callsPerWorker + 1)) - if precise it'd be 
count/callsPerWorker + let min = defer * 1.5 + // (defer * (count / callsPerWorker + 2)) - if precise it'd be count/callsPerWorker // but accounting for IPC and other overhead, we need to give it a bit of extra time, - // hence the +1 - t.ok(time > (defer * 1.5) && time < (defer * (count / callsPerWorker + 1)), 'processed tasks concurrently (' + time + 'ms)') + // hence the +2 + let max = defer * (count / callsPerWorker + 2) + t.ok(time > min && time < max, 'processed tasks concurrently (' + time + ' > ' + min + ' && ' + time + ' < ' + max + ')') workerFarm.end(child, function () { t.ok(true, 'workerFarm ended') }) @@ -474,6 +492,40 @@ tape('test maxConcurrentCalls', function (t) { }) +tape('test maxConcurrentCalls + queue', function (t) { + t.plan(13) + + let child = workerFarm({ maxConcurrentCalls: 4, maxConcurrentWorkers: 2, maxConcurrentCallsPerWorker: 1 }, childPath) + + child(20, function (err) { console.log('ended short1'); t.notOk(err, 'no error, short call 1') }) + child(20, function (err) { console.log('ended short2'); t.notOk(err, 'no error, short call 2') }) + child(300, function (err) { t.notOk(err, 'no error, long call 1') }) + child(300, function (err) { t.notOk(err, 'no error, long call 2') }) + child(20, function (err) { + t.ok(err, 'short call 3 should error') + t.equal(err.type, 'MaxConcurrentCallsError', 'correct error type') + }) + child(20, function (err) { + t.ok(err, 'short call 4 should error') + t.equal(err.type, 'MaxConcurrentCallsError', 'correct error type') + }) + + // cross fingers and hope the two short jobs have ended + setTimeout(function () { + child(20, function (err) { t.notOk(err, 'no error, delayed short call 1') }) + child(20, function (err) { t.notOk(err, 'no error, delayed short call 2') }) + child(20, function (err) { + t.ok(err, 'delayed short call 3 should error') + t.equal(err.type, 'MaxConcurrentCallsError', 'correct error type') + }) + + workerFarm.end(child, function () { + t.ok(true, 'workerFarm ended') + }) + }, 250) +}) + + // this test should not keep the process running! if the test process // doesn't die then the problem is here tape('test timeout kill', function (t) { @@ -529,12 +581,12 @@ tape('custom arguments can be passed to "fork"', function (t) { let cwd = fs.realpathSync(os.tmpdir()) , workerOptions = { cwd : cwd - , execArgv : ['--no-warnings'] + , execArgv : ['--expose-gc'] } , child = workerFarm({ maxConcurrentWorkers: 1, maxRetries: 5, workerOptions: workerOptions}, childPath, ['args']) child.args(function (err, result) { - t.equal(result.execArgv[0], '--no-warnings', 'flags passed (overridden default)') + t.equal(result.execArgv[0], '--expose-gc', 'flags passed (overridden default)') t.equal(result.cwd, cwd, 'correct cwd folder') }) diff --git a/node_modules/write-file-atomic/CHANGELOG.md b/node_modules/write-file-atomic/CHANGELOG.md new file mode 100644 index 0000000000000..920ae2cb4d1fd --- /dev/null +++ b/node_modules/write-file-atomic/CHANGELOG.md @@ -0,0 +1,25 @@ +# 2.4.3 + +* Ignore errors raised by `fs.closeSync` when cleaning up after a write + error. + +# 2.4.2 + +* A pair of patches to fix some fd leaks. We would leak fds with sync use + when errors occured and with async use any time fsync was not in use. (#34) + +# 2.4.1 + +* Fix a bug where `signal-exit` instances would be leaked. This was fixed when addressing #35. + +# 2.4.0 + +## Features + +* Allow chown and mode options to be set to false to disable the defaulting behavior. 
(#20) +* Support passing encoding strings in options slot for compat with Node.js API. (#31) +* Add support for running inside of worker threads (#37) + +## Fixes + +* Remove unneeded call when returning success (#36) diff --git a/node_modules/write-file-atomic/README.md b/node_modules/write-file-atomic/README.md index af385f3b70c69..ca28e99a2b08e 100644 --- a/node_modules/write-file-atomic/README.md +++ b/node_modules/write-file-atomic/README.md @@ -8,20 +8,21 @@ atomic and allows you set ownership (uid/gid of the file). * filename **String** * data **String** | **Buffer** -* options **Object** - * chown **Object** +* options **Object** | **String** + * chown **Object** default, uid & gid of existing file, if any * uid **Number** * gid **Number** * encoding **String** | **Null** default = 'utf8' * fsync **Boolean** default = true - * mode **Number** default = 438 (aka 0666 in Octal) + * mode **Number** default, from existing file, if any * Promise **Object** default = native Promise object -callback **Function** +* callback **Function** Atomically and asynchronously writes data to a file, replacing the file if it already exists. data can be a string or a buffer. The file is initially named `filename + "." + murmurhex(__filename, process.pid, ++invocations)`. +Note that `require('worker_threads').threadId` is used in addition to `process.pid` if running inside of a worker thread. If writeFile completes successfully then, if passed the **chown** option it will change the ownership of the file. Finally it renames the file back to the filename you specified. If it encounters errors at any of these steps it will attempt to unlink the temporary file and then @@ -29,9 +30,15 @@ pass the error back to the caller. If multiple writes are concurrently issued to the same file, the write operations are put into a queue and serialized in the order they were called, using Promises. Native promises are used by default, but you can inject your own promise-like object with the **Promise** option. Writes to different files are still executed in parallel. If provided, the **chown** option requires both **uid** and **gid** properties or else -you'll get an error. +you'll get an error. If **chown** is not specified it will default to using +the owner of the previous file. To prevent chown from being run you can +also pass `false`, in which case the file will be created with the current user's credentials. -The **encoding** option is ignored if **data** is a buffer. It defaults to 'utf8'. +If **mode** is not specified, it will default to using the permissions from +an existing file, if any. Explicitly setting this to `false` removes this default, resulting +in a file created with the system default permissions. + +If options is a String, it's assumed to be the **encoding** option. The **encoding** option is ignored if **data** is a buffer. It defaults to 'utf8'. If the **fsync** option is **false**, writeFile will skip the final fsync call.
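The README hunk above documents the option behaviours introduced in write-file-atomic 2.4.x: the options slot may be a bare encoding string, chown and mode now default to the existing file's owner and permissions, and passing `false` opts out of that defaulting. As a quick illustration only (not part of this changeset), here is a minimal usage sketch of those option shapes; the file names and payloads are hypothetical:

```js
// Minimal sketch of the option shapes described in the README diff above.
// File names and data are hypothetical; only the API shapes come from the docs.
const writeFileAtomic = require('write-file-atomic')

// The options slot may be a bare encoding string (2.4.0+).
writeFileAtomic('config.json', JSON.stringify({ ok: true }), 'utf8', function (err) {
  if (err) throw err
  // chown/mode were not given, so they are copied from any existing config.json
})

// Pass `false` to opt out of the chown/mode defaulting; `fsync: false`
// skips the final fsync call before the rename.
writeFileAtomic('cache.bin', Buffer.from([1, 2, 3]), { chown: false, mode: false, fsync: false }, function (err) {
  if (err) throw err
})

// The sync variant accepts the same options object (or an encoding string).
writeFileAtomic.sync('notes.txt', 'hello\n', { encoding: 'utf8' })
```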
diff --git a/node_modules/write-file-atomic/index.js b/node_modules/write-file-atomic/index.js index 3b5607d154f46..64ae987c011a9 100644 --- a/node_modules/write-file-atomic/index.js +++ b/node_modules/write-file-atomic/index.js @@ -10,11 +10,26 @@ var onExit = require('signal-exit') var path = require('path') var activeFiles = {} +// if we run inside of a worker_thread, `process.pid` is not unique +/* istanbul ignore next */ +var threadId = (function getId () { + try { + var workerThreads = require('worker_threads') + + /// if we are in main thread, this is set to `0` + return workerThreads.threadId + } catch (e) { + // worker_threads are not available, fallback to 0 + return 0 + } +})() + var invocations = 0 function getTmpname (filename) { return filename + '.' + MurmurHash3(__filename) .hash(String(process.pid)) + .hash(String(threadId)) .hash(String(++invocations)) .result() } @@ -28,17 +43,23 @@ function cleanupOnExit (tmpfile) { } function writeFile (filename, data, options, callback) { - if (options instanceof Function) { - callback = options - options = null + if (options) { + if (options instanceof Function) { + callback = options + options = {} + } else if (typeof options === 'string') { + options = { encoding: options } + } + } else { + options = {} } - if (!options) options = {} var Promise = options.Promise || global.Promise var truename var fd var tmpfile - var removeOnExit = cleanupOnExit(() => tmpfile) + /* istanbul ignore next -- The closure only gets called when onExit triggers */ + var removeOnExitHandler = onExit(cleanupOnExit(() => tmpfile)) var absoluteName = path.resolve(filename) new Promise(function serializeSameFile (resolve) { @@ -66,10 +87,10 @@ function writeFile (filename, data, options, callback) { else { options = Object.assign({}, options) - if (!options.mode) { + if (options.mode == null) { options.mode = stats.mode } - if (!options.chown && process.getuid) { + if (options.chown == null && process.getuid) { options.chown = { uid: stats.uid, gid: stats.gid } } resolve() @@ -100,15 +121,18 @@ function writeFile (filename, data, options, callback) { } else resolve() }) }).then(function syncAndClose () { - if (options.fsync !== false) { - return new Promise(function (resolve, reject) { + return new Promise(function (resolve, reject) { + if (options.fsync !== false) { fs.fsync(fd, function (err) { - if (err) reject(err) + if (err) fs.close(fd, () => reject(err)) else fs.close(fd, resolve) }) - }) - } + } else { + fs.close(fd, resolve) + } + }) }).then(function chown () { + fd = null if (options.chown) { return new Promise(function (resolve, reject) { fs.chown(tmpfile, options.chown.uid, options.chown.gid, function (err) { @@ -134,12 +158,16 @@ function writeFile (filename, data, options, callback) { }) }) }).then(function success () { - removeOnExit() + removeOnExitHandler() callback() - }).catch(function fail (err) { - removeOnExit() - fs.unlink(tmpfile, function () { - callback(err) + }, function fail (err) { + return new Promise(resolve => { + return fd ? 
fs.close(fd, resolve) : resolve() + }).then(() => { + removeOnExitHandler() + fs.unlink(tmpfile, function () { + callback(err) + }) }) }).then(function checkQueue () { activeFiles[absoluteName].shift() // remove the element added by serializeSameFile @@ -150,7 +178,8 @@ function writeFile (filename, data, options, callback) { } function writeFileSync (filename, data, options) { - if (!options) options = {} + if (typeof options === 'string') options = { encoding: options } + else if (!options) options = {} try { filename = fs.realpathSync(filename) } catch (ex) { @@ -158,26 +187,29 @@ function writeFileSync (filename, data, options) { } var tmpfile = getTmpname(filename) - try { - if (!options.mode || !options.chown) { - // Either mode or chown is not explicitly set - // Default behavior is to copy it from original file - try { - var stats = fs.statSync(filename) - options = Object.assign({}, options) - if (!options.mode) { - options.mode = stats.mode - } - if (!options.chown && process.getuid) { - options.chown = { uid: stats.uid, gid: stats.gid } - } - } catch (ex) { - // ignore stat errors + if (!options.mode || !options.chown) { + // Either mode or chown is not explicitly set + // Default behavior is to copy it from original file + try { + var stats = fs.statSync(filename) + options = Object.assign({}, options) + if (!options.mode) { + options.mode = stats.mode } + if (!options.chown && process.getuid) { + options.chown = { uid: stats.uid, gid: stats.gid } + } + } catch (ex) { + // ignore stat errors } + } - var removeOnExit = onExit(cleanupOnExit(tmpfile)) - var fd = fs.openSync(tmpfile, 'w', options.mode) + var fd + var cleanup = cleanupOnExit(tmpfile) + var removeOnExitHandler = onExit(cleanup) + + try { + fd = fs.openSync(tmpfile, 'w', options.mode) if (Buffer.isBuffer(data)) { fs.writeSync(fd, data, 0, data.length, 0) } else if (data != null) { @@ -190,10 +222,17 @@ function writeFileSync (filename, data, options) { if (options.chown) fs.chownSync(tmpfile, options.chown.uid, options.chown.gid) if (options.mode) fs.chmodSync(tmpfile, options.mode) fs.renameSync(tmpfile, filename) - removeOnExit() + removeOnExitHandler() } catch (err) { - removeOnExit() - try { fs.unlinkSync(tmpfile) } catch (e) {} + if (fd) { + try { + fs.closeSync(fd) + } catch (ex) { + // ignore close errors at this stage, error may have closed fd already. 
+ } + } + removeOnExitHandler() + cleanup() throw err } } diff --git a/node_modules/write-file-atomic/package.json b/node_modules/write-file-atomic/package.json index cdf47414a40d1..bffa0bbbb3472 100644 --- a/node_modules/write-file-atomic/package.json +++ b/node_modules/write-file-atomic/package.json @@ -1,35 +1,32 @@ { - "_args": [ - [ - "write-file-atomic@2.3.0", - "/Users/rebecca/code/npm" - ] - ], - "_from": "write-file-atomic@2.3.0", - "_id": "write-file-atomic@2.3.0", + "_from": "write-file-atomic@^2.4.2", + "_id": "write-file-atomic@2.4.3", "_inBundle": false, - "_integrity": "sha512-xuPeK4OdjWqtfi59ylvVL0Yn35SF3zgcAcv7rBPFHVaEapaDr4GdGgm3j7ckTwH9wHL7fGmgfAnb0+THrHb8tA==", + "_integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==", "_location": "/write-file-atomic", "_phantomChildren": {}, "_requested": { - "type": "version", + "type": "range", "registry": true, - "raw": "write-file-atomic@2.3.0", + "raw": "write-file-atomic@^2.4.2", "name": "write-file-atomic", "escapedName": "write-file-atomic", - "rawSpec": "2.3.0", + "rawSpec": "^2.4.2", "saveSpec": null, - "fetchSpec": "2.3.0" + "fetchSpec": "^2.4.2" }, "_requiredBy": [ + "#USER", "/", "/bin-links", + "/caching-transform", "/configstore", "/tap" ], - "_resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.3.0.tgz", - "_spec": "2.3.0", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", + "_shasum": "1fd2e9ae1df3e75b8d8c367443c692d4ca81f481", + "_spec": "write-file-atomic@^2.4.2", + "_where": "/Users/isaacs/dev/npm/cli", "author": { "name": "Rebecca Turner", "email": "me@re-becca.org", @@ -38,18 +35,20 @@ "bugs": { "url": "https://github.com/iarna/write-file-atomic/issues" }, + "bundleDependencies": false, "dependencies": { "graceful-fs": "^4.1.11", "imurmurhash": "^0.1.4", "signal-exit": "^3.0.2" }, + "deprecated": false, "description": "Write files in an atomic fashion w/configurable ownership", "devDependencies": { "mkdirp": "^0.5.1", "require-inject": "^1.4.0", "rimraf": "^2.5.4", - "standard": "^10.0.2", - "tap": "^10.3.2" + "standard": "^12.0.1", + "tap": "^12.1.3" }, "files": [ "index.js" @@ -67,7 +66,10 @@ "url": "git+ssh://git@github.com/iarna/write-file-atomic.git" }, "scripts": { + "postpublish": "git push origin --follow-tags", + "postversion": "npm publish", + "preversion": "npm test", "test": "standard && tap --100 test/*.js" }, - "version": "2.3.0" + "version": "2.4.3" } diff --git a/node_modules/yallist/iterator.js b/node_modules/yallist/iterator.js index 4a15bf22c4003..d41c97a19f984 100644 --- a/node_modules/yallist/iterator.js +++ b/node_modules/yallist/iterator.js @@ -1,7 +1,8 @@ -var Yallist = require('./yallist.js') - -Yallist.prototype[Symbol.iterator] = function* () { - for (let walker = this.head; walker; walker = walker.next) { - yield walker.value +'use strict' +module.exports = function (Yallist) { + Yallist.prototype[Symbol.iterator] = function* () { + for (let walker = this.head; walker; walker = walker.next) { + yield walker.value + } } } diff --git a/node_modules/yallist/package.json b/node_modules/yallist/package.json index f56d6b6a33497..a478663faf070 100644 --- a/node_modules/yallist/package.json +++ b/node_modules/yallist/package.json @@ -1,27 +1,27 @@ { - "_from": "yallist@^2.1.2", - "_id": "yallist@2.1.2", + "_from": "yallist@^3.0.2", + "_id": "yallist@3.0.3", "_inBundle": false, - "_integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=", + 
"_integrity": "sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==", "_location": "/yallist", "_phantomChildren": {}, "_requested": { "type": "range", "registry": true, - "raw": "yallist@^2.1.2", + "raw": "yallist@^3.0.2", "name": "yallist", "escapedName": "yallist", - "rawSpec": "^2.1.2", + "rawSpec": "^3.0.2", "saveSpec": null, - "fetchSpec": "^2.1.2" + "fetchSpec": "^3.0.2" }, "_requiredBy": [ "/lru-cache" ], - "_resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "_shasum": "1c11f9218f076089a47dd512f93c6699a6a81d52", - "_spec": "yallist@^2.1.2", - "_where": "/Users/rebecca/code/npm/node_modules/lru-cache", + "_resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", + "_shasum": "b4b049e314be545e3ce802236d6cd22cd91c3de9", + "_spec": "yallist@^3.0.2", + "_where": "/Users/isaacs/dev/npm/cli/node_modules/lru-cache", "author": { "name": "Isaac Z. Schlueter", "email": "i@izs.me", @@ -35,7 +35,7 @@ "deprecated": false, "description": "Yet Another Linked List", "devDependencies": { - "tap": "^10.3.0" + "tap": "^12.1.0" }, "directories": { "test": "test" @@ -58,5 +58,5 @@ "preversion": "npm test", "test": "tap test/*.js --100" }, - "version": "2.1.2" + "version": "3.0.3" } diff --git a/node_modules/yallist/yallist.js b/node_modules/yallist/yallist.js index 518d23330b936..b0ab36cf31b7a 100644 --- a/node_modules/yallist/yallist.js +++ b/node_modules/yallist/yallist.js @@ -1,3 +1,4 @@ +'use strict' module.exports = Yallist Yallist.Node = Node @@ -368,3 +369,8 @@ function Node (value, prev, next, list) { this.next = null } } + +try { + // add if support for Symbol.iterator is present + require('./iterator.js')(Yallist) +} catch (er) {} diff --git a/node_modules/yargs/CHANGELOG.md b/node_modules/yargs/CHANGELOG.md index 0eaf2f33edddb..2cccca04c4113 100644 --- a/node_modules/yargs/CHANGELOG.md +++ b/node_modules/yargs/CHANGELOG.md @@ -2,6 +2,25 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +# [11.1.1](https://github.com/yargs/yargs/compare/v11.1.0...v11.1.1) (2019-10-06) + +* backport security fix for os-locale. 
+ + +# [11.1.0](https://github.com/yargs/yargs/compare/v11.0.0...v11.1.0) (2018-03-04) + + +### Bug Fixes + +* choose correct config directory when require.main does not exist ([#1056](https://github.com/yargs/yargs/issues/1056)) ([a04678c](https://github.com/yargs/yargs/commit/a04678c)) + + +### Features + +* allow hidden options to be displayed with --show-hidden ([#1061](https://github.com/yargs/yargs/issues/1061)) ([ea862ae](https://github.com/yargs/yargs/commit/ea862ae)) +* extend *.rc files in addition to json ([#1080](https://github.com/yargs/yargs/issues/1080)) ([11691a6](https://github.com/yargs/yargs/commit/11691a6)) + # [11.0.0](https://github.com/yargs/yargs/compare/v10.1.2...v11.0.0) (2018-01-22) diff --git a/node_modules/yargs/lib/apply-extends.js b/node_modules/yargs/lib/apply-extends.js index 3005848e3cdb7..530b022ac57b5 100644 --- a/node_modules/yargs/lib/apply-extends.js +++ b/node_modules/yargs/lib/apply-extends.js @@ -6,9 +6,9 @@ const YError = require('./yerror') let previouslyVisitedConfigs = [] -function checkForCircularExtends (path) { - if (previouslyVisitedConfigs.indexOf(path) > -1) { - throw new YError(`Circular extended configurations: '${path}'.`) +function checkForCircularExtends (cfgPath) { + if (previouslyVisitedConfigs.indexOf(cfgPath) > -1) { + throw new YError(`Circular extended configurations: '${cfgPath}'.`) } } diff --git a/node_modules/yargs/package.json b/node_modules/yargs/package.json index 1fe51fedd4392..fd0a1265c23dd 100644 --- a/node_modules/yargs/package.json +++ b/node_modules/yargs/package.json @@ -1,8 +1,8 @@ { "_from": "yargs@^11.0.0", - "_id": "yargs@11.0.0", + "_id": "yargs@11.1.1", "_inBundle": false, - "_integrity": "sha512-Rjp+lMYQOWtgqojx1dEWorjCofi1YN7AoFvYV7b1gx/7dAAeuI4kN5SZiEvr0ZmsZTOpDRcCqrpI10L31tFkBw==", + "_integrity": "sha512-PRU7gJrJaXv3q3yQZ/+/X6KBswZiaQ+zOmdprZcouPYtQgvNU35i+68M4b1ZHLZtYFT5QObFLV+ZkmJYcwKdiw==", "_location": "/yargs", "_phantomChildren": {}, "_requested": { @@ -18,10 +18,10 @@ "_requiredBy": [ "/libnpx" ], - "_resolved": "https://registry.npmjs.org/yargs/-/yargs-11.0.0.tgz", - "_shasum": "c052931006c5eee74610e5fc0354bedfd08a201b", + "_resolved": "https://registry.npmjs.org/yargs/-/yargs-11.1.1.tgz", + "_shasum": "5052efe3446a4df5ed669c995886cc0f13702766", "_spec": "yargs@^11.0.0", - "_where": "/Users/rebecca/code/npm/node_modules/libnpx", + "_where": "/Users/mperrotte/npminc/cli/node_modules/libnpx", "bugs": { "url": "https://github.com/yargs/yargs/issues" }, @@ -31,7 +31,7 @@ "decamelize": "^1.1.1", "find-up": "^2.1.0", "get-caller-file": "^1.0.1", - "os-locale": "^2.0.0", + "os-locale": "^3.1.0", "require-directory": "^2.1.1", "require-main-filename": "^1.0.1", "set-blocking": "^2.0.0", @@ -47,7 +47,7 @@ "chalk": "^1.1.3", "coveralls": "^2.11.11", "cpr": "^2.0.0", - "cross-spawn": "^5.0.1", + "cross-spawn": "^6.0.4", "es6-promise": "^4.0.2", "hashish": "0.0.4", "mocha": "^3.0.1", @@ -97,5 +97,5 @@ "**/example/**" ] }, - "version": "11.0.0" + "version": "11.1.1" } diff --git a/node_modules/yargs/yargs.js b/node_modules/yargs/yargs.js index 9b50b8bfeb0a4..38f1b2ea6e67f 100644 --- a/node_modules/yargs/yargs.js +++ b/node_modules/yargs/yargs.js @@ -476,17 +476,17 @@ function Yargs (processArgs, cwd, parentRequire) { return self } - self.pkgConf = function pkgConf (key, path) { - argsert(' [string]', [key, path], arguments.length) + self.pkgConf = function pkgConf (key, rootPath) { + argsert(' [string]', [key, rootPath], arguments.length) let conf = null // prefer cwd to require-main-filename in this method 
// since we're looking for e.g. "nyc" config in nyc consumer // rather than "yargs" config in nyc (where nyc is the main filename) - const obj = pkgUp(path || cwd) + const obj = pkgUp(rootPath || cwd) // If an object exists in the key, add it to options.configObjects if (obj[key] && typeof obj[key] === 'object') { - conf = applyExtends(obj[key], path || cwd) + conf = applyExtends(obj[key], rootPath || cwd) options.configObjects = (options.configObjects || []).concat(conf) } @@ -494,16 +494,24 @@ function Yargs (processArgs, cwd, parentRequire) { } const pkgs = {} - function pkgUp (path) { - const npath = path || '*' + function pkgUp (rootPath) { + const npath = rootPath || '*' if (pkgs[npath]) return pkgs[npath] const findUp = require('find-up') let obj = {} try { + let startDir = rootPath || require('require-main-filename')(parentRequire || require) + + // When called in an environment that lacks require.main.filename, such as a jest test runner, + // startDir is already process.cwd(), and should not be shortened. + // Whether or not it is _actually_ a directory (e.g., extensionless bin) is irrelevant, find-up handles it. + if (!rootPath && path.extname(startDir)) { + startDir = path.dirname(startDir) + } + const pkgJsonPath = findUp.sync('package.json', { - cwd: path || require('path').dirname(require('require-main-filename')(parentRequire || require)), - normalize: false + cwd: startDir }) obj = JSON.parse(fs.readFileSync(pkgJsonPath)) } catch (noop) {} diff --git a/package-lock.json b/package-lock.json index 11e773be4e829..e759c9b974987 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,52 +1,188 @@ { "name": "npm", - "version": "6.5.0", + "version": "6.14.2", "lockfileVersion": 1, "requires": true, "dependencies": { - "@types/caseless": { - "version": "0.12.1", - "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.1.tgz", - "integrity": "sha512-FhlMa34NHp9K5MY1Uz8yb+ZvuX0pnvn3jScRSNAb75KHGB8d3rEU6hqMs3Z2vjuytcMfRg6c5CHMc3wtYyD2/A==", + "@babel/code-frame": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.0.0.tgz", + "integrity": "sha512-OfC2uemaknXr87bdLUkWog7nYuliM9Ij5HUcajsVcMCpQrcLmtxRbVFTIqmcSkSeYRBFBRxs2FiUqFJDLdiebA==", + "dev": true, + "requires": { + "@babel/highlight": "^7.0.0" + } + }, + "@babel/generator": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.4.4.tgz", + "integrity": "sha512-53UOLK6TVNqKxf7RUh8NE851EHRxOOeVXKbK2bivdb+iziMyk03Sr4eaE9OELCbyZAAafAKPDwF2TPUES5QbxQ==", + "dev": true, + "requires": { + "@babel/types": "^7.4.4", + "jsesc": "^2.5.1", + "lodash": "^4.17.11", + "source-map": "^0.5.0", + "trim-right": "^1.0.1" + } + }, + "@babel/helper-function-name": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.1.0.tgz", + "integrity": "sha512-A95XEoCpb3TO+KZzJ4S/5uW5fNe26DjBGqf1o9ucyLyCmi1dXq/B3c8iaWTfBk3VvetUxl16e8tIrd5teOCfGw==", + "dev": true, + "requires": { + "@babel/helper-get-function-arity": "^7.0.0", + "@babel/template": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "@babel/helper-get-function-arity": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.0.0.tgz", + "integrity": "sha512-r2DbJeg4svYvt3HOS74U4eWKsUAMRH01Z1ds1zx8KNTPtpTL5JAsdFv8BNyOpVqdFhHkkRDIg5B4AsxmkjAlmQ==", + "dev": true, + "requires": { + "@babel/types": "^7.0.0" + } + }, + "@babel/helper-split-export-declaration": { + 
"version": "7.4.4", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.4.4.tgz", + "integrity": "sha512-Ro/XkzLf3JFITkW6b+hNxzZ1n5OQ80NvIUdmHspih1XAhtN3vPTuUFT4eQnela+2MaZ5ulH+iyP513KJrxbN7Q==", + "dev": true, + "requires": { + "@babel/types": "^7.4.4" + } + }, + "@babel/highlight": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.0.0.tgz", + "integrity": "sha512-UFMC4ZeFC48Tpvj7C8UgLvtkaUuovQX+5xNWrsIoMG8o2z+XFKjKaN9iVmS84dPwVN00W4wPmqvYoZF3EGAsfw==", + "dev": true, + "requires": { + "chalk": "^2.0.0", + "esutils": "^2.0.2", + "js-tokens": "^4.0.0" + }, + "dependencies": { + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + } + } + }, + "@babel/parser": { + "version": "7.4.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.4.5.tgz", + "integrity": "sha512-9mUqkL1FF5T7f0WDFfAoDdiMVPWsdD1gZYzSnaXsxUCUqzuch/8of9G3VUSNiZmMBoRxT3neyVsqeiL/ZPcjew==", "dev": true }, - "@types/form-data": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/@types/form-data/-/form-data-2.2.1.tgz", - "integrity": "sha512-JAMFhOaHIciYVh8fb5/83nmuO/AHwmto+Hq7a9y8FzLDcC1KCU344XDOMEmahnrTFlHjgh4L0WJFczNIX2GxnQ==", + "@babel/template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.4.4.tgz", + "integrity": "sha512-CiGzLN9KgAvgZsnivND7rkA+AeJ9JB0ciPOD4U59GKbQP2iQl+olF1l76kJOupqidozfZ32ghwBEJDhnk9MEcw==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "@babel/parser": "^7.4.4", + "@babel/types": "^7.4.4" + } + }, + "@babel/traverse": { + "version": "7.4.5", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.4.5.tgz", + "integrity": "sha512-Vc+qjynwkjRmIFGxy0KYoPj4FdVDxLej89kMHFsWScq999uX+pwcX4v9mWRjW0KcAYTPAuVQl2LKP1wEVLsp+A==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "@babel/generator": "^7.4.4", + "@babel/helper-function-name": "^7.1.0", + "@babel/helper-split-export-declaration": "^7.4.4", + "@babel/parser": "^7.4.5", + "@babel/types": "^7.4.4", + "debug": "^4.1.0", + "globals": "^11.1.0", + "lodash": "^4.17.11" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + } + } + }, + "@babel/types": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.4.4.tgz", + "integrity": "sha512-dOllgYdnEFOebhkKCjzSVFqw/PmmB8pH6RGOWkY4GsboQNd47b1fBThBSwlHAq9alF9vc1M3+6oqR47R50L0tQ==", "dev": true, "requires": { - "@types/node": "*" + "esutils": "^2.0.2", + "lodash": "^4.17.11", + "to-fast-properties": "^2.0.0" } }, + "@blueoak/list": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@blueoak/list/-/list-1.0.2.tgz", + "integrity": "sha512-KyqT0kkdxgbGys9mvo/1Mgdt/LGvUFPCZIK9pWPIfOM2mYzMDd/eVYy4sMP1YqvVI129k0alxRyM53H2MAs/Nw==", + "dev": true + }, + "@types/caseless": { + "version": "0.12.2", + "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.2.tgz", + "integrity": "sha512-6ckxMjBBD8URvjB6J3NcnuAn5Pkl7t3TizAg+xdlzzQGSPSmBcXf8KoIH0ua/i+tio+ZRUHEXp0HEmvaR4kt0w==", + "dev": true + }, 
"@types/node": { - "version": "10.7.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.7.0.tgz", - "integrity": "sha512-dmYIvoQEZWnyQfgrwPCoxztv/93NYQGEiOoQhuI56rJahv9de6Q2apZl3bufV46YJ0OAXdaktIuw4RIRl4DTeA==", + "version": "12.6.8", + "resolved": "https://registry.npmjs.org/@types/node/-/node-12.6.8.tgz", + "integrity": "sha512-aX+gFgA5GHcDi89KG5keey2zf0WfZk/HAQotEamsK2kbey+8yGKcson0hbK8E+v0NArlCJQCqMP161YhV6ZXLg==", "dev": true }, "@types/request": { - "version": "2.47.1", - "resolved": "https://registry.npmjs.org/@types/request/-/request-2.47.1.tgz", - "integrity": "sha512-TV3XLvDjQbIeVxJ1Z3oCTDk/KuYwwcNKVwz2YaT0F5u86Prgc4syDAp6P96rkTQQ4bIdh+VswQIC9zS6NjY7/g==", + "version": "2.48.2", + "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.2.tgz", + "integrity": "sha512-gP+PSFXAXMrd5PcD7SqHeUjdGshAI8vKQ3+AvpQr3ht9iQea+59LOKvKITcQI+Lg+1EIkDP6AFSBUJPWG8GDyA==", "dev": true, "requires": { "@types/caseless": "*", - "@types/form-data": "*", "@types/node": "*", - "@types/tough-cookie": "*" + "@types/tough-cookie": "*", + "form-data": "^2.5.0" + }, + "dependencies": { + "form-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.0.tgz", + "integrity": "sha512-WXieX3G/8side6VIqx44ablyULoGruSde5PNTxoUyo5CeyAMX6nVWUd0rgist/EuX655cjhUhTo1Fo3tRYqbcA==", + "dev": true, + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + } } }, "@types/tough-cookie": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.3.tgz", - "integrity": "sha512-MDQLxNFRLasqS4UlkWMSACMKeSm1x4Q3TxzUC7KQUsh6RK1ZrQ0VEyE3yzXcBu+K8ejVj4wuX32eUG02yNp+YQ==", + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-2.3.5.tgz", + "integrity": "sha512-SCcK7mvGi3+ZNz833RRjFIxrn4gI1PPR3NtuIS+6vMkvmsGjosqTJwRt5bAEFLRz+wtJMWv8+uOnZf2hi2QXTg==", "dev": true }, "JSONStream": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.4.tgz", - "integrity": "sha512-Y7vfi3I5oMOYIr+WxV8NZxDSwcbNgzdKYsTNInmycOq9bUYwGg9ryu57Wg5NLmCjqdFPNUmpMBo3kSJN9tCbXg==", + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz", + "integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==", "requires": { "jsonparse": "^1.2.0", "through": ">=2.2.7 <3" @@ -58,9 +194,9 @@ "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" }, "acorn": { - "version": "5.5.3", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.5.3.tgz", - "integrity": "sha512-jd5MkIUlbbmb07nXH0DT3y7rDVtkzDi4XZOUVWAer8ajmF/DTSSbl5oNFyDOl/OXA33Bl79+ypHhl2pN20VeOQ==", + "version": "5.7.3", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.3.tgz", + "integrity": "sha512-T/zvzYRfbVojPWahDsE5evJdHb3oJoQfFbsrKM7w5Zcs++Tr257tia3BmMP8XYVjp1S9RZXQMh7gao96BlqZOw==", "dev": true }, "acorn-jsx": { @@ -81,17 +217,17 @@ } }, "agent-base": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.2.0.tgz", - "integrity": "sha512-c+R/U5X+2zz2+UCrCFv6odQzJdoqI+YecuhnAJLa1zYaMc13zPfwMwZrr91Pd1DYNo/yPRbiM4WVf9whgwFsIg==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", + "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", "requires": { "es6-promisify": "^5.0.0" 
} }, "agentkeepalive": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-3.4.1.tgz", - "integrity": "sha512-MPIwsZU9PP9kOrZpyu2042kYA8Fdt/AedQYkYXucHgF9QoD9dXVp0ypuGnHXSR0hTstBxdt85Xkh4JolYfK5wg==", + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-3.5.2.tgz", + "integrity": "sha512-e0L/HNe6qkQ7H19kTlRRqUibEAwDK5AFk6y3PtMsuut2VAH6+Q4xZml1tNDJD7kSAyqmbG/K08K5WEJYtUrSlQ==", "requires": { "humanize-ms": "^1.2.1" } @@ -150,10 +286,19 @@ "resolved": "https://registry.npmjs.org/ansistyles/-/ansistyles-0.1.3.tgz", "integrity": "sha1-XeYEFb2gcbs3EnhUyGT0GyMlRTk=" }, + "append-transform": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-1.0.0.tgz", + "integrity": "sha512-P009oYkeHyU742iSZJzZZywj4QRJdnTWffaKuJQLablCZ1uz6/cW4yaRgcDaoQ+uwOxxnt0gRUcwfsNP2ri0gw==", + "dev": true, + "requires": { + "default-require-extensions": "^2.0.0" + } + }, "aproba": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", - "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", + "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==" }, "archy": { "version": "1.0.0", @@ -167,8 +312,38 @@ "requires": { "delegates": "^1.0.0", "readable-stream": "^2.0.6" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + } } }, + "arg": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.0.tgz", + "integrity": "sha512-ZWc51jO3qegGkVh8Hwpv636EkbesNV5ZNQPCtRa+0qytRYPEs9IYT9qITY9buezqUH5uqyzlWLcufrzU2rffdg==", + "dev": true + }, "argparse": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", @@ -202,27 +377,6 @@ "es-abstract": "^1.7.0" } }, - "array-union": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", - "integrity": "sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk=", - "dev": true, - "requires": { - "array-uniq": "^1.0.1" - } - }, - "array-uniq": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/array-uniq/-/array-uniq-1.0.3.tgz", - "integrity": "sha1-r2rId6Jcx/dOBYiUdThY39sk/bY=", - "dev": true - }, - "arrify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", - "dev": true - }, "asap": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", @@ -309,14 +463,15 @@ } }, "bin-links": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/bin-links/-/bin-links-1.1.2.tgz", - 
"integrity": "sha512-8eEHVgYP03nILphilltWjeIjMbKyJo3wvp9K816pHbhP301ismzw15mxAAEVQ/USUwcP++1uNrbERbp8lOA6Fg==", - "requires": { - "bluebird": "^3.5.0", - "cmd-shim": "^2.0.2", - "gentle-fs": "^2.0.0", - "graceful-fs": "^4.1.11", + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/bin-links/-/bin-links-1.1.7.tgz", + "integrity": "sha512-/eaLaTu7G7/o7PV04QPy1HRT65zf+1tFkPGv0sPTV0tRwufooYBQO3zrcyGgm+ja+ZtBf2GEuKjDRJ2pPG+yqA==", + "requires": { + "bluebird": "^3.5.3", + "cmd-shim": "^3.0.0", + "gentle-fs": "^2.3.0", + "graceful-fs": "^4.1.15", + "npm-normalize-package-bin": "^1.0.0", "write-file-atomic": "^2.3.0" } }, @@ -326,18 +481,19 @@ "integrity": "sha512-3/qRXczDi2Cdbz6jE+W3IflJOutRVica8frpBn14de1mBOkzDo+6tY33kNhvkw54Kn3PzRRD2VnGbGPcTAk4sw==", "dev": true }, - "block-stream": { - "version": "0.0.9", - "resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.9.tgz", - "integrity": "sha1-E+v+d4oDIFz+A3UUgeu0szAMEmo=", + "bl": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-3.0.0.tgz", + "integrity": "sha512-EUAyP5UHU5hxF8BPT0LKW8gjYLhq1DQIcneOX/pL/m2Alo+OYDQAJlHq+yseMP50Os2nHXOSic6Ss3vSQeyf4A==", + "dev": true, "requires": { - "inherits": "~2.0.0" + "readable-stream": "^3.0.1" } }, "bluebird": { - "version": "3.5.3", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.5.3.tgz", - "integrity": "sha512-/qKPUQlaW1OyR51WeCPBvRnAlnZFUJkCSG5HzGnuIqhgyJtF+T94lFnn33eiazjRm2LAHVy2guNnaq48X9SJuw==" + "version": "3.5.5", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.5.5.tgz", + "integrity": "sha512-5am6HnnfN+urzt4yfg7IgTbotDjIT/u8AJpEt0sIU9FtXfVeezXAPKswrG+xKUCOYAINpSdgZVDU6QFh+cuH3w==" }, "boxen": { "version": "1.3.0", @@ -362,6 +518,12 @@ "concat-map": "0.0.1" } }, + "browser-process-hrtime": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz", + "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==", + "dev": true + }, "browser-request": { "version": "0.3.3", "resolved": "https://registry.npmjs.org/browser-request/-/browser-request-0.3.3.tgz", @@ -376,7 +538,8 @@ "builtin-modules": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz", - "integrity": "sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8=" + "integrity": "sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8=", + "dev": true }, "builtins": { "version": "1.0.3", @@ -389,35 +552,66 @@ "integrity": "sha1-dBxSFkaOrcRXsDQQEYrXfejB3bE=" }, "byte-size": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/byte-size/-/byte-size-4.0.3.tgz", - "integrity": "sha512-JGC3EV2bCzJH/ENSh3afyJrH4vwxbHTuO5ljLoI5+2iJOcEpMgP8T782jH9b5qGxf2mSUIp1lfGnfKNrRHpvVg==" + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/byte-size/-/byte-size-5.0.1.tgz", + "integrity": "sha512-/XuKeqWocKsYa/cBY1YbSJSWWqTi4cFgr9S6OyM7PBaPbr9zvNGwWP33vt0uqGhwDdN+y3yhbXVILEUpnwEWGw==" }, "cacache": { - "version": "11.2.0", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-11.2.0.tgz", - "integrity": "sha512-IFWl6lfK6wSeYCHUXh+N1lY72UDrpyrYQJNIVQf48paDuWbv5RbAtJYf/4gUQFObTCHZwdZ5sI8Iw7nqwP6nlQ==", + "version": "12.0.3", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-12.0.3.tgz", + "integrity": "sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw==", "requires": { - "bluebird": "^3.5.1", - "chownr": "^1.0.1", - "figgy-pudding": "^3.1.0", - "glob": "^7.1.2", - 
"graceful-fs": "^4.1.11", - "lru-cache": "^4.1.3", + "bluebird": "^3.5.5", + "chownr": "^1.1.1", + "figgy-pudding": "^3.5.1", + "glob": "^7.1.4", + "graceful-fs": "^4.1.15", + "infer-owner": "^1.0.3", + "lru-cache": "^5.1.1", "mississippi": "^3.0.0", "mkdirp": "^0.5.1", "move-concurrently": "^1.0.1", "promise-inflight": "^1.0.1", - "rimraf": "^2.6.2", - "ssri": "^6.0.0", - "unique-filename": "^1.1.0", + "rimraf": "^2.6.3", + "ssri": "^6.0.1", + "unique-filename": "^1.1.1", "y18n": "^4.0.0" } }, + "caching-transform": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-3.0.2.tgz", + "integrity": "sha512-Mtgcv3lh3U0zRii/6qVgQODdPA4G3zhG+jtbCWj39RXuUFTMzH0vcdMtaJS1jPowd+It2Pqr6y3NJMQqOqCE2w==", + "dev": true, + "requires": { + "hasha": "^3.0.0", + "make-dir": "^2.0.0", + "package-hash": "^3.0.0", + "write-file-atomic": "^2.4.2" + }, + "dependencies": { + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "dev": true, + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + } + }, + "pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true + } + } + }, "call-limit": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/call-limit/-/call-limit-1.1.0.tgz", - "integrity": "sha1-b9YbA/PaQqLNDsK2DwK9DnGZH+o=" + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/call-limit/-/call-limit-1.1.1.tgz", + "integrity": "sha512-5twvci5b9eRBw2wCfPtN0GmlR2/gadZqyFpPhOK6CvMFoFgA+USnZ6Jpu1lhG9h85pQ3Ouil3PfXWRD4EUaRiQ==" }, "caller": { "version": "1.0.1", @@ -472,19 +666,19 @@ "dev": true }, "chownr": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.0.1.tgz", - "integrity": "sha1-4qdQQqlVGQi+vSW4Uj1fl2nXkYE=" + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" }, "ci-info": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-1.6.0.tgz", - "integrity": "sha512-vsGdkwSCDpWmP80ncATX7iea5DWQemg1UgCW5J8tqjU3lYw4FBYuj89J0CTVomA7BEfvSZd84GmHko+MxFQU2A==" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", + "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==" }, "cidr-regex": { - "version": "2.0.9", - "resolved": "https://registry.npmjs.org/cidr-regex/-/cidr-regex-2.0.9.tgz", - "integrity": "sha512-F7/fBRUU45FnvSPjXdpIrc++WRSBdCiSTlyq4ZNhLKOlHFNWgtzZ0Fd+zrqI/J1j0wmlx/f5ZQDmD2GcbrNcmw==", + "version": "2.0.10", + "resolved": "https://registry.npmjs.org/cidr-regex/-/cidr-regex-2.0.10.tgz", + "integrity": "sha512-sB3ogMQXWvreNPbJUZMRApxuRYd+KoIo4RGQ81VatjmMW6WJPo+IJZ2846FGItr9VzKo5w7DXzijPLGtSd0N3Q==", "requires": { "ip-regex": "^2.1.0" } @@ -525,9 +719,9 @@ } }, "cli-table3": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.5.0.tgz", - "integrity": "sha512-c7YHpUyO1SaKaO7kYtxd5NZ8FjAmSK3LpKkuzdwn+2CwpFxBpdoQLm+OAnnCfoEl7onKhN9PKQi1lsHuAIUqGQ==", + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.5.1.tgz", + 
"integrity": "sha512-7Qg2Jrep1S/+Q3EceiZtQcDPWxhAvBw+ERf1162v4sikJrvojMHFqXt8QIVha8UlH9rgU0BeWPytZ9/TzYqlUw==", "requires": { "colors": "^1.1.2", "object-assign": "^4.1.0", @@ -571,20 +765,31 @@ "integrity": "sha1-2jCcwmPfFZlMaIypAheco8fNfH4=" }, "cloudant-follow": { - "version": "0.17.0", - "resolved": "https://registry.npmjs.org/cloudant-follow/-/cloudant-follow-0.17.0.tgz", - "integrity": "sha512-JQ1xvKAHh8rsnSVBjATLCjz/vQw1sWBGadxr2H69yFMwD7hShUGDwwEefdypaxroUJ/w6t1cSwilp/hRUxEW8w==", + "version": "0.18.2", + "resolved": "https://registry.npmjs.org/cloudant-follow/-/cloudant-follow-0.18.2.tgz", + "integrity": "sha512-qu/AmKxDqJds+UmT77+0NbM7Yab2K3w0qSeJRzsq5dRWJTEJdWeb+XpG4OpKuTE9RKOa/Awn2gR3TTnvNr3TeA==", "dev": true, "requires": { "browser-request": "~0.3.0", - "debug": "^3.0.0", - "request": "^2.83.0" + "debug": "^4.0.1", + "request": "^2.88.0" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + } } }, "cmd-shim": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/cmd-shim/-/cmd-shim-2.0.2.tgz", - "integrity": "sha1-b8vamUg6j9FdfTChlspp1oii79s=", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/cmd-shim/-/cmd-shim-3.0.3.tgz", + "integrity": "sha512-DtGg+0xiFhQIntSBRzL2fRQBnmtAVwXIDo4Qq46HPpObYquxMaZS4sb82U9nH91qJrlosC1wa9gwr0QyL/HypA==", "requires": { "graceful-fs": "^4.1.2", "mkdirp": "~0.5.0" @@ -626,9 +831,9 @@ "dev": true }, "colors": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/colors/-/colors-1.1.2.tgz", - "integrity": "sha1-FopHAXVran9RoSzgyXv6KMCE7WM=", + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.3.3.tgz", + "integrity": "sha512-mmGt/1pZqYRjMxB1axhTo16/snVZ5krrKkcmMeVKxzECMMXoCgnvTPp10QgHfcbQZw8Dq2jMNG6je4JlWU0gWg==", "optional": true }, "columnify": { @@ -648,6 +853,19 @@ "delayed-stream": "~1.0.0" } }, + "commander": { + "version": "2.20.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.0.tgz", + "integrity": "sha512-7j2y+40w61zy6YC2iRNpUe/NwhNyoXrYpHMrSunaMG64nRnaf96zO/KMQR4OyN/UnE5KLyEBnKHd4aG3rskjpQ==", + "dev": true, + "optional": true + }, + "commondir": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", + "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=", + "dev": true + }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -662,6 +880,30 @@ "inherits": "^2.0.3", "readable-stream": "^2.2.2", "typedarray": "^0.0.6" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + } } }, "config-chain": 
{ @@ -687,14 +929,14 @@ } }, "connect": { - "version": "3.6.6", - "resolved": "https://registry.npmjs.org/connect/-/connect-3.6.6.tgz", - "integrity": "sha1-Ce/2xVr3I24TcTWnJXSFi2eG9SQ=", + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/connect/-/connect-3.7.0.tgz", + "integrity": "sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ==", "dev": true, "requires": { "debug": "2.6.9", - "finalhandler": "1.1.0", - "parseurl": "~1.3.2", + "finalhandler": "1.1.2", + "parseurl": "~1.3.3", "utils-merge": "1.0.1" }, "dependencies": { @@ -726,6 +968,15 @@ "integrity": "sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo=", "dev": true }, + "convert-source-map": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.6.0.tgz", + "integrity": "sha512-eFu7XigvxdZ1ETfbgPBohgyQ/Z++C0eEhTor0qRwBw9unw+L0/6V8wkSuGgzdThkiS5lSpdptOQPD8Ak40a+7A==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.1" + } + }, "copy-concurrently": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/copy-concurrently/-/copy-concurrently-1.0.5.tgz", @@ -739,6 +990,11 @@ "run-queue": "^1.0.0" }, "dependencies": { + "aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==" + }, "iferr": { "version": "0.1.5", "resolved": "https://registry.npmjs.org/iferr/-/iferr-0.1.5.tgz", @@ -746,17 +1002,20 @@ } } }, - "core-js": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-1.2.7.tgz", - "integrity": "sha1-ZSKUwUZR2yj6k70tX/KYOk8IxjY=", - "dev": true - }, "core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, + "correct-license-metadata": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/correct-license-metadata/-/correct-license-metadata-1.4.0.tgz", + "integrity": "sha512-nvbNpK/aYCbztZWGi9adIPqR+ZcQmZTWNT7eMYLvkaVGroN1nTHiVuuNPl7pK6ZNx1mvDztlRBJtfUdrVwKJ5A==", + "dev": true, + "requires": { + "spdx-expression-validate": "^2.0.0" + } + }, "couchapp": { "version": "0.11.0", "resolved": "https://registry.npmjs.org/couchapp/-/couchapp-0.11.0.tgz", @@ -773,16 +1032,17 @@ } }, "coveralls": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/coveralls/-/coveralls-3.0.1.tgz", - "integrity": "sha512-FAzXwiDOYLGDWH+zgoIA+8GbWv50hlx+kpEJyvzLKOdnIBv9uWoVl4DhqGgyUHpiRjAlF8KYZSipWXYtllWH6Q==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/coveralls/-/coveralls-3.0.4.tgz", + "integrity": "sha512-eyqUWA/7RT0JagiL0tThVhjbIjoiEUyWCjtUJoOPcWoeofP5WK/jb2OJYoBFrR6DvplR+AxOyuBqk4JHkk5ykA==", "dev": true, "requires": { - "js-yaml": "^3.6.1", + "growl": "~> 1.10.0", + "js-yaml": "^3.11.0", "lcov-parse": "^0.0.10", - "log-driver": "^1.2.5", + "log-driver": "^1.2.7", "minimist": "^1.2.0", - "request": "^2.79.0" + "request": "^2.86.0" }, "dependencies": { "minimist": { @@ -793,6 +1053,37 @@ } } }, + "cp-file": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/cp-file/-/cp-file-6.2.0.tgz", + "integrity": "sha512-fmvV4caBnofhPe8kOcitBwSn2f39QLjnAnGq3gO9dfd75mUytzKNZB1hde6QHunW2Rt+OwuBOMc3i1tNElbszA==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "make-dir": "^2.0.0", + "nested-error-stacks": "^2.0.0", + "pify": "^4.0.1", + "safe-buffer": "^5.0.1" + }, + "dependencies": { + "make-dir": { 
+ "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "dev": true, + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + } + }, + "pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true + } + } + }, "create-error-class": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/create-error-class/-/create-error-class-3.0.2.tgz", @@ -809,6 +1100,22 @@ "lru-cache": "^4.0.1", "shebang-command": "^1.2.0", "which": "^1.2.9" + }, + "dependencies": { + "lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "requires": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=" + } } }, "crypto-random-string": { @@ -882,6 +1189,15 @@ "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=", "dev": true }, + "default-require-extensions": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-2.0.0.tgz", + "integrity": "sha1-9fj7sYp9bVCyH2QfZJ67Uiz+JPc=", + "dev": true, + "requires": { + "strip-bom": "^3.0.0" + } + }, "defaults": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.3.tgz", @@ -891,19 +1207,17 @@ } }, "define-properties": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.2.tgz", - "integrity": "sha1-g6c/L+pWmJj7c3GTyPhzyvbUXJQ=", - "dev": true, + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", + "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", "requires": { - "foreach": "^2.0.5", - "object-keys": "^1.0.8" + "object-keys": "^1.0.12" } }, "deglob": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/deglob/-/deglob-2.1.0.tgz", - "integrity": "sha1-TUSr4W7zLHebSXK9FBqAMlApoUo=", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/deglob/-/deglob-2.1.1.tgz", + "integrity": "sha512-2kjwuGGonL7gWE1XU4Fv79+vVzpoQCl0V+boMwWtOQJV2AGDabCwez++nB1Nli/8BabAfZQ/UuHPlp6AymKdWw==", "dev": true, "requires": { "find-root": "^1.0.0", @@ -914,29 +1228,6 @@ "uniq": "^1.0.1" } }, - "del": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/del/-/del-2.2.2.tgz", - "integrity": "sha1-wSyYHQZ4RshLyvhiz/kw2Qf/0ag=", - "dev": true, - "requires": { - "globby": "^5.0.0", - "is-path-cwd": "^1.0.0", - "is-path-in-cwd": "^1.0.0", - "object-assign": "^4.0.1", - "pify": "^2.0.0", - "pinkie-promise": "^2.0.0", - "rimraf": "^2.2.8" - }, - "dependencies": { - "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", - "dev": true - } - } - }, "delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", @@ -987,6 +1278,12 @@ "esutils": "^2.0.2" } }, + "domain-browser": { + "version": "1.2.0", + 
"resolved": "https://registry.npmjs.org/domain-browser/-/domain-browser-1.2.0.tgz", + "integrity": "sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA==", + "dev": true + }, "dot-prop": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-4.2.0.tgz", @@ -1014,6 +1311,30 @@ "inherits": "^2.0.1", "readable-stream": "^2.0.0", "stream-shift": "^1.0.0" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + } } }, "ecc-jsbn": { @@ -1037,6 +1358,12 @@ "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=", "dev": true }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, "encodeurl": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", @@ -1059,6 +1386,11 @@ "once": "^1.4.0" } }, + "env-paths": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.0.tgz", + "integrity": "sha512-6u0VYSCo/OW6IoD5WCLLy9JUGARbamfSavcNXry/eu8aHVFei6CD3Sw+VGX5alea1i9pgPHW0mbu6Xj0uBh7gA==" + }, "err-code": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/err-code/-/err-code-1.1.2.tgz", @@ -1073,9 +1405,9 @@ } }, "error-ex": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.1.tgz", - "integrity": "sha1-+FWobOYa3E6GIcPNoh56dhLDqNw=", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", "dev": true, "requires": { "is-arrayish": "^0.2.1" @@ -1088,10 +1420,9 @@ "dev": true }, "es-abstract": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.11.0.tgz", - "integrity": "sha512-ZnQrE/lXTTQ39ulXZ+J1DTFazV9qBy61x2bY071B+qGco8Z8q1QddsLdt/EF8Ai9hcWH72dWS0kFqXLxOxqslA==", - "dev": true, + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.12.0.tgz", + "integrity": "sha512-C8Fx/0jFmV5IPoMOFPA9P9G5NtqW+4cOPit3MIuvR2t7Ag2K15EJTpxnHAYTzL+aYQJIESYeXZmDBfOBE1HcpA==", "requires": { "es-to-primitive": "^1.1.1", "function-bind": "^1.1.1", @@ -1101,20 +1432,25 @@ } }, "es-to-primitive": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.1.1.tgz", - "integrity": "sha1-RTVSSKiJeQNLZ5Lhm7gfK3l13Q0=", - "dev": true, + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.0.tgz", + "integrity": "sha512-qZryBOJjV//LaxLTV6UC//WewneB3LcXOL9NP++ozKVXsIIIpm/2c13UDiD9Jp2eThsecw9m3jPqDwTyobcdbg==", "requires": { 
- "is-callable": "^1.1.1", + "is-callable": "^1.1.4", "is-date-object": "^1.0.1", - "is-symbol": "^1.0.1" + "is-symbol": "^1.0.2" } }, + "es6-error": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", + "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==", + "dev": true + }, "es6-promise": { - "version": "4.2.4", - "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.4.tgz", - "integrity": "sha512-/NdNZVJg+uZgtm9eS3O6lrOLYmQag2DjdEXuPaHlZ6RuVqgqaVZfgYCepEIKsLqwdQArOPtC3XzRLqGGfT8KQQ==" + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", + "integrity": "sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==" }, "es6-promisify": { "version": "5.0.0", @@ -1186,12 +1522,6 @@ "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", "dev": true }, - "lodash": { - "version": "4.17.10", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.10.tgz", - "integrity": "sha512-UejweD1pDoXu+AD825lWwp4ZGtSwgnpZxb3JDViD7StjQz+Nb/6l093lx4OQ0foGWNRoc19mWy7BzL+UAK2iVg==", - "dev": true - }, "strip-ansi": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", @@ -1306,12 +1636,6 @@ "isarray": "^1.0.0" } }, - "lodash": { - "version": "4.17.10", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.10.tgz", - "integrity": "sha512-UejweD1pDoXu+AD825lWwp4ZGtSwgnpZxb3JDViD7StjQz+Nb/6l093lx4OQ0foGWNRoc19mWy7BzL+UAK2iVg==", - "dev": true - }, "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", @@ -1357,9 +1681,9 @@ "dev": true }, "eslint-scope": { - "version": "3.7.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-3.7.1.tgz", - "integrity": "sha1-PWPD7f2gLgbgGkUq2IyqzHzctug=", + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-3.7.3.tgz", + "integrity": "sha512-W+B0SvF4gamyCTmUc+uITPY0989iXVfKvhwtmJocTaYoc/3khEHmEmvfY/Gn9HA9VV75jrQECsHizkNw1b68FA==", "dev": true, "requires": { "esrecurse": "^4.1.0", @@ -1372,6 +1696,12 @@ "integrity": "sha512-qzm/XxIbxm/FHyH341ZrbnMUpe+5Bocte9xkmFMzPMjRaZMcXww+MpBptFvtU+79L362nqiLhekCxCxDPaUMBQ==", "dev": true }, + "esm": { + "version": "3.2.25", + "resolved": "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz", + "integrity": "sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==", + "dev": true + }, "espree": { "version": "3.5.4", "resolved": "https://registry.npmjs.org/espree/-/espree-3.5.4.tgz", @@ -1383,9 +1713,9 @@ } }, "esprima": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.0.tgz", - "integrity": "sha512-oftTcaMu/EGrEIu904mWteKIv8vMuOgGYo7EhVJJN00R/EED9DCua/xxHRdYnKtcECzVg7xOWhflvJMnqcFZjw==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", "dev": true }, "esquery": { @@ -1436,6 +1766,13 @@ "p-finally": "^1.0.0", "signal-exit": "^3.0.0", "strip-eof": "^1.0.0" + }, + "dependencies": { + "get-stream": { + "version": "3.0.0", + "resolved": "http://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" + } } }, "extend": { @@ -1475,21 +1812,6 @@ "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", "dev": true }, - "fbjs": { - 
"version": "0.8.16", - "resolved": "https://registry.npmjs.org/fbjs/-/fbjs-0.8.16.tgz", - "integrity": "sha1-XmdDL1UNxBtXK/VYR7ispk5TN9s=", - "dev": true, - "requires": { - "core-js": "^1.0.0", - "isomorphic-fetch": "^2.1.1", - "loose-envify": "^1.0.0", - "object-assign": "^4.1.0", - "promise": "^7.1.1", - "setimmediate": "^1.0.5", - "ua-parser-js": "^0.7.9" - } - }, "figgy-pudding": { "version": "3.5.1", "resolved": "https://registry.npmjs.org/figgy-pudding/-/figgy-pudding-3.5.1.tgz", @@ -1515,17 +1837,17 @@ } }, "finalhandler": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.0.tgz", - "integrity": "sha1-zgtoVbRYU+eRsvzGgARtiCU91/U=", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", + "integrity": "sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==", "dev": true, "requires": { "debug": "2.6.9", - "encodeurl": "~1.0.1", + "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "on-finished": "~2.3.0", - "parseurl": "~1.3.2", - "statuses": "~1.3.1", + "parseurl": "~1.3.3", + "statuses": "~1.5.0", "unpipe": "~1.0.0" }, "dependencies": { @@ -1546,6 +1868,87 @@ } } }, + "find-cache-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", + "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", + "dev": true, + "requires": { + "commondir": "^1.0.1", + "make-dir": "^2.0.0", + "pkg-dir": "^3.0.0" + }, + "dependencies": { + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "dev": true, + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + } + }, + "p-limit": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz", + "integrity": "sha512-pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true + }, 
+ "pkg-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "dev": true, + "requires": { + "find-up": "^3.0.0" + } + } + } + }, "find-npm-prefix": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/find-npm-prefix/-/find-npm-prefix-1.0.2.tgz", @@ -1566,14 +1969,14 @@ } }, "flat-cache": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-1.3.0.tgz", - "integrity": "sha1-0wMLMrOBVPTjt+nHCfSQ9++XxIE=", + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-1.3.4.tgz", + "integrity": "sha512-VwyB3Lkgacfik2vhqR4uv2rvebqmDvFu4jlN/C1RzWoJEo8I7z4Q404oiqYCkq41mni8EzQnm95emU9seckwtg==", "dev": true, "requires": { "circular-json": "^0.3.1", - "del": "^2.0.2", "graceful-fs": "^4.1.2", + "rimraf": "~2.6.2", "write": "^0.2.1" } }, @@ -1584,14 +1987,32 @@ "requires": { "inherits": "^2.0.1", "readable-stream": "^2.0.4" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + } } }, - "foreach": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/foreach/-/foreach-2.0.5.tgz", - "integrity": "sha1-C+4AUBiusmDQo6865ljdATbsG5k=", - "dev": true - }, "foreground-child": { "version": "1.5.6", "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-1.5.6.tgz", @@ -1610,7 +2031,25 @@ "requires": { "lru-cache": "^4.0.1", "which": "^1.2.9" + }, + "dependencies": { + "lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "dev": true, + "requires": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + } } + }, + "yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=", + "dev": true } } }, @@ -1636,17 +2075,47 @@ "requires": { "inherits": "^2.0.1", "readable-stream": "^2.0.0" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": 
"sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + } } }, "fs-access": { - "version": "1.0.1", - "resolved": "http://registry.npmjs.org/fs-access/-/fs-access-1.0.1.tgz", - "integrity": "sha1-1qh/JiJxzv6+wwxVNAf7mV2od3o=", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fs-access/-/fs-access-2.0.0.tgz", + "integrity": "sha512-Vt45hBKJrYDQeAD9ja43liw8JfK75uB7XexIXWEtDKwFLQNmzmvuulh28hRxexxuFm0zsGGq7nISGQSK6KnGrA==", "dev": true, "requires": { "null-check": "^1.0.0" } }, + "fs-constants": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", + "dev": true + }, "fs-exists-cached": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs-exists-cached/-/fs-exists-cached-1.0.0.tgz", @@ -1654,11 +2123,22 @@ "dev": true }, "fs-minipass": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.5.tgz", - "integrity": "sha512-JhBl0skXjUPCFH7x6x61gQxrKyXsxB5gcgePLZCwfyCGGsTISMoIeObbrvVeP6Xmyaudw4TT43qV2Gz+iyd2oQ==", + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.7.tgz", + "integrity": "sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA==", "requires": { - "minipass": "^2.2.1" + "minipass": "^2.6.0" + }, + "dependencies": { + "minipass": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz", + "integrity": "sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==", + "requires": { + "safe-buffer": "^5.1.2", + "yallist": "^3.0.0" + } + } } }, "fs-vacuum": { @@ -1686,6 +2166,28 @@ "version": "0.1.5", "resolved": "https://registry.npmjs.org/iferr/-/iferr-0.1.5.tgz", "integrity": "sha1-xg7taebY/bazEEofy8ocGS3FtQE=" + }, + "readable-stream": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } } } }, @@ -1694,27 +2196,15 @@ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, - "fstream": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.11.tgz", - "integrity": "sha1-XB+x8RdHcRTwYyoOtLcbPLD9MXE=", - "requires": { - "graceful-fs": "^4.1.2", - "inherits": "~2.0.0", - "mkdirp": ">=0.5 0", - "rimraf": "2" - } - }, "function-bind": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", - "dev": true + "integrity": 
"sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" }, "function-loop": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/function-loop/-/function-loop-1.0.1.tgz", - "integrity": "sha1-gHa7MF6OajzO7ikgdl8zDRkPNAw=", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/function-loop/-/function-loop-1.0.2.tgz", + "integrity": "sha512-Iw4MzMfS3udk/rqxTiDDCllhGwlOrsr50zViTOO/W6lS/9y6B1J0BD2VZzrnWUYBJsl3aeqjgR5v7bWWhZSYbA==", "dev": true }, "functional-red-black-tree": { @@ -1738,6 +2228,11 @@ "wide-align": "^1.1.0" }, "dependencies": { + "aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==" + }, "string-width": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", @@ -1751,25 +2246,33 @@ } }, "genfun": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/genfun/-/genfun-4.0.1.tgz", - "integrity": "sha1-7RAEHy5KfxsKOEZtF6XD4n3x38E=" + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/genfun/-/genfun-5.0.0.tgz", + "integrity": "sha512-KGDOARWVga7+rnB3z9Sd2Letx515owfk0hSxHGuqjANb1M+x2bGZGqHLiozPsYMdM2OubeMni/Hpwmjq6qIUhA==" }, "gentle-fs": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/gentle-fs/-/gentle-fs-2.0.1.tgz", - "integrity": "sha512-cEng5+3fuARewXktTEGbwsktcldA+YsnUEaXZwcK/3pjSE1X9ObnTs+/8rYf8s+RnIcQm2D5x3rwpN7Zom8Bew==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/gentle-fs/-/gentle-fs-2.3.0.tgz", + "integrity": "sha512-3k2CgAmPxuz7S6nKK+AqFE2AdM1QuwqKLPKzIET3VRwK++3q96MsNFobScDjlCrq97ZJ8y5R725MOlm6ffUCjg==", "requires": { "aproba": "^1.1.2", + "chownr": "^1.1.2", + "cmd-shim": "^3.0.3", "fs-vacuum": "^1.2.10", "graceful-fs": "^4.1.11", "iferr": "^0.1.5", + "infer-owner": "^1.0.4", "mkdirp": "^0.5.1", "path-is-inside": "^1.0.2", "read-cmd-shim": "^1.0.1", "slide": "^1.1.6" }, "dependencies": { + "aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==" + }, "iferr": { "version": "0.1.5", "resolved": "https://registry.npmjs.org/iferr/-/iferr-0.1.5.tgz", @@ -1778,9 +2281,9 @@ } }, "get-caller-file": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.2.tgz", - "integrity": "sha1-9wLmMSfn4jHBYKgMFVSstw1QR+U=" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", + "integrity": "sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==" }, "get-stdin": { "version": "6.0.0", @@ -1789,9 +2292,12 @@ "dev": true }, "get-stream": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", - "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", + "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", + "requires": { + "pump": "^3.0.0" + } }, "getpass": { "version": "0.1.7", @@ -1802,9 +2308,9 @@ } }, "glob": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", - "integrity": 
"sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", "requires": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -1823,33 +2329,11 @@ } }, "globals": { - "version": "11.5.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.5.0.tgz", - "integrity": "sha512-hYyf+kI8dm3nORsiiXUQigOU62hDLfJ9G01uyGMxhc6BKsircrUhC4uJPQPUSuq2GrTmiiEt7ewxlMdBewfmKQ==", + "version": "11.9.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.9.0.tgz", + "integrity": "sha512-5cJVtyXWH8PiJPVLZzzoIizXx944O4OmRro5MWKx5fT4MgcN7OfaMutPeaTdJCCURwbWdhhcCWcKIffPnmTzBg==", "dev": true }, - "globby": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-5.0.0.tgz", - "integrity": "sha1-69hGZ8oNuzMLmbz8aOrCvFQ3Dg0=", - "dev": true, - "requires": { - "array-union": "^1.0.1", - "arrify": "^1.0.0", - "glob": "^7.0.3", - "object-assign": "^4.0.1", - "pify": "^2.0.0", - "pinkie-promise": "^2.0.0" - }, - "dependencies": { - "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", - "dev": true - } - } - }, "got": { "version": "6.7.1", "resolved": "https://registry.npmjs.org/got/-/got-6.7.1.tgz", @@ -1866,12 +2350,61 @@ "timed-out": "^4.0.0", "unzip-response": "^2.0.1", "url-parse-lax": "^1.0.0" + }, + "dependencies": { + "get-stream": { + "version": "3.0.0", + "resolved": "http://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" + } } }, "graceful-fs": { - "version": "4.1.15", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.15.tgz", - "integrity": "sha512-6uHUhOPEBgQ24HM+r6b/QwWfZq+yiFcipKFrOFiBEnWdy5sdzYoi+pJeQaPI5qOLRFqWmAXUPQNsielzdLoecA==" + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", + "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==" + }, + "growl": { + "version": "1.10.5", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", + "dev": true + }, + "handlebars": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.1.2.tgz", + "integrity": "sha512-nvfrjqvt9xQ8Z/w0ijewdD/vvWDTOweBUm96NTr66Wfvo1mJenBLwcYmPs3TIBP5ruzYGD7Hx/DaM9RmhroGPw==", + "dev": true, + "requires": { + "neo-async": "^2.6.0", + "optimist": "^0.6.1", + "source-map": "^0.6.1", + "uglify-js": "^3.1.4" + }, + "dependencies": { + "optimist": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", + "dev": true, + "requires": { + "minimist": "~0.0.1", + "wordwrap": "~0.0.2" + } + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, + "wordwrap": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", + "dev": true + } + } }, 
"har-schema": { "version": "2.0.0", @@ -1888,12 +2421,11 @@ } }, "has": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.1.tgz", - "integrity": "sha1-hGFzP1OLCDfJNh45qauelwTcLyg=", - "dev": true, + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", "requires": { - "function-bind": "^1.0.2" + "function-bind": "^1.1.1" } }, "has-ansi": { @@ -1910,11 +2442,25 @@ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" }, + "has-symbols": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.0.tgz", + "integrity": "sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q=" + }, "has-unicode": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=" }, + "hasha": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hasha/-/hasha-3.0.0.tgz", + "integrity": "sha1-UqMvq4Vp1BymmmH/GiFPjrfIvTk=", + "dev": true, + "requires": { + "is-stream": "^1.0.1" + } + }, "hock": { "version": "0.2.5", "resolved": "https://registry.npmjs.org/hock/-/hock-0.2.5.tgz", @@ -1933,9 +2479,9 @@ } }, "hosted-git-info": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.7.1.tgz", - "integrity": "sha512-7T/BxH19zbcCTa8XkMlbK5lTo1WtgkFi3GvdWEyNuc4Vex7/9Dqbnpsf4JMydcfj9HCg4zUWFTL3Za6lapg5/w==" + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", + "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==" }, "http-cache-semantics": { "version": "3.8.1", @@ -1981,11 +2527,11 @@ } }, "https-proxy-agent": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.1.tgz", - "integrity": "sha512-HPCTS1LW51bcyMYbxUIOO4HEOlQ1/1qRaFWcyxvwaqUS9TY88aoEuHUY33kuAh1YhVVaDQhLZsnPd+XNARWZlQ==", + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz", + "integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==", "requires": { - "agent-base": "^4.1.0", + "agent-base": "^4.3.0", "debug": "^3.1.0" } }, @@ -2011,15 +2557,15 @@ "integrity": "sha512-9AfeLfji44r5TKInjhz3W9DyZI1zR1JAf2hVBMGhddAKPqBsupb89jGfbCTHIGZd6fGZl9WlHdn4AObygyMKwg==" }, "ignore": { - "version": "3.3.8", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.8.tgz", - "integrity": "sha512-pUh+xUQQhQzevjRHHFqqcTy0/dP/kS9I8HSrUydhihjuD09W6ldVWFtIrwhXdUJHis3i2rZNqEHpZH/cbinFbg==", + "version": "3.3.10", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", + "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==", "dev": true }, "ignore-walk": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.1.tgz", - "integrity": "sha512-DTVlMx3IYPe0/JJcYP7Gxg7ttZZu3IInhuEhbchuqneY9wWe5Ojy2mXLBaQFUQmo0AW2r3qG7m1mg86js+gnlQ==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.3.tgz", + "integrity": "sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw==", "requires": { "minimatch": "^3.0.4" } @@ -2034,6 +2580,11 @@ 
"resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=" }, + "infer-owner": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/infer-owner/-/infer-owner-1.0.4.tgz", + "integrity": "sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==" + }, "inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", @@ -2044,9 +2595,9 @@ } }, "inherits": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "ini": { "version": "1.3.5", @@ -2096,12 +2647,6 @@ "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", "dev": true }, - "lodash": { - "version": "4.17.10", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.10.tgz", - "integrity": "sha512-UejweD1pDoXu+AD825lWwp4ZGtSwgnpZxb3JDViD7StjQz+Nb/6l093lx4OQ0foGWNRoc19mWy7BzL+UAK2iVg==", - "dev": true - }, "strip-ansi": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", @@ -2114,9 +2659,9 @@ } }, "invert-kv": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-1.0.0.tgz", - "integrity": "sha1-EEqOSqym09jNFXqO+L+rLXo//bY=" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-2.0.0.tgz", + "integrity": "sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA==" }, "ip": { "version": "1.1.5", @@ -2134,19 +2679,10 @@ "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", "dev": true }, - "is-builtin-module": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-1.0.0.tgz", - "integrity": "sha1-VAVy0096wxGfj3bDDLwbHgN6/74=", - "requires": { - "builtin-modules": "^1.0.0" - } - }, "is-callable": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.3.tgz", - "integrity": "sha1-hut1OSgF3cM69xySoO7fdO52BLI=", - "dev": true + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.4.tgz", + "integrity": "sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA==" }, "is-ci": { "version": "1.1.0", @@ -2154,21 +2690,27 @@ "integrity": "sha512-c7TnwxLePuqIlxHgr7xtxzycJPegNHFuIrBkwbf8hc58//+Op1CqFkyS+xnIMkwn9UsJIwc174BIjkyBmSpjKg==", "requires": { "ci-info": "^1.0.0" + }, + "dependencies": { + "ci-info": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-1.6.0.tgz", + "integrity": "sha512-vsGdkwSCDpWmP80ncATX7iea5DWQemg1UgCW5J8tqjU3lYw4FBYuj89J0CTVomA7BEfvSZd84GmHko+MxFQU2A==" + } } }, "is-cidr": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/is-cidr/-/is-cidr-2.0.6.tgz", - "integrity": "sha512-A578p1dV22TgPXn6NCaDAPj6vJvYsBgAzUrAd28a4oldeXJjWqEUuSZOLIW3im51mazOKsoyVp8NU/OItlWacw==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-cidr/-/is-cidr-3.0.0.tgz", + "integrity": "sha512-8Xnnbjsb0x462VoYiGlhEi+drY8SFwrHiSYuzc/CEwco55vkehTaxAyIjEdpi3EMvLPPJAJi9FlzP+h+03gp0Q==", "requires": { - "cidr-regex": "^2.0.8" + "cidr-regex": "^2.0.10" } }, "is-date-object": { "version": "1.0.1", "resolved": 
"https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.1.tgz", - "integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY=", - "dev": true + "integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY=" }, "is-fullwidth-code-point": { "version": "1.0.0", @@ -2197,21 +2739,6 @@ "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", "integrity": "sha1-PkcprB9f3gJc19g6iW2rn09n2w8=" }, - "is-path-cwd": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-1.0.0.tgz", - "integrity": "sha1-0iXsIxMuie3Tj9p2dHLmLmXxEG0=", - "dev": true - }, - "is-path-in-cwd": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-path-in-cwd/-/is-path-in-cwd-1.0.1.tgz", - "integrity": "sha512-FjV1RTW48E7CWM7eE/J2NJvAEEVektecDBVBE5Hh3nM1Jd0kvhHtX68Pr3xsDf857xt3Y4AkwVULK1Vku62aaQ==", - "dev": true, - "requires": { - "is-path-inside": "^1.0.0" - } - }, "is-path-inside": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-1.0.1.tgz", @@ -2235,7 +2762,6 @@ "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.4.tgz", "integrity": "sha1-VRdIm1RwkbCTDglWVM7SXul+lJE=", - "dev": true, "requires": { "has": "^1.0.1" } @@ -2257,11 +2783,13 @@ "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=" }, "is-symbol": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.1.tgz", - "integrity": "sha1-PMWfAAJRlLarLjjbrmaJJWtmBXI=", - "dev": true - }, + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.2.tgz", + "integrity": "sha512-HS8bZ9ox60yCJLH9snBpIwv9pYUAkcuLhSA1oero1UB5y9aiQpRA8y2ex945AOtCZL1lJDeIk3G5LthswI46Lw==", + "requires": { + "has-symbols": "^1.0.0" + } + }, "is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", @@ -2277,21 +2805,142 @@ "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=" }, - "isomorphic-fetch": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/isomorphic-fetch/-/isomorphic-fetch-2.2.1.tgz", - "integrity": "sha1-YRrhrPFPXoH3KVB0coGf6XM1WKk=", - "dev": true, - "requires": { - "node-fetch": "^1.0.1", - "whatwg-fetch": ">=0.10.0" - } - }, "isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" }, + "istanbul-lib-coverage": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.5.tgz", + "integrity": "sha512-8aXznuEPCJvGnMSRft4udDRDtb1V3pkQkMMI5LI+6HuQz5oQ4J2UFn1H82raA3qJtyOLkkwVqICBQkjnGtn5mA==", + "dev": true + }, + "istanbul-lib-hook": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-2.0.7.tgz", + "integrity": "sha512-vrRztU9VRRFDyC+aklfLoeXyNdTfga2EI3udDGn4cZ6fpSXpHLV9X6CHvfoMCPtggg8zvDDmC4b9xfu0z6/llA==", + "dev": true, + "requires": { + "append-transform": "^1.0.0" + } + }, + "istanbul-lib-instrument": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-3.3.0.tgz", + "integrity": "sha512-5nnIN4vo5xQZHdXno/YDXJ0G+I3dAm4XgzfSVTPLQpj/zAV2dV6Juy0yaf10/zrJOJeHoN3fraFe+XRq2bFVZA==", + "dev": true, + "requires": { + "@babel/generator": "^7.4.0", + "@babel/parser": "^7.4.3", + "@babel/template": "^7.4.0", + "@babel/traverse": "^7.4.3", + "@babel/types": "^7.4.0", + "istanbul-lib-coverage": "^2.0.5", + 
"semver": "^6.0.0" + }, + "dependencies": { + "semver": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.1.2.tgz", + "integrity": "sha512-z4PqiCpomGtWj8633oeAdXm1Kn1W++3T8epkZYnwiVgIYIJ0QHszhInYSJTYxebByQH7KVCEAn8R9duzZW2PhQ==", + "dev": true + } + } + }, + "istanbul-lib-report": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-2.0.8.tgz", + "integrity": "sha512-fHBeG573EIihhAblwgxrSenp0Dby6tJMFR/HvlerBsrCTD5bkUuoNtn3gVh29ZCS824cGGBPn7Sg7cNk+2xUsQ==", + "dev": true, + "requires": { + "istanbul-lib-coverage": "^2.0.5", + "make-dir": "^2.1.0", + "supports-color": "^6.1.0" + }, + "dependencies": { + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "dev": true, + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + } + }, + "pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true + }, + "supports-color": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", + "integrity": "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + } + } + }, + "istanbul-lib-source-maps": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-3.0.6.tgz", + "integrity": "sha512-R47KzMtDJH6X4/YW9XTx+jrLnZnscW4VpNN+1PViSYTejLVPWv7oov+Duf8YQSPyVRUvueQqz1TcsC6mooZTXw==", + "dev": true, + "requires": { + "debug": "^4.1.1", + "istanbul-lib-coverage": "^2.0.5", + "make-dir": "^2.1.0", + "rimraf": "^2.6.3", + "source-map": "^0.6.1" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "dev": true, + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + } + }, + "pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } + } + }, + "istanbul-reports": { + "version": "2.2.6", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-2.2.6.tgz", + "integrity": "sha512-SKi4rnMyLBKe0Jy2uUdx28h8oG7ph2PPuQPvIAh31d+Ci+lSiEu4C+h3oBPuJ9+mPKhOyW0M8gY4U5NM1WLeXA==", + "dev": true, + "requires": { + "handlebars": "^4.1.2" + } + }, "js-tokens": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-3.0.2.tgz", @@ -2299,9 +2948,9 @@ "dev": true }, "js-yaml": 
{ - "version": "3.11.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.11.0.tgz", - "integrity": "sha512-saJstZWv7oNeOyBh3+Dx1qWzhW0+e6/8eDzo7p5rDFqxntSztloLtuKu+Ejhtq82jsilwOIZYsCz+lIjthg1Hw==", + "version": "3.12.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.12.0.tgz", + "integrity": "sha512-PIt2cnwmPfL4hKNwqeiuz4bKfnzHTBv6HyVgjahA6mPLwPDzjDWrplJBMjHUFxku/N3FlmrbyPclad+I+4mJ3A==", "dev": true, "requires": { "argparse": "^1.0.7", @@ -2314,6 +2963,12 @@ "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", "optional": true }, + "jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "dev": true + }, "json": { "version": "9.0.6", "resolved": "https://registry.npmjs.org/json/-/json-9.0.6.tgz", @@ -2391,11 +3046,11 @@ "integrity": "sha1-hN3Es3Bnm6i9TNz6TAa0PVcREUc=" }, "lcid": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/lcid/-/lcid-1.0.0.tgz", - "integrity": "sha1-MIrMr6C8SDo4Z7S28rlQYlHRuDU=", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/lcid/-/lcid-2.0.0.tgz", + "integrity": "sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA==", "requires": { - "invert-kv": "^1.0.0" + "invert-kv": "^2.0.0" } }, "lcov-parse": { @@ -2415,53 +3070,178 @@ } }, "libcipm": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/libcipm/-/libcipm-2.0.2.tgz", - "integrity": "sha512-9uZ6/LAflVEijksTRq/RX0e+pGA4mr8tND9Cmk2JMg7j2fFUBrs8PpFX2DOAJR/XoxPzz+5h8bkWmtIYLunKAg==", + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/libcipm/-/libcipm-4.0.7.tgz", + "integrity": "sha512-fTq33otU3PNXxxCTCYCYe7V96o59v/o7bvtspmbORXpgFk+wcWrGf5x6tBgui5gCed/45/wtPomBsZBYm5KbIw==", "requires": { "bin-links": "^1.1.2", "bluebird": "^3.5.1", + "figgy-pudding": "^3.5.1", "find-npm-prefix": "^1.0.2", "graceful-fs": "^4.1.11", + "ini": "^1.3.5", "lock-verify": "^2.0.2", "mkdirp": "^0.5.1", - "npm-lifecycle": "^2.0.3", + "npm-lifecycle": "^3.0.0", "npm-logical-tree": "^1.2.1", "npm-package-arg": "^6.1.0", - "pacote": "^8.1.6", - "protoduck": "^5.0.0", + "pacote": "^9.1.0", "read-package-json": "^2.0.13", "rimraf": "^2.6.2", "worker-farm": "^1.6.0" } }, - "libnpmhook": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/libnpmhook/-/libnpmhook-4.0.1.tgz", - "integrity": "sha512-3qqpfqvBD1712WA6iGe0stkG40WwAeoWcujA6BlC0Be1JArQbqwabnEnZ0CRcD05Tf1fPYJYdCbSfcfedEJCOg==", + "libnpm": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/libnpm/-/libnpm-3.0.1.tgz", + "integrity": "sha512-d7jU5ZcMiTfBqTUJVZ3xid44fE5ERBm9vBnmhp2ECD2Ls+FNXWxHSkO7gtvrnbLO78gwPdNPz1HpsF3W4rjkBQ==", "requires": { - "figgy-pudding": "^3.1.0", - "npm-registry-fetch": "^3.0.0" + "bin-links": "^1.1.2", + "bluebird": "^3.5.3", + "find-npm-prefix": "^1.0.2", + "libnpmaccess": "^3.0.2", + "libnpmconfig": "^1.2.1", + "libnpmhook": "^5.0.3", + "libnpmorg": "^1.0.1", + "libnpmpublish": "^1.1.2", + "libnpmsearch": "^2.0.2", + "libnpmteam": "^1.0.2", + "lock-verify": "^2.0.2", + "npm-lifecycle": "^3.0.0", + "npm-logical-tree": "^1.2.1", + "npm-package-arg": "^6.1.0", + "npm-profile": "^4.0.2", + "npm-registry-fetch": "^4.0.0", + "npmlog": "^4.1.2", + "pacote": "^9.5.3", + "read-package-json": "^2.0.13", + "stringify-package": "^1.0.0" + } + }, + "libnpmaccess": { + "version": "3.0.2", + "resolved": 
"https://registry.npmjs.org/libnpmaccess/-/libnpmaccess-3.0.2.tgz", + "integrity": "sha512-01512AK7MqByrI2mfC7h5j8N9V4I7MHJuk9buo8Gv+5QgThpOgpjB7sQBDDkeZqRteFb1QM/6YNdHfG7cDvfAQ==", + "requires": { + "aproba": "^2.0.0", + "get-stream": "^4.0.0", + "npm-package-arg": "^6.1.0", + "npm-registry-fetch": "^4.0.0" + } + }, + "libnpmconfig": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/libnpmconfig/-/libnpmconfig-1.2.1.tgz", + "integrity": "sha512-9esX8rTQAHqarx6qeZqmGQKBNZR5OIbl/Ayr0qQDy3oXja2iFVQQI81R6GZ2a02bSNZ9p3YOGX1O6HHCb1X7kA==", + "requires": { + "figgy-pudding": "^3.5.1", + "find-up": "^3.0.0", + "ini": "^1.3.5" }, "dependencies": { - "npm-registry-fetch": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-3.1.1.tgz", - "integrity": "sha512-xBobENeenvjIG8PgQ1dy77AXTI25IbYhmA3DusMIfw/4EL5BaQ5e1V9trkPrqHvyjR3/T0cnH6o0Wt/IzcI5Ag==", + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz", + "integrity": "sha512-pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ==", + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", "requires": { - "bluebird": "^3.5.1", - "figgy-pudding": "^3.1.0", - "lru-cache": "^4.1.2", - "make-fetch-happen": "^4.0.0", - "npm-package-arg": "^6.0.0" + "p-limit": "^2.0.0" } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" } } }, + "libnpmhook": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/libnpmhook/-/libnpmhook-5.0.3.tgz", + "integrity": "sha512-UdNLMuefVZra/wbnBXECZPefHMGsVDTq5zaM/LgKNE9Keyl5YXQTnGAzEo+nFOpdRqTWI9LYi4ApqF9uVCCtuA==", + "requires": { + "aproba": "^2.0.0", + "figgy-pudding": "^3.4.1", + "get-stream": "^4.0.0", + "npm-registry-fetch": "^4.0.0" + } + }, + "libnpmorg": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/libnpmorg/-/libnpmorg-1.0.1.tgz", + "integrity": "sha512-0sRUXLh+PLBgZmARvthhYXQAWn0fOsa6T5l3JSe2n9vKG/lCVK4nuG7pDsa7uMq+uTt2epdPK+a2g6btcY11Ww==", + "requires": { + "aproba": "^2.0.0", + "figgy-pudding": "^3.4.1", + "get-stream": "^4.0.0", + "npm-registry-fetch": "^4.0.0" + } + }, + "libnpmpublish": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/libnpmpublish/-/libnpmpublish-1.1.2.tgz", + "integrity": "sha512-2yIwaXrhTTcF7bkJKIKmaCV9wZOALf/gsTDxVSu/Gu/6wiG3fA8ce8YKstiWKTxSFNC0R7isPUb6tXTVFZHt2g==", + "requires": { + "aproba": "^2.0.0", + "figgy-pudding": "^3.5.1", + "get-stream": "^4.0.0", + "lodash.clonedeep": "^4.5.0", + "normalize-package-data": "^2.4.0", + 
"npm-package-arg": "^6.1.0", + "npm-registry-fetch": "^4.0.0", + "semver": "^5.5.1", + "ssri": "^6.0.1" + } + }, + "libnpmsearch": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/libnpmsearch/-/libnpmsearch-2.0.2.tgz", + "integrity": "sha512-VTBbV55Q6fRzTdzziYCr64+f8AopQ1YZ+BdPOv16UegIEaE8C0Kch01wo4s3kRTFV64P121WZJwgmBwrq68zYg==", + "requires": { + "figgy-pudding": "^3.5.1", + "get-stream": "^4.0.0", + "npm-registry-fetch": "^4.0.0" + } + }, + "libnpmteam": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/libnpmteam/-/libnpmteam-1.0.2.tgz", + "integrity": "sha512-p420vM28Us04NAcg1rzgGW63LMM6rwe+6rtZpfDxCcXxM0zUTLl7nPFEnRF3JfFBF5skF/yuZDUthTsHgde8QA==", + "requires": { + "aproba": "^2.0.0", + "figgy-pudding": "^3.4.1", + "get-stream": "^4.0.0", + "npm-registry-fetch": "^4.0.0" + } + }, "libnpx": { - "version": "10.2.0", - "resolved": "https://registry.npmjs.org/libnpx/-/libnpx-10.2.0.tgz", - "integrity": "sha512-X28coei8/XRCt15cYStbLBph+KGhFra4VQhRBPuH/HHMkC5dxM8v24RVgUsvODKCrUZ0eTgiTqJp6zbl0sskQQ==", + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/libnpx/-/libnpx-10.2.2.tgz", + "integrity": "sha512-ujaYToga1SAX5r7FU5ShMFi88CWpY75meNZtr6RtEyv4l2ZK3+Wgvxq2IqlwWBiDZOqhumdeiocPS1aKrCMe3A==", "requires": { "dotenv": "^5.0.1", "npm-package-arg": "^6.0.0", @@ -2474,20 +3254,34 @@ } }, "licensee": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/licensee/-/licensee-5.0.0.tgz", - "integrity": "sha512-g243BLsJYWWyNhwDEMewc2f6Ebyy1yiJmMDgO8MCQtLOXA8JO4O2/kbJ1QwwWdlmrwDgYECdl9CJBrKsr4ZezA==", + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/licensee/-/licensee-7.0.3.tgz", + "integrity": "sha512-Rdwk+y+nOzkTKz68Gj0OOIy7ISkw+JJzSpn37hfHOXT6swkGfnKEt9+jRolWIFONiIhwv+W5E8K/pnXBfWLewQ==", "dev": true, "requires": { + "@blueoak/list": "^1.0.2", + "correct-license-metadata": "^1.0.1", "docopt": "^0.6.2", - "fs-access": "^1.0.0", + "fs-access": "^2.0.0", + "has": "^1.0.3", "json-parse-errback": "^2.0.1", + "npm-license-corrections": "^1.0.0", "read-package-tree": "^5.2.1", "run-parallel": "^1.1.9", - "semver": "^5.5.0", + "semver": "^6.1.1", "simple-concat": "^1.0.0", - "spdx-expression-validate": "^1.0.1", - "spdx-satisfies": "^4.0.0" + "spdx-expression-parse": "^3.0.0", + "spdx-expression-validate": "^2.0.0", + "spdx-osi": "^3.0.0", + "spdx-whitelisted": "^1.0.0" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + } } }, "load-json-file": { @@ -2520,11 +3314,11 @@ } }, "lock-verify": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/lock-verify/-/lock-verify-2.0.2.tgz", - "integrity": "sha512-QNVwK0EGZBS4R3YQ7F1Ox8p41Po9VGl2QG/2GsuvTbkJZYSsPeWHKMbbH6iZMCHWSMww5nrJroZYnGzI4cePuw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/lock-verify/-/lock-verify-2.1.0.tgz", + "integrity": "sha512-vcLpxnGvrqisKvLQ2C2v0/u7LVly17ak2YSgoK4PrdsYBXQIax19vhKiLfvKNFx7FRrpTnitrpzF/uuCMuorIg==", "requires": { - "npm-package-arg": "^5.1.2 || 6", + "npm-package-arg": "^6.1.0", "semver": "^5.4.1" } }, @@ -2536,6 +3330,12 @@ "signal-exit": "^3.0.2" } }, + "lodash": { + "version": "4.17.11", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz", + "integrity": "sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg==", + "dev": true + }, 
"lodash._baseindexof": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/lodash._baseindexof/-/lodash._baseindexof-3.1.0.tgz", @@ -2588,10 +3388,10 @@ "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", "integrity": "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=" }, - "lodash.isempty": { + "lodash.flattendeep": { "version": "4.4.0", - "resolved": "https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz", - "integrity": "sha1-b4bL7di+TsmHvpqvM8loTbGzHn4=", + "resolved": "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz", + "integrity": "sha1-+wMJF/hqMTTlvJvsDWngAT3f7bI=", "dev": true }, "lodash.restparam": { @@ -2621,12 +3421,12 @@ "dev": true }, "loose-envify": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.3.1.tgz", - "integrity": "sha1-0aitM/qc4OcT1l/dCsi3SNR4yEg=", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", "dev": true, "requires": { - "js-tokens": "^3.0.0" + "js-tokens": "^3.0.0 || ^4.0.0" } }, "lowercase-keys": { @@ -2635,12 +3435,11 @@ "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==" }, "lru-cache": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.3.tgz", - "integrity": "sha512-fFEhvcgzuIoJVUF8fYr5KR0YqxD238zgObTps31YdADwPPAp82a4M8TrckkWyx7ekNlf9aBcVn81cFwwXngrJA==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "requires": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" + "yallist": "^3.0.2" } }, "make-dir": { @@ -2651,17 +3450,23 @@ "pify": "^3.0.0" } }, + "make-error": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.5.tgz", + "integrity": "sha512-c3sIjNUow0+8swNwVpqoH4YCShKNFkMaw6oH1mNS2haDZQqkeZFlHS3dhoeEbKKmJB4vXpJucU6oH75aDYeE9g==", + "dev": true + }, "make-fetch-happen": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-4.0.1.tgz", - "integrity": "sha512-7R5ivfy9ilRJ1EMKIOziwrns9fGeAD4bAha8EB7BIiBBLHm2KeTUGCrICFt2rbHfzheTLynv50GnNTK1zDTrcQ==", + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-5.0.2.tgz", + "integrity": "sha512-07JHC0r1ykIoruKO8ifMXu+xEU8qOXDFETylktdug6vJDACnP+HKevOu3PXyNPzFyTSlz8vrBYlBO1JZRe8Cag==", "requires": { "agentkeepalive": "^3.4.1", - "cacache": "^11.0.1", + "cacache": "^12.0.0", "http-cache-semantics": "^3.8.1", "http-proxy-agent": "^2.1.0", - "https-proxy-agent": "^2.2.1", - "lru-cache": "^4.1.2", + "https-proxy-agent": "^2.2.3", + "lru-cache": "^5.1.1", "mississippi": "^3.0.0", "node-fetch-npm": "^2.0.2", "promise-retry": "^1.1.1", @@ -2669,28 +3474,25 @@ "ssri": "^6.0.0" } }, + "map-age-cleaner": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz", + "integrity": "sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w==", + "requires": { + "p-defer": "^1.0.0" + } + }, "marked": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/marked/-/marked-0.5.0.tgz", - "integrity": 
"sha512-UhjmkCWKu1SS/BIePL2a59BMJ7V42EYtTfksodPRXzPEGEph3Inp5dylseqt+KbU9Jglsx8xcMKmlumfJMBXAA==", + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/marked/-/marked-0.6.3.tgz", + "integrity": "sha512-Fqa7eq+UaxfMriqzYLayfqAE40WN03jf+zHjT18/uXNuzjq3TY0XTbrAoPeqSJrAmPz11VuUA+kBPYOhHt9oOQ==", "dev": true }, "marked-man": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/marked-man/-/marked-man-0.2.1.tgz", - "integrity": "sha1-8lknFIHeO1ByY0ifUiG3xaz9I4M=", - "dev": true, - "requires": { - "marked": "^0.3.2" - }, - "dependencies": { - "marked": { - "version": "0.3.19", - "resolved": "https://registry.npmjs.org/marked/-/marked-0.3.19.tgz", - "integrity": "sha512-ea2eGWOqNxPcXv8dyERdSr/6FmzvWwzjMxpfGB/sbMccXoct+xY+YukPD+QTUZwyvK7BZwcr4m21WBOW41pAkg==", - "dev": true - } - } + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/marked-man/-/marked-man-0.6.0.tgz", + "integrity": "sha512-lqzGe2uPo0KPO5J5yyfZ6PbEpZS6VIMHIzltXNzdwTNBysD0pNLtTaGrtT5R4aFT8UaVR3Fuz9rN3SEFYnea5Q==", + "dev": true }, "meant": { "version": "1.0.1", @@ -2698,11 +3500,37 @@ "integrity": "sha512-UakVLFjKkbbUwNWJ2frVLnnAtbb7D7DsloxRd3s/gDpI8rdv8W5Hp3NaDb+POBI1fQdeussER6NB8vpcRURvlg==" }, "mem": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/mem/-/mem-4.3.0.tgz", + "integrity": "sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w==", + "requires": { + "map-age-cleaner": "^0.1.1", + "mimic-fn": "^2.0.0", + "p-is-promise": "^2.0.0" + }, + "dependencies": { + "mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==" + } + } + }, + "merge-source-map": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/mem/-/mem-1.1.0.tgz", - "integrity": "sha1-Xt1StIXKHZAP5kiVUFOZoN+kX3Y=", + "resolved": "https://registry.npmjs.org/merge-source-map/-/merge-source-map-1.1.0.tgz", + "integrity": "sha512-Qkcp7P2ygktpMPh2mCQZaf3jhN6D3Z/qVZHSdWvQ+2Ef5HgRAPBO57A77+ENm0CPx2+1Ce/MYKi3ymqdfuqibw==", + "dev": true, "requires": { - "mimic-fn": "^1.0.0" + "source-map": "^0.6.1" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } } }, "mime-db": { @@ -2721,7 +3549,8 @@ "mimic-fn": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", - "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==" + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", + "dev": true }, "minimatch": { "version": "3.0.4", @@ -2740,6 +3569,7 @@ "version": "2.3.3", "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.3.3.tgz", "integrity": "sha512-/jAn9/tEX4gnpyRATxgHEOV6xbcyxgT7iUnxo9Y3+OB0zX00TgKIv/2FZCf5brBbICcwbLqVv2ImjvWWrQMSYw==", + "dev": true, "requires": { "safe-buffer": "^5.1.2", "yallist": "^3.0.0" @@ -2748,16 +3578,28 @@ "yallist": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.2.tgz", - "integrity": "sha1-hFK0u36Dx8GI2AQcGoN8dz1ti7k=" + "integrity": "sha1-hFK0u36Dx8GI2AQcGoN8dz1ti7k=", + "dev": true } } }, "minizlib": { - "version": "1.1.1", - "resolved": 
"https://registry.npmjs.org/minizlib/-/minizlib-1.1.1.tgz", - "integrity": "sha512-TrfjCjk4jLhcJyGMYymBH6oTXcWjYbUAXTHDbtnWHjZC25h0cdajHuPE1zxb4DVmu8crfh+HwH/WMuyLG0nHBg==", + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.3.3.tgz", + "integrity": "sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==", "requires": { - "minipass": "^2.2.1" + "minipass": "^2.9.0" + }, + "dependencies": { + "minipass": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz", + "integrity": "sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==", + "requires": { + "safe-buffer": "^5.1.2", + "yallist": "^3.0.0" + } + } } }, "mississippi": { @@ -2796,6 +3638,13 @@ "mkdirp": "^0.5.1", "rimraf": "^2.5.4", "run-queue": "^1.0.3" + }, + "dependencies": { + "aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==" + } } }, "ms": { @@ -2809,33 +3658,26 @@ "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=" }, "nano": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/nano/-/nano-7.0.0.tgz", - "integrity": "sha512-zR1jRRfpG/lcFjYnGGxabABLFRtFX1E7YqWIJzvC0dLRJ9NTxodJC4MzVifhriMT9yhulsOf8k2UNOX8fMULAg==", + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/nano/-/nano-8.1.0.tgz", + "integrity": "sha512-suMHW9XtTP8doR4FnId5+6ZfbAvDIZOAVp3qe7zTHXp7BvT/Cf4G9xBjXAthrIzoa+fkcionEr9xo8cZtvqMmg==", "dev": true, "requires": { "@types/request": "^2.47.1", - "cloudant-follow": "~0.17.0", - "debug": "^2.2.0", + "cloudant-follow": "^0.18.1", + "debug": "^4.1.1", "errs": "^0.3.2", - "lodash.isempty": "^4.4.0", - "request": "^2.85.0" + "request": "^2.88.0" }, "dependencies": { "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", "dev": true, "requires": { - "ms": "2.0.0" + "ms": "^2.1.1" } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true } } }, @@ -2845,15 +3687,22 @@ "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", "dev": true }, - "node-fetch": { - "version": "1.7.3", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-1.7.3.tgz", - "integrity": "sha512-NhZ4CsKx7cYm2vSrBAr2PvFOe6sWDf0UYLRqA6svUYg7+/TSfVAu49jYC4BvQ4Sms9SZgdqGBgroqfDhJdTyKQ==", - "dev": true, - "requires": { - "encoding": "^0.1.11", - "is-stream": "^1.0.1" - } + "neo-async": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.1.tgz", + "integrity": "sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw==", + "dev": true + }, + "nested-error-stacks": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/nested-error-stacks/-/nested-error-stacks-2.1.0.tgz", + "integrity": "sha512-AO81vsIO1k1sM4Zrd6Hu7regmJN1NSiAja10gc4bX3F0wd+9rQmcuHQaHVQCYIEC8iFXnE+mavh23GOt7wBgug==", + "dev": true + }, + "nice-try": { + "version": "1.0.5", + "resolved": 
"https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==" }, "node-fetch-npm": { "version": "2.0.2", @@ -2866,47 +3715,21 @@ } }, "node-gyp": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-3.8.0.tgz", - "integrity": "sha512-3g8lYefrRRzvGeSowdJKAKyks8oUpLEd/DyPV4eMhVlhJ0aNaZqIrNUIPuEWWTAoPqyFkfGrM67MC69baqn6vA==", + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-5.0.7.tgz", + "integrity": "sha512-K8aByl8OJD51V0VbUURTKsmdswkQQusIvlvmTyhHlIT1hBvaSxzdxpSle857XuXa7uc02UEZx9OR5aDxSWS5Qw==", "requires": { - "fstream": "^1.0.0", - "glob": "^7.0.3", - "graceful-fs": "^4.1.2", - "mkdirp": "^0.5.0", - "nopt": "2 || 3", - "npmlog": "0 || 1 || 2 || 3 || 4", - "osenv": "0", - "request": "^2.87.0", - "rimraf": "2", - "semver": "~5.3.0", - "tar": "^2.0.0", - "which": "1" - }, - "dependencies": { - "nopt": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-3.0.6.tgz", - "integrity": "sha1-xkZdvwirzU2zWTF/eaxopkayj/k=", - "requires": { - "abbrev": "1" - } - }, - "semver": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz", - "integrity": "sha1-myzl094C0XxgEq0yaqa00M9U+U8=" - }, - "tar": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/tar/-/tar-2.2.1.tgz", - "integrity": "sha1-jk0qJWwOIYXGsYrWlK7JaLg8sdE=", - "requires": { - "block-stream": "*", - "fstream": "^1.0.2", - "inherits": "2" - } - } + "env-paths": "^2.2.0", + "glob": "^7.1.4", + "graceful-fs": "^4.2.2", + "mkdirp": "^0.5.1", + "nopt": "^4.0.1", + "npmlog": "^4.1.2", + "request": "^2.88.0", + "rimraf": "^2.6.3", + "semver": "^5.7.1", + "tar": "^4.4.12", + "which": "^1.3.1" } }, "nopt": { @@ -2919,29 +3742,42 @@ } }, "normalize-package-data": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.4.0.tgz", - "integrity": "sha512-9jjUFbTPfEy3R/ad/2oNbKtW9Hgovl5O1FvFWKkKblNXoN/Oou6+9+KKohPK13Yc3/TyunyWhJp6gvRNR/PPAw==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", "requires": { "hosted-git-info": "^2.1.4", - "is-builtin-module": "^1.0.0", + "resolve": "^1.10.0", "semver": "2 || 3 || 4 || 5", "validate-npm-package-license": "^3.0.1" + }, + "dependencies": { + "resolve": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.10.0.tgz", + "integrity": "sha512-3sUr9aq5OfSg2S9pNtPA9hL1FVEAjvfOC4leW0SNf/mpnaakz2a9femSd6LqAww2RaFctwyf1lCqnTHuF1rxDg==", + "requires": { + "path-parse": "^1.0.6" + } + } } }, "npm-audit-report": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/npm-audit-report/-/npm-audit-report-1.3.1.tgz", - "integrity": "sha512-SjTF8ZP4rOu3JiFrTMi4M1CmVo2tni2sP4TzhyCMHwnMGf6XkdGLZKt9cdZ12esKf0mbQqFyU9LtY0SoeahL7g==", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/npm-audit-report/-/npm-audit-report-1.3.2.tgz", + "integrity": "sha512-abeqS5ONyXNaZJPGAf6TOUMNdSe1Y6cpc9MLBRn+CuUoYbfdca6AxOyXVlfIv9OgKX+cacblbG5w7A6ccwoTPw==", "requires": { "cli-table3": "^0.5.0", "console-control-strings": "^1.1.0" } }, "npm-bundled": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.0.5.tgz", - "integrity": 
"sha512-m/e6jgWu8/v5niCUKQi9qQl8QdeEduFA96xHDDzFGqly0OOjI7c+60KM/2sppfnUU9JJagf+zs+yGhqSOFj71g==" + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.1.1.tgz", + "integrity": "sha512-gqkfgGePhTpAEgUsGEgcq1rqPXA+tv/aVBlgEzfXwA1yiUJF7xtEt3CtVwOjNYQOVknDk0F20w58Fnm3EtG0fA==", + "requires": { + "npm-normalize-package-bin": "^1.0.1" + } }, "npm-cache-filename": { "version": "1.0.2", @@ -2949,21 +3785,27 @@ "integrity": "sha1-3tMGxbC/yHCp6fr4I7xfKD4FrhE=" }, "npm-install-checks": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-3.0.0.tgz", - "integrity": "sha1-1K7N/VGlPjcjt7L5Oy7ijjB7wNc=", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-3.0.2.tgz", + "integrity": "sha512-E4kzkyZDIWoin6uT5howP8VDvkM+E8IQDcHAycaAxMbwkqhIg5eEYALnXOl3Hq9MrkdQB/2/g1xwBINXdKSRkg==", "requires": { "semver": "^2.3.0 || 3.x || 4 || 5" } }, - "npm-lifecycle": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/npm-lifecycle/-/npm-lifecycle-2.1.0.tgz", - "integrity": "sha512-QbBfLlGBKsktwBZLj6AviHC6Q9Y3R/AY4a2PYSIRhSKSS0/CxRyD/PfxEX6tPeOCXQgMSNdwGeECacstgptc+g==", + "npm-license-corrections": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/npm-license-corrections/-/npm-license-corrections-1.3.0.tgz", + "integrity": "sha512-YnHOE4v9LCzRLC5IIsaatKjmqcGh10ksuUYTlU/v6DD5GzxHWCPZWJ+jqZquxxRDFKa7Jka61OD5hDbI1Iq7Ww==", + "dev": true + }, + "npm-lifecycle": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/npm-lifecycle/-/npm-lifecycle-3.1.4.tgz", + "integrity": "sha512-tgs1PaucZwkxECGKhC/stbEgFyc3TGh2TJcg2CDr6jbvQRdteHNhmMeljRzpe4wgFAXQADoy1cSqqi7mtiAa5A==", "requires": { "byline": "^5.0.0", - "graceful-fs": "^4.1.11", - "node-gyp": "^3.8.0", + "graceful-fs": "^4.1.15", + "node-gyp": "^5.0.2", "resolve-from": "^4.0.0", "slide": "^1.1.6", "uid-number": "0.0.6", @@ -2976,82 +3818,56 @@ "resolved": "https://registry.npmjs.org/npm-logical-tree/-/npm-logical-tree-1.2.1.tgz", "integrity": "sha512-AJI/qxDB2PWI4LG1CYN579AY1vCiNyWfkiquCsJWqntRu/WwimVrC8yXeILBFHDwxfOejxewlmnvW9XXjMlYIg==" }, + "npm-normalize-package-bin": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz", + "integrity": "sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==" + }, "npm-package-arg": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-6.1.0.tgz", - "integrity": "sha512-zYbhP2k9DbJhA0Z3HKUePUgdB1x7MfIfKssC+WLPFMKTBZKpZh5m13PgexJjCq6KW7j17r0jHWcCpxEqnnncSA==", + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-6.1.1.tgz", + "integrity": "sha512-qBpssaL3IOZWi5vEKUKW0cO7kzLeT+EQO9W8RsLOZf76KF9E/K9+wH0C7t06HXPpaH8WH5xF1MExLuCwbTqRUg==", "requires": { - "hosted-git-info": "^2.6.0", + "hosted-git-info": "^2.7.1", "osenv": "^0.1.5", - "semver": "^5.5.0", + "semver": "^5.6.0", "validate-npm-package-name": "^3.0.0" } }, "npm-packlist": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.1.12.tgz", - "integrity": "sha512-WJKFOVMeAlsU/pjXuqVdzU0WfgtIBCupkEVwn+1Y0ERAbUfWw8R4GjgVbaKnUjRoD2FoQbHOCbOyT5Mbs9Lw4g==", + "version": "1.4.8", + "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.4.8.tgz", + "integrity": 
"sha512-5+AZgwru5IevF5ZdnFglB5wNlHG1AOOuw28WhUq8/8emhBmLv6jX5by4WJCh7lW0uSYZYS6DXqIsyZVIXRZU9A==", "requires": { "ignore-walk": "^3.0.1", - "npm-bundled": "^1.0.1" + "npm-bundled": "^1.0.1", + "npm-normalize-package-bin": "^1.0.1" } }, "npm-pick-manifest": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-2.1.0.tgz", - "integrity": "sha512-q9zLP8cTr8xKPmMZN3naxp1k/NxVFsjxN6uWuO1tiw9gxg7wZWQ/b5UTfzD0ANw2q1lQxdLKTeCCksq+bPSgbQ==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-3.0.2.tgz", + "integrity": "sha512-wNprTNg+X5nf+tDi+hbjdHhM4bX+mKqv6XmPh7B5eG+QY9VARfQPfCEH013H5GqfNj6ee8Ij2fg8yk0mzps1Vw==", "requires": { + "figgy-pudding": "^3.5.1", "npm-package-arg": "^6.0.0", "semver": "^5.4.1" } }, "npm-profile": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-3.0.2.tgz", - "integrity": "sha512-rEJOFR6PbwOvvhGa2YTNOJQKNuc6RovJ6T50xPU7pS9h/zKPNCJ+VHZY2OFXyZvEi+UQYtHRTp8O/YM3tUD20A==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-4.0.4.tgz", + "integrity": "sha512-Ta8xq8TLMpqssF0H60BXS1A90iMoM6GeKwsmravJ6wYjWwSzcYBTdyWa3DZCYqPutacBMEm7cxiOkiIeCUAHDQ==", "requires": { "aproba": "^1.1.2 || 2", - "make-fetch-happen": "^2.5.0 || 3 || 4" - } - }, - "npm-registry-client": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/npm-registry-client/-/npm-registry-client-8.6.0.tgz", - "integrity": "sha512-Qs6P6nnopig+Y8gbzpeN/dkt+n7IyVd8f45NTMotGk6Qo7GfBmzwYx6jRLoOOgKiMnaQfYxsuyQlD8Mc3guBhg==", - "requires": { - "concat-stream": "^1.5.2", - "graceful-fs": "^4.1.6", - "normalize-package-data": "~1.0.1 || ^2.0.0", - "npm-package-arg": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0", - "npmlog": "2 || ^3.1.0 || ^4.0.0", - "once": "^1.3.3", - "request": "^2.74.0", - "retry": "^0.10.0", - "safe-buffer": "^5.1.1", - "semver": "2 >=2.2.1 || 3.x || 4 || 5", - "slide": "^1.1.3", - "ssri": "^5.2.4" - }, - "dependencies": { - "retry": { - "version": "0.10.1", - "resolved": "https://registry.npmjs.org/retry/-/retry-0.10.1.tgz", - "integrity": "sha1-52OI0heZLCUnUCQdPTlW/tmNj/Q=" - }, - "ssri": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/ssri/-/ssri-5.3.0.tgz", - "integrity": "sha512-XRSIPqLij52MtgoQavH/x/dU1qVKtWUAAZeOHsR9c2Ddi4XerFy3mc1alf+dLJKl9EUIm/Ht+EowFkTUOA6GAQ==", - "requires": { - "safe-buffer": "^5.1.1" - } - } + "figgy-pudding": "^3.4.1", + "npm-registry-fetch": "^4.0.0" } }, "npm-registry-couchapp": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/npm-registry-couchapp/-/npm-registry-couchapp-2.7.1.tgz", - "integrity": "sha512-NEU0/Sk0RglsfrnftOpuvmTIq0g0GwjGqXEBD7QkwKZRnMlw06/UxfBQuDaRD5nFxEgnOu150aP6sW/qdyi60w==", + "version": "2.7.3", + "resolved": "https://registry.npmjs.org/npm-registry-couchapp/-/npm-registry-couchapp-2.7.3.tgz", + "integrity": "sha512-Gdp4WHF6pl4Qke4CCuV34AiDzkNk6tOWAddR28LC2BG0AooTO6AY7wbWU+l/2zHwX7+sKWJFqzMlp6j7qI+SUw==", "dev": true, "requires": { "couchapp": "~0.11.0", @@ -3068,130 +3884,35 @@ } }, "npm-registry-fetch": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-1.1.0.tgz", - "integrity": "sha512-XJPIBfMtgaooRtZmuA42xCeLf3tkxdIX0xqRsGWwNrcVvJ9UYFccD7Ho7QWCzvkM3i/QrkUC37Hu0a+vDBmt5g==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-4.0.3.tgz", + "integrity": 
"sha512-WGvUx0lkKFhu9MbiGFuT9nG2NpfQ+4dCJwRwwtK2HK5izJEvwDxMeUyqbuMS7N/OkpVCqDorV6rO5E4V9F8lJw==", "requires": { + "JSONStream": "^1.3.4", "bluebird": "^3.5.1", - "figgy-pudding": "^2.0.1", - "lru-cache": "^4.1.2", - "make-fetch-happen": "^3.0.0", - "npm-package-arg": "^6.0.0", - "safe-buffer": "^5.1.1" + "figgy-pudding": "^3.4.1", + "lru-cache": "^5.1.1", + "make-fetch-happen": "^5.0.0", + "npm-package-arg": "^6.1.0", + "safe-buffer": "^5.2.0" }, "dependencies": { - "cacache": { - "version": "10.0.4", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-10.0.4.tgz", - "integrity": "sha512-Dph0MzuH+rTQzGPNT9fAnrPmMmjKfST6trxJeK7NQuHRaVw24VzPRWTmg9MpcwOVQZO0E1FBICUlFeNaKPIfHA==", - "requires": { - "bluebird": "^3.5.1", - "chownr": "^1.0.1", - "glob": "^7.1.2", - "graceful-fs": "^4.1.11", - "lru-cache": "^4.1.1", - "mississippi": "^2.0.0", - "mkdirp": "^0.5.1", - "move-concurrently": "^1.0.1", - "promise-inflight": "^1.0.1", - "rimraf": "^2.6.2", - "ssri": "^5.2.4", - "unique-filename": "^1.1.0", - "y18n": "^4.0.0" - }, - "dependencies": { - "mississippi": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mississippi/-/mississippi-2.0.0.tgz", - "integrity": "sha512-zHo8v+otD1J10j/tC+VNoGK9keCuByhKovAvdn74dmxJl9+mWHnx6EMsDN4lgRoMI/eYo2nchAxniIbUPb5onw==", - "requires": { - "concat-stream": "^1.5.0", - "duplexify": "^3.4.2", - "end-of-stream": "^1.1.0", - "flush-write-stream": "^1.0.0", - "from2": "^2.1.0", - "parallel-transform": "^1.1.0", - "pump": "^2.0.1", - "pumpify": "^1.3.3", - "stream-each": "^1.1.0", - "through2": "^2.0.0" - } - } - } - }, - "figgy-pudding": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/figgy-pudding/-/figgy-pudding-2.0.1.tgz", - "integrity": "sha512-yIJPhIBi/oFdU/P+GSXjmk/rmGjuZkm7A5LTXZxNrEprXJXRK012FiI1BR1Pga+0d/d6taWWD+B5d2ozqaxHig==" - }, - "make-fetch-happen": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-3.0.0.tgz", - "integrity": "sha512-FmWY7gC0mL6Z4N86vE14+m719JKE4H0A+pyiOH18B025gF/C113pyfb4gHDDYP5cqnRMHOz06JGdmffC/SES+w==", - "requires": { - "agentkeepalive": "^3.4.1", - "cacache": "^10.0.4", - "http-cache-semantics": "^3.8.1", - "http-proxy-agent": "^2.1.0", - "https-proxy-agent": "^2.2.0", - "lru-cache": "^4.1.2", - "mississippi": "^3.0.0", - "node-fetch-npm": "^2.0.2", - "promise-retry": "^1.1.1", - "socks-proxy-agent": "^3.0.1", - "ssri": "^5.2.4" - } - }, - "pump": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/pump/-/pump-2.0.1.tgz", - "integrity": "sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA==", - "requires": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, - "smart-buffer": { - "version": "1.1.15", - "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-1.1.15.tgz", - "integrity": "sha1-fxFLW2X6s+KjWqd1uxLw0cZJvxY=" - }, - "socks": { - "version": "1.1.10", - "resolved": "https://registry.npmjs.org/socks/-/socks-1.1.10.tgz", - "integrity": "sha1-W4t/x8jzQcU+0FbpKbe/Tei6e1o=", - "requires": { - "ip": "^1.1.4", - "smart-buffer": "^1.0.13" - } - }, - "socks-proxy-agent": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-3.0.1.tgz", - "integrity": "sha512-ZwEDymm204mTzvdqyUqOdovVr2YRd2NYskrYrF2LXyZ9qDiMAoFESGK8CRphiO7rtbo2Y757k2Nia3x2hGtalA==", - "requires": { - "agent-base": "^4.1.0", - "socks": "^1.1.10" - } - }, - "ssri": { - "version": "5.3.0", - "resolved": 
"https://registry.npmjs.org/ssri/-/ssri-5.3.0.tgz", - "integrity": "sha512-XRSIPqLij52MtgoQavH/x/dU1qVKtWUAAZeOHsR9c2Ddi4XerFy3mc1alf+dLJKl9EUIm/Ht+EowFkTUOA6GAQ==", - "requires": { - "safe-buffer": "^5.1.1" - } + "safe-buffer": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz", + "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==" } } }, "npm-registry-mock": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/npm-registry-mock/-/npm-registry-mock-1.1.0.tgz", - "integrity": "sha1-bkKiQixLK9AqaChfVr5wIiDGRMw=", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/npm-registry-mock/-/npm-registry-mock-1.3.0.tgz", + "integrity": "sha512-xjue2BSruEF8NM5IjFNq6w9qjZObR1XqqNe0aqM865wtqOmzXtt2yALsyEEJc5cuwCeHYrTHIjNByWeAypewsg==", "dev": true, "requires": { "hock": "~0.2.5", - "util-extend": "~1.0.1" + "readdir-scoped-modules": "^1.1.0", + "util-extend": "^1.0.3" } }, "npm-run-path": { @@ -3230,2631 +3951,277 @@ "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=" }, "nyc": { - "version": "11.9.0", - "resolved": "https://registry.npmjs.org/nyc/-/nyc-11.9.0.tgz", - "integrity": "sha512-w8OdJAhXL5izerzZMdqzYKMj/pgHJyY3qEPYBjLLxrhcVoHEY9pU5ENIiZyCgG9OR7x3VcUMoD40o6PtVpfR4g==", + "version": "14.1.1", + "resolved": "https://registry.npmjs.org/nyc/-/nyc-14.1.1.tgz", + "integrity": "sha512-OI0vm6ZGUnoGZv/tLdZ2esSVzDwUC88SNs+6JoSOMVxA+gKMB8Tk7jBwgemLx4O40lhhvZCVw1C+OYLOBOPXWw==", "dev": true, "requires": { "archy": "^1.0.0", - "arrify": "^1.0.1", - "caching-transform": "^1.0.0", - "convert-source-map": "^1.5.1", - "debug-log": "^1.0.1", - "default-require-extensions": "^1.0.0", - "find-cache-dir": "^0.1.1", - "find-up": "^2.1.0", - "foreground-child": "^1.5.3", - "glob": "^7.0.6", - "istanbul-lib-coverage": "^1.1.2", - "istanbul-lib-hook": "^1.1.0", - "istanbul-lib-instrument": "^1.10.0", - "istanbul-lib-report": "^1.1.3", - "istanbul-lib-source-maps": "^1.2.3", - "istanbul-reports": "^1.4.0", - "md5-hex": "^1.2.0", + "caching-transform": "^3.0.2", + "convert-source-map": "^1.6.0", + "cp-file": "^6.2.0", + "find-cache-dir": "^2.1.0", + "find-up": "^3.0.0", + "foreground-child": "^1.5.6", + "glob": "^7.1.3", + "istanbul-lib-coverage": "^2.0.5", + "istanbul-lib-hook": "^2.0.7", + "istanbul-lib-instrument": "^3.3.0", + "istanbul-lib-report": "^2.0.8", + "istanbul-lib-source-maps": "^3.0.6", + "istanbul-reports": "^2.2.4", + "js-yaml": "^3.13.1", + "make-dir": "^2.1.0", "merge-source-map": "^1.1.0", - "micromatch": "^3.1.10", - "mkdirp": "^0.5.0", - "resolve-from": "^2.0.0", - "rimraf": "^2.6.2", - "signal-exit": "^3.0.1", + "resolve-from": "^4.0.0", + "rimraf": "^2.6.3", + "signal-exit": "^3.0.2", "spawn-wrap": "^1.4.2", - "test-exclude": "^4.2.0", - "yargs": "11.1.0", - "yargs-parser": "^8.0.0" + "test-exclude": "^5.2.3", + "uuid": "^3.3.2", + "yargs": "^13.2.2", + "yargs-parser": "^13.0.0" }, "dependencies": { - "align-text": { - "version": "0.1.4", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^3.0.2", - "longest": "^1.0.1", - "repeat-string": "^1.5.2" - } - }, - "amdefine": { - "version": "1.0.1", - "bundled": true, - "dev": true - }, "ansi-regex": { - "version": "2.1.1", - "bundled": true, - "dev": true - }, - "ansi-styles": { - "version": "2.2.1", - "bundled": true, - "dev": true - }, - "append-transform": { - "version": "0.4.0", - "bundled": true, - "dev": true, - "requires": { - "default-require-extensions": "^1.0.0" - } - }, - "archy": { - "version": 
"1.0.0", - "bundled": true, - "dev": true - }, - "arr-diff": { - "version": "4.0.0", - "bundled": true, - "dev": true - }, - "arr-flatten": { - "version": "1.1.0", - "bundled": true, - "dev": true - }, - "arr-union": { - "version": "3.1.0", - "bundled": true, - "dev": true - }, - "array-unique": { - "version": "0.3.2", - "bundled": true, - "dev": true - }, - "arrify": { - "version": "1.0.1", - "bundled": true, - "dev": true - }, - "assign-symbols": { - "version": "1.0.0", - "bundled": true, + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", "dev": true }, - "async": { - "version": "1.5.2", - "bundled": true, - "dev": true - }, - "atob": { - "version": "2.1.1", - "bundled": true, + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", "dev": true }, - "babel-code-frame": { - "version": "6.26.0", - "bundled": true, - "dev": true, - "requires": { - "chalk": "^1.1.3", - "esutils": "^2.0.2", - "js-tokens": "^3.0.2" - } - }, - "babel-generator": { - "version": "6.26.1", - "bundled": true, - "dev": true, - "requires": { - "babel-messages": "^6.23.0", - "babel-runtime": "^6.26.0", - "babel-types": "^6.26.0", - "detect-indent": "^4.0.0", - "jsesc": "^1.3.0", - "lodash": "^4.17.4", - "source-map": "^0.5.7", - "trim-right": "^1.0.1" - } - }, - "babel-messages": { - "version": "6.23.0", - "bundled": true, - "dev": true, - "requires": { - "babel-runtime": "^6.22.0" - } - }, - "babel-runtime": { - "version": "6.26.0", - "bundled": true, + "cliui": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", + "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", "dev": true, "requires": { - "core-js": "^2.4.0", - "regenerator-runtime": "^0.11.0" + "string-width": "^3.1.0", + "strip-ansi": "^5.2.0", + "wrap-ansi": "^5.1.0" } }, - "babel-template": { - "version": "6.26.0", - "bundled": true, + "cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", "dev": true, "requires": { - "babel-runtime": "^6.26.0", - "babel-traverse": "^6.26.0", - "babel-types": "^6.26.0", - "babylon": "^6.18.0", - "lodash": "^4.17.4" + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" } }, - "babel-traverse": { - "version": "6.26.0", - "bundled": true, + "execa": { + "version": "1.0.0", + "resolved": false, + "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", "dev": true, "requires": { - "babel-code-frame": "^6.26.0", - "babel-messages": "^6.23.0", - "babel-runtime": "^6.26.0", - "babel-types": "^6.26.0", - "babylon": "^6.18.0", - "debug": "^2.6.8", - "globals": "^9.18.0", - "invariant": "^2.2.2", - "lodash": "^4.17.4" + "cross-spawn": "^6.0.0", + "get-stream": "^4.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" } }, - "babel-types": { - "version": "6.26.0", - "bundled": true, + "find-up": { + "version": "3.0.0", + "resolved": 
false, + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", "dev": true, "requires": { - "babel-runtime": "^6.26.0", - "esutils": "^2.0.2", - "lodash": "^4.17.4", - "to-fast-properties": "^1.0.3" + "locate-path": "^3.0.0" } }, - "babylon": { - "version": "6.18.0", - "bundled": true, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", "dev": true }, - "balanced-match": { - "version": "1.0.0", - "bundled": true, + "invert-kv": { + "version": "2.0.0", + "resolved": false, + "integrity": "sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA==", "dev": true }, - "base": { - "version": "0.11.2", - "bundled": true, - "dev": true, - "requires": { - "cache-base": "^1.0.1", - "class-utils": "^0.3.5", - "component-emitter": "^1.2.1", - "define-property": "^1.0.0", - "isobject": "^3.0.1", - "mixin-deep": "^1.2.0", - "pascalcase": "^0.1.1" - }, - "dependencies": { - "define-property": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "is-descriptor": "^1.0.0" - } - }, - "is-accessor-descriptor": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-data-descriptor": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-descriptor": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "requires": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - } - }, - "isobject": { - "version": "3.0.1", - "bundled": true, - "dev": true - }, - "kind-of": { - "version": "6.0.2", - "bundled": true, - "dev": true - } - } - }, - "brace-expansion": { - "version": "1.1.11", - "bundled": true, - "dev": true, - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "braces": { - "version": "2.3.2", - "bundled": true, - "dev": true, - "requires": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "builtin-modules": { - "version": "1.1.1", - "bundled": true, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": false, + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", "dev": true }, - "cache-base": { - "version": "1.0.1", - "bundled": true, - "dev": true, - "requires": { - "collection-visit": "^1.0.0", - "component-emitter": "^1.2.1", - "get-value": "^2.0.6", - "has-value": "^1.0.0", - "isobject": "^3.0.1", - "set-value": "^2.0.0", - "to-object-path": "^0.3.0", - "union-value": "^1.0.0", - "unset-value": "^1.0.0" - }, - "dependencies": { - "isobject": { - "version": "3.0.1", - "bundled": true, - "dev": true - } - } - }, - "caching-transform": { - "version": "1.0.1", - "bundled": true, - "dev": true, - "requires": { - "md5-hex": "^1.2.0", - "mkdirp": "^0.5.1", - "write-file-atomic": "^1.1.4" - } - }, - "camelcase": { - "version": "1.2.1", - "bundled": true, - "dev": true, - "optional": true - }, - "center-align": { - 
"version": "0.1.3", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "align-text": "^0.1.3", - "lazy-cache": "^1.0.3" - } - }, - "chalk": { - "version": "1.1.3", - "bundled": true, - "dev": true, - "requires": { - "ansi-styles": "^2.2.1", - "escape-string-regexp": "^1.0.2", - "has-ansi": "^2.0.0", - "strip-ansi": "^3.0.0", - "supports-color": "^2.0.0" - } - }, - "class-utils": { - "version": "0.3.6", - "bundled": true, + "js-yaml": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", "dev": true, "requires": { - "arr-union": "^3.1.0", - "define-property": "^0.2.5", - "isobject": "^3.0.0", - "static-extend": "^0.1.1" - }, - "dependencies": { - "define-property": { - "version": "0.2.5", - "bundled": true, - "dev": true, - "requires": { - "is-descriptor": "^0.1.0" - } - }, - "isobject": { - "version": "3.0.1", - "bundled": true, - "dev": true - } + "argparse": "^1.0.7", + "esprima": "^4.0.0" } }, - "cliui": { - "version": "2.1.0", - "bundled": true, + "lcid": { + "version": "2.0.0", + "resolved": false, + "integrity": "sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA==", "dev": true, - "optional": true, - "requires": { - "center-align": "^0.1.1", - "right-align": "^0.1.1", - "wordwrap": "0.0.2" - }, - "dependencies": { - "wordwrap": { - "version": "0.0.2", - "bundled": true, - "dev": true, - "optional": true - } - } - }, - "code-point-at": { - "version": "1.1.0", - "bundled": true, - "dev": true - }, - "collection-visit": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "map-visit": "^1.0.0", - "object-visit": "^1.0.0" - } - }, - "commondir": { - "version": "1.0.1", - "bundled": true, - "dev": true - }, - "component-emitter": { - "version": "1.2.1", - "bundled": true, - "dev": true - }, - "concat-map": { - "version": "0.0.1", - "bundled": true, - "dev": true - }, - "convert-source-map": { - "version": "1.5.1", - "bundled": true, - "dev": true - }, - "copy-descriptor": { - "version": "0.1.1", - "bundled": true, - "dev": true - }, - "core-js": { - "version": "2.5.6", - "bundled": true, - "dev": true - }, - "cross-spawn": { - "version": "4.0.2", - "bundled": true, - "dev": true, - "requires": { - "lru-cache": "^4.0.1", - "which": "^1.2.9" - } - }, - "debug": { - "version": "2.6.9", - "bundled": true, - "dev": true, - "requires": { - "ms": "2.0.0" - } - }, - "debug-log": { - "version": "1.0.1", - "bundled": true, - "dev": true - }, - "decamelize": { - "version": "1.2.0", - "bundled": true, - "dev": true - }, - "decode-uri-component": { - "version": "0.2.0", - "bundled": true, - "dev": true - }, - "default-require-extensions": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "strip-bom": "^2.0.0" - } - }, - "define-property": { - "version": "2.0.2", - "bundled": true, - "dev": true, - "requires": { - "is-descriptor": "^1.0.2", - "isobject": "^3.0.1" - }, - "dependencies": { - "is-accessor-descriptor": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-data-descriptor": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-descriptor": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "requires": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - } - 
}, - "isobject": { - "version": "3.0.1", - "bundled": true, - "dev": true - }, - "kind-of": { - "version": "6.0.2", - "bundled": true, - "dev": true - } - } - }, - "detect-indent": { - "version": "4.0.0", - "bundled": true, - "dev": true, - "requires": { - "repeating": "^2.0.0" - } - }, - "error-ex": { - "version": "1.3.1", - "bundled": true, - "dev": true, - "requires": { - "is-arrayish": "^0.2.1" - } - }, - "escape-string-regexp": { - "version": "1.0.5", - "bundled": true, - "dev": true - }, - "esutils": { - "version": "2.0.2", - "bundled": true, - "dev": true - }, - "execa": { - "version": "0.7.0", - "bundled": true, - "dev": true, - "requires": { - "cross-spawn": "^5.0.1", - "get-stream": "^3.0.0", - "is-stream": "^1.1.0", - "npm-run-path": "^2.0.0", - "p-finally": "^1.0.0", - "signal-exit": "^3.0.0", - "strip-eof": "^1.0.0" - }, - "dependencies": { - "cross-spawn": { - "version": "5.1.0", - "bundled": true, - "dev": true, - "requires": { - "lru-cache": "^4.0.1", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - } - } - }, - "expand-brackets": { - "version": "2.1.4", - "bundled": true, - "dev": true, - "requires": { - "debug": "^2.3.3", - "define-property": "^0.2.5", - "extend-shallow": "^2.0.1", - "posix-character-classes": "^0.1.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "dependencies": { - "define-property": { - "version": "0.2.5", - "bundled": true, - "dev": true, - "requires": { - "is-descriptor": "^0.1.0" - } - }, - "extend-shallow": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "extend-shallow": { - "version": "3.0.2", - "bundled": true, - "dev": true, - "requires": { - "assign-symbols": "^1.0.0", - "is-extendable": "^1.0.1" - }, - "dependencies": { - "is-extendable": { - "version": "1.0.1", - "bundled": true, - "dev": true, - "requires": { - "is-plain-object": "^2.0.4" - } - } - } - }, - "extglob": { - "version": "2.0.4", - "bundled": true, - "dev": true, - "requires": { - "array-unique": "^0.3.2", - "define-property": "^1.0.0", - "expand-brackets": "^2.1.4", - "extend-shallow": "^2.0.1", - "fragment-cache": "^0.2.1", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "dependencies": { - "define-property": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "is-descriptor": "^1.0.0" - } - }, - "extend-shallow": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - }, - "is-accessor-descriptor": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-data-descriptor": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-descriptor": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "requires": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - } - }, - "kind-of": { - "version": "6.0.2", - "bundled": true, - "dev": true - } - } - }, - "fill-range": { - "version": "4.0.0", - "bundled": true, - "dev": true, - "requires": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "find-cache-dir": { - "version": "0.1.1", - "bundled": true, - "dev": true, - "requires": { 
- "commondir": "^1.0.1", - "mkdirp": "^0.5.1", - "pkg-dir": "^1.0.0" - } - }, - "find-up": { - "version": "2.1.0", - "bundled": true, - "dev": true, - "requires": { - "locate-path": "^2.0.0" - } - }, - "for-in": { - "version": "1.0.2", - "bundled": true, - "dev": true - }, - "foreground-child": { - "version": "1.5.6", - "bundled": true, - "dev": true, - "requires": { - "cross-spawn": "^4", - "signal-exit": "^3.0.0" - } - }, - "fragment-cache": { - "version": "0.2.1", - "bundled": true, - "dev": true, - "requires": { - "map-cache": "^0.2.2" - } - }, - "fs.realpath": { - "version": "1.0.0", - "bundled": true, - "dev": true - }, - "get-caller-file": { - "version": "1.0.2", - "bundled": true, - "dev": true - }, - "get-stream": { - "version": "3.0.0", - "bundled": true, - "dev": true - }, - "get-value": { - "version": "2.0.6", - "bundled": true, - "dev": true - }, - "glob": { - "version": "7.1.2", - "bundled": true, - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "globals": { - "version": "9.18.0", - "bundled": true, - "dev": true - }, - "graceful-fs": { - "version": "4.1.11", - "bundled": true, - "dev": true - }, - "handlebars": { - "version": "4.0.11", - "bundled": true, - "dev": true, - "requires": { - "async": "^1.4.0", - "optimist": "^0.6.1", - "source-map": "^0.4.4", - "uglify-js": "^2.6" - }, - "dependencies": { - "source-map": { - "version": "0.4.4", - "bundled": true, - "dev": true, - "requires": { - "amdefine": ">=0.0.4" - } - } - } - }, - "has-ansi": { - "version": "2.0.0", - "bundled": true, - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "has-flag": { - "version": "1.0.0", - "bundled": true, - "dev": true - }, - "has-value": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "get-value": "^2.0.6", - "has-values": "^1.0.0", - "isobject": "^3.0.0" - }, - "dependencies": { - "isobject": { - "version": "3.0.1", - "bundled": true, - "dev": true - } - } - }, - "has-values": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "is-number": "^3.0.0", - "kind-of": "^4.0.0" - }, - "dependencies": { - "is-number": { - "version": "3.0.0", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "bundled": true, - "dev": true, - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "kind-of": { - "version": "4.0.0", - "bundled": true, - "dev": true, - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "hosted-git-info": { - "version": "2.6.0", - "bundled": true, - "dev": true - }, - "imurmurhash": { - "version": "0.1.4", - "bundled": true, - "dev": true - }, - "inflight": { - "version": "1.0.6", - "bundled": true, - "dev": true, - "requires": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "inherits": { - "version": "2.0.3", - "bundled": true, - "dev": true - }, - "invariant": { - "version": "2.2.4", - "bundled": true, - "dev": true, - "requires": { - "loose-envify": "^1.0.0" - } - }, - "invert-kv": { - "version": "1.0.0", - "bundled": true, - "dev": true - }, - "is-accessor-descriptor": { - "version": "0.1.6", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^3.0.2" - } - }, - "is-arrayish": { - "version": "0.2.1", - "bundled": true, - "dev": true - }, - "is-buffer": { - "version": "1.1.6", - "bundled": true, - "dev": true - }, - "is-builtin-module": { - "version": "1.0.0", - "bundled": 
true, - "dev": true, - "requires": { - "builtin-modules": "^1.0.0" - } - }, - "is-data-descriptor": { - "version": "0.1.4", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^3.0.2" - } - }, - "is-descriptor": { - "version": "0.1.6", - "bundled": true, - "dev": true, - "requires": { - "is-accessor-descriptor": "^0.1.6", - "is-data-descriptor": "^0.1.4", - "kind-of": "^5.0.0" - }, - "dependencies": { - "kind-of": { - "version": "5.1.0", - "bundled": true, - "dev": true - } - } - }, - "is-extendable": { - "version": "0.1.1", - "bundled": true, - "dev": true - }, - "is-finite": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "requires": { - "number-is-nan": "^1.0.0" - } - }, - "is-fullwidth-code-point": { - "version": "2.0.0", - "bundled": true, - "dev": true - }, - "is-number": { - "version": "3.0.0", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^3.0.2" - } - }, - "is-odd": { - "version": "2.0.0", - "bundled": true, - "dev": true, - "requires": { - "is-number": "^4.0.0" - }, - "dependencies": { - "is-number": { - "version": "4.0.0", - "bundled": true, - "dev": true - } - } - }, - "is-plain-object": { - "version": "2.0.4", - "bundled": true, - "dev": true, - "requires": { - "isobject": "^3.0.1" - }, - "dependencies": { - "isobject": { - "version": "3.0.1", - "bundled": true, - "dev": true - } - } - }, - "is-stream": { - "version": "1.1.0", - "bundled": true, - "dev": true - }, - "is-utf8": { - "version": "0.2.1", - "bundled": true, - "dev": true - }, - "is-windows": { - "version": "1.0.2", - "bundled": true, - "dev": true - }, - "isarray": { - "version": "1.0.0", - "bundled": true, - "dev": true - }, - "isexe": { - "version": "2.0.0", - "bundled": true, - "dev": true - }, - "isobject": { - "version": "3.0.1", - "bundled": true, - "dev": true - }, - "istanbul-lib-coverage": { - "version": "1.2.0", - "bundled": true, - "dev": true - }, - "istanbul-lib-hook": { - "version": "1.1.0", - "bundled": true, - "dev": true, - "requires": { - "append-transform": "^0.4.0" - } - }, - "istanbul-lib-instrument": { - "version": "1.10.1", - "bundled": true, - "dev": true, - "requires": { - "babel-generator": "^6.18.0", - "babel-template": "^6.16.0", - "babel-traverse": "^6.18.0", - "babel-types": "^6.18.0", - "babylon": "^6.18.0", - "istanbul-lib-coverage": "^1.2.0", - "semver": "^5.3.0" - } - }, - "istanbul-lib-report": { - "version": "1.1.3", - "bundled": true, - "dev": true, - "requires": { - "istanbul-lib-coverage": "^1.1.2", - "mkdirp": "^0.5.1", - "path-parse": "^1.0.5", - "supports-color": "^3.1.2" - }, - "dependencies": { - "supports-color": { - "version": "3.2.3", - "bundled": true, - "dev": true, - "requires": { - "has-flag": "^1.0.0" - } - } - } - }, - "istanbul-lib-source-maps": { - "version": "1.2.3", - "bundled": true, - "dev": true, - "requires": { - "debug": "^3.1.0", - "istanbul-lib-coverage": "^1.1.2", - "mkdirp": "^0.5.1", - "rimraf": "^2.6.1", - "source-map": "^0.5.3" - }, - "dependencies": { - "debug": { - "version": "3.1.0", - "bundled": true, - "dev": true, - "requires": { - "ms": "2.0.0" - } - } - } - }, - "istanbul-reports": { - "version": "1.4.0", - "bundled": true, - "dev": true, - "requires": { - "handlebars": "^4.0.3" - } - }, - "js-tokens": { - "version": "3.0.2", - "bundled": true, - "dev": true - }, - "jsesc": { - "version": "1.3.0", - "bundled": true, - "dev": true - }, - "kind-of": { - "version": "3.2.2", - "bundled": true, - "dev": true, - "requires": { - "is-buffer": "^1.1.5" - } - }, - "lazy-cache": { - "version": "1.0.4", - 
"bundled": true, - "dev": true, - "optional": true - }, - "lcid": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "invert-kv": "^1.0.0" - } - }, - "load-json-file": { - "version": "1.1.0", - "bundled": true, - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "parse-json": "^2.2.0", - "pify": "^2.0.0", - "pinkie-promise": "^2.0.0", - "strip-bom": "^2.0.0" - } - }, - "locate-path": { - "version": "2.0.0", - "bundled": true, - "dev": true, - "requires": { - "p-locate": "^2.0.0", - "path-exists": "^3.0.0" - }, - "dependencies": { - "path-exists": { - "version": "3.0.0", - "bundled": true, - "dev": true - } - } - }, - "lodash": { - "version": "4.17.10", - "bundled": true, - "dev": true - }, - "longest": { - "version": "1.0.1", - "bundled": true, - "dev": true - }, - "loose-envify": { - "version": "1.3.1", - "bundled": true, - "dev": true, - "requires": { - "js-tokens": "^3.0.0" - } - }, - "lru-cache": { - "version": "4.1.3", - "bundled": true, - "dev": true, - "requires": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" - } - }, - "map-cache": { - "version": "0.2.2", - "bundled": true, - "dev": true - }, - "map-visit": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "object-visit": "^1.0.0" - } - }, - "md5-hex": { - "version": "1.3.0", - "bundled": true, - "dev": true, - "requires": { - "md5-o-matic": "^0.1.1" - } - }, - "md5-o-matic": { - "version": "0.1.1", - "bundled": true, - "dev": true - }, - "mem": { - "version": "1.1.0", - "bundled": true, - "dev": true, - "requires": { - "mimic-fn": "^1.0.0" - } - }, - "merge-source-map": { - "version": "1.1.0", - "bundled": true, - "dev": true, - "requires": { - "source-map": "^0.6.1" - }, - "dependencies": { - "source-map": { - "version": "0.6.1", - "bundled": true, - "dev": true - } - } - }, - "micromatch": { - "version": "3.1.10", - "bundled": true, - "dev": true, - "requires": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "6.0.2", - "bundled": true, - "dev": true - } - } - }, - "mimic-fn": { - "version": "1.2.0", - "bundled": true, - "dev": true - }, - "minimatch": { - "version": "3.0.4", - "bundled": true, - "dev": true, - "requires": { - "brace-expansion": "^1.1.7" - } - }, - "minimist": { - "version": "0.0.8", - "bundled": true, - "dev": true - }, - "mixin-deep": { - "version": "1.3.1", - "bundled": true, - "dev": true, - "requires": { - "for-in": "^1.0.2", - "is-extendable": "^1.0.1" - }, - "dependencies": { - "is-extendable": { - "version": "1.0.1", - "bundled": true, - "dev": true, - "requires": { - "is-plain-object": "^2.0.4" - } - } - } - }, - "mkdirp": { - "version": "0.5.1", - "bundled": true, - "dev": true, - "requires": { - "minimist": "0.0.8" - } - }, - "ms": { - "version": "2.0.0", - "bundled": true, - "dev": true - }, - "nanomatch": { - "version": "1.2.9", - "bundled": true, - "dev": true, - "requires": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "fragment-cache": "^0.2.1", - "is-odd": "^2.0.0", - "is-windows": "^1.0.2", - "kind-of": "^6.0.2", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - 
"dependencies": { - "arr-diff": { - "version": "4.0.0", - "bundled": true, - "dev": true - }, - "array-unique": { - "version": "0.3.2", - "bundled": true, - "dev": true - }, - "kind-of": { - "version": "6.0.2", - "bundled": true, - "dev": true - } - } - }, - "normalize-package-data": { - "version": "2.4.0", - "bundled": true, - "dev": true, - "requires": { - "hosted-git-info": "^2.1.4", - "is-builtin-module": "^1.0.0", - "semver": "2 || 3 || 4 || 5", - "validate-npm-package-license": "^3.0.1" - } - }, - "npm-run-path": { - "version": "2.0.2", - "bundled": true, - "dev": true, - "requires": { - "path-key": "^2.0.0" - } - }, - "number-is-nan": { - "version": "1.0.1", - "bundled": true, - "dev": true - }, - "object-assign": { - "version": "4.1.1", - "bundled": true, - "dev": true - }, - "object-copy": { - "version": "0.1.0", - "bundled": true, - "dev": true, - "requires": { - "copy-descriptor": "^0.1.0", - "define-property": "^0.2.5", - "kind-of": "^3.0.3" - }, - "dependencies": { - "define-property": { - "version": "0.2.5", - "bundled": true, - "dev": true, - "requires": { - "is-descriptor": "^0.1.0" - } - } - } - }, - "object-visit": { - "version": "1.0.1", - "bundled": true, - "dev": true, - "requires": { - "isobject": "^3.0.0" - }, - "dependencies": { - "isobject": { - "version": "3.0.1", - "bundled": true, - "dev": true - } - } - }, - "object.pick": { - "version": "1.3.0", - "bundled": true, - "dev": true, - "requires": { - "isobject": "^3.0.1" - }, - "dependencies": { - "isobject": { - "version": "3.0.1", - "bundled": true, - "dev": true - } - } - }, - "once": { - "version": "1.4.0", - "bundled": true, - "dev": true, - "requires": { - "wrappy": "1" - } - }, - "optimist": { - "version": "0.6.1", - "bundled": true, - "dev": true, - "requires": { - "minimist": "~0.0.1", - "wordwrap": "~0.0.2" - } - }, - "os-homedir": { - "version": "1.0.2", - "bundled": true, - "dev": true - }, - "os-locale": { - "version": "2.1.0", - "bundled": true, - "dev": true, - "requires": { - "execa": "^0.7.0", - "lcid": "^1.0.0", - "mem": "^1.1.0" - } - }, - "p-finally": { - "version": "1.0.0", - "bundled": true, - "dev": true - }, - "p-limit": { - "version": "1.2.0", - "bundled": true, - "dev": true, - "requires": { - "p-try": "^1.0.0" - } - }, - "p-locate": { - "version": "2.0.0", - "bundled": true, - "dev": true, - "requires": { - "p-limit": "^1.1.0" - } - }, - "p-try": { - "version": "1.0.0", - "bundled": true, - "dev": true - }, - "parse-json": { - "version": "2.2.0", - "bundled": true, - "dev": true, - "requires": { - "error-ex": "^1.2.0" - } - }, - "pascalcase": { - "version": "0.1.1", - "bundled": true, - "dev": true - }, - "path-exists": { - "version": "2.1.0", - "bundled": true, - "dev": true, - "requires": { - "pinkie-promise": "^2.0.0" - } - }, - "path-is-absolute": { - "version": "1.0.1", - "bundled": true, - "dev": true - }, - "path-key": { - "version": "2.0.1", - "bundled": true, - "dev": true - }, - "path-parse": { - "version": "1.0.5", - "bundled": true, - "dev": true - }, - "path-type": { - "version": "1.1.0", - "bundled": true, - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "pify": "^2.0.0", - "pinkie-promise": "^2.0.0" - } - }, - "pify": { - "version": "2.3.0", - "bundled": true, - "dev": true - }, - "pinkie": { - "version": "2.0.4", - "bundled": true, - "dev": true - }, - "pinkie-promise": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "requires": { - "pinkie": "^2.0.0" - } - }, - "pkg-dir": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - 
"find-up": "^1.0.0" - }, - "dependencies": { - "find-up": { - "version": "1.1.2", - "bundled": true, - "dev": true, - "requires": { - "path-exists": "^2.0.0", - "pinkie-promise": "^2.0.0" - } - } - } - }, - "posix-character-classes": { - "version": "0.1.1", - "bundled": true, - "dev": true - }, - "pseudomap": { - "version": "1.0.2", - "bundled": true, - "dev": true - }, - "read-pkg": { - "version": "1.1.0", - "bundled": true, - "dev": true, - "requires": { - "load-json-file": "^1.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^1.0.0" - } - }, - "read-pkg-up": { - "version": "1.0.1", - "bundled": true, - "dev": true, - "requires": { - "find-up": "^1.0.0", - "read-pkg": "^1.0.0" - }, - "dependencies": { - "find-up": { - "version": "1.1.2", - "bundled": true, - "dev": true, - "requires": { - "path-exists": "^2.0.0", - "pinkie-promise": "^2.0.0" - } - } - } - }, - "regenerator-runtime": { - "version": "0.11.1", - "bundled": true, - "dev": true - }, - "regex-not": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "requires": { - "extend-shallow": "^3.0.2", - "safe-regex": "^1.1.0" - } - }, - "repeat-element": { - "version": "1.1.2", - "bundled": true, - "dev": true - }, - "repeat-string": { - "version": "1.6.1", - "bundled": true, - "dev": true - }, - "repeating": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "requires": { - "is-finite": "^1.0.0" - } - }, - "require-directory": { - "version": "2.1.1", - "bundled": true, - "dev": true - }, - "require-main-filename": { - "version": "1.0.1", - "bundled": true, - "dev": true - }, - "resolve-from": { - "version": "2.0.0", - "bundled": true, - "dev": true - }, - "resolve-url": { - "version": "0.2.1", - "bundled": true, - "dev": true - }, - "ret": { - "version": "0.1.15", - "bundled": true, - "dev": true - }, - "right-align": { - "version": "0.1.3", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "align-text": "^0.1.1" - } - }, - "rimraf": { - "version": "2.6.2", - "bundled": true, - "dev": true, - "requires": { - "glob": "^7.0.5" - } - }, - "safe-regex": { - "version": "1.1.0", - "bundled": true, - "dev": true, - "requires": { - "ret": "~0.1.10" - } - }, - "semver": { - "version": "5.5.0", - "bundled": true, - "dev": true - }, - "set-blocking": { - "version": "2.0.0", - "bundled": true, - "dev": true - }, - "set-value": { - "version": "2.0.0", - "bundled": true, - "dev": true, - "requires": { - "extend-shallow": "^2.0.1", - "is-extendable": "^0.1.1", - "is-plain-object": "^2.0.3", - "split-string": "^3.0.1" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "shebang-command": { - "version": "1.2.0", - "bundled": true, - "dev": true, - "requires": { - "shebang-regex": "^1.0.0" - } - }, - "shebang-regex": { - "version": "1.0.0", - "bundled": true, - "dev": true - }, - "signal-exit": { - "version": "3.0.2", - "bundled": true, - "dev": true - }, - "slide": { - "version": "1.1.6", - "bundled": true, - "dev": true - }, - "snapdragon": { - "version": "0.8.2", - "bundled": true, - "dev": true, - "requires": { - "base": "^0.11.1", - "debug": "^2.2.0", - "define-property": "^0.2.5", - "extend-shallow": "^2.0.1", - "map-cache": "^0.2.2", - "source-map": "^0.5.6", - "source-map-resolve": "^0.5.0", - "use": "^3.1.0" - }, - "dependencies": { - "define-property": { - "version": "0.2.5", - "bundled": true, - "dev": true, - "requires": { - "is-descriptor": "^0.1.0" - } - }, - "extend-shallow": 
{ - "version": "2.0.1", - "bundled": true, - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "snapdragon-node": { - "version": "2.1.1", - "bundled": true, - "dev": true, - "requires": { - "define-property": "^1.0.0", - "isobject": "^3.0.0", - "snapdragon-util": "^3.0.1" - }, - "dependencies": { - "define-property": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "is-descriptor": "^1.0.0" - } - }, - "is-accessor-descriptor": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-data-descriptor": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-descriptor": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "requires": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - } - }, - "isobject": { - "version": "3.0.1", - "bundled": true, - "dev": true - }, - "kind-of": { - "version": "6.0.2", - "bundled": true, - "dev": true - } - } - }, - "snapdragon-util": { - "version": "3.0.1", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^3.2.0" - } - }, - "source-map": { - "version": "0.5.7", - "bundled": true, - "dev": true - }, - "source-map-resolve": { - "version": "0.5.1", - "bundled": true, - "dev": true, - "requires": { - "atob": "^2.0.0", - "decode-uri-component": "^0.2.0", - "resolve-url": "^0.2.1", - "source-map-url": "^0.4.0", - "urix": "^0.1.0" - } - }, - "source-map-url": { - "version": "0.4.0", - "bundled": true, - "dev": true - }, - "spawn-wrap": { - "version": "1.4.2", - "bundled": true, - "dev": true, - "requires": { - "foreground-child": "^1.5.6", - "mkdirp": "^0.5.0", - "os-homedir": "^1.0.1", - "rimraf": "^2.6.2", - "signal-exit": "^3.0.2", - "which": "^1.3.0" - } - }, - "spdx-correct": { - "version": "3.0.0", - "bundled": true, - "dev": true, - "requires": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "spdx-exceptions": { - "version": "2.1.0", - "bundled": true, - "dev": true - }, - "spdx-expression-parse": { - "version": "3.0.0", - "bundled": true, - "dev": true, - "requires": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "spdx-license-ids": { - "version": "3.0.0", - "bundled": true, - "dev": true - }, - "split-string": { - "version": "3.1.0", - "bundled": true, - "dev": true, - "requires": { - "extend-shallow": "^3.0.0" - } - }, - "static-extend": { - "version": "0.1.2", - "bundled": true, - "dev": true, - "requires": { - "define-property": "^0.2.5", - "object-copy": "^0.1.0" - }, - "dependencies": { - "define-property": { - "version": "0.2.5", - "bundled": true, - "dev": true, - "requires": { - "is-descriptor": "^0.1.0" - } - } - } - }, - "string-width": { - "version": "2.1.1", - "bundled": true, - "dev": true, - "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "3.0.0", - "bundled": true, - "dev": true - }, - "strip-ansi": { - "version": "4.0.0", - "bundled": true, - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" - } - } - } - }, - "strip-ansi": { - "version": "3.0.1", - "bundled": true, - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "strip-bom": { - "version": "2.0.0", - "bundled": true, - "dev": true, - "requires": { - "is-utf8": "^0.2.0" - } - }, - "strip-eof": { - "version": "1.0.0", - "bundled": true, - "dev": true - }, - "supports-color": { - "version": "2.0.0", 
- "bundled": true, - "dev": true - }, - "test-exclude": { - "version": "4.2.1", - "bundled": true, - "dev": true, - "requires": { - "arrify": "^1.0.1", - "micromatch": "^3.1.8", - "object-assign": "^4.1.0", - "read-pkg-up": "^1.0.1", - "require-main-filename": "^1.0.1" - }, - "dependencies": { - "arr-diff": { - "version": "4.0.0", - "bundled": true, - "dev": true - }, - "array-unique": { - "version": "0.3.2", - "bundled": true, - "dev": true - }, - "braces": { - "version": "2.3.2", - "bundled": true, - "dev": true, - "requires": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "expand-brackets": { - "version": "2.1.4", - "bundled": true, - "dev": true, - "requires": { - "debug": "^2.3.3", - "define-property": "^0.2.5", - "extend-shallow": "^2.0.1", - "posix-character-classes": "^0.1.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "dependencies": { - "define-property": { - "version": "0.2.5", - "bundled": true, - "dev": true, - "requires": { - "is-descriptor": "^0.1.0" - } - }, - "extend-shallow": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - }, - "is-accessor-descriptor": { - "version": "0.1.6", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "bundled": true, - "dev": true, - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "is-data-descriptor": { - "version": "0.1.4", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "bundled": true, - "dev": true, - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "is-descriptor": { - "version": "0.1.6", - "bundled": true, - "dev": true, - "requires": { - "is-accessor-descriptor": "^0.1.6", - "is-data-descriptor": "^0.1.4", - "kind-of": "^5.0.0" - } - }, - "kind-of": { - "version": "5.1.0", - "bundled": true, - "dev": true - } - } - }, - "extglob": { - "version": "2.0.4", - "bundled": true, - "dev": true, - "requires": { - "array-unique": "^0.3.2", - "define-property": "^1.0.0", - "expand-brackets": "^2.1.4", - "extend-shallow": "^2.0.1", - "fragment-cache": "^0.2.1", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "dependencies": { - "define-property": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "is-descriptor": "^1.0.0" - } - }, - "extend-shallow": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "fill-range": { - "version": "4.0.0", - "bundled": true, - "dev": true, - "requires": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "is-accessor-descriptor": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-data-descriptor": { - "version": "1.0.0", - "bundled": true, 
- "dev": true, - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-descriptor": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "requires": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - } - }, - "is-number": { - "version": "3.0.0", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "bundled": true, - "dev": true, - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "isobject": { - "version": "3.0.1", - "bundled": true, - "dev": true - }, - "kind-of": { - "version": "6.0.2", - "bundled": true, - "dev": true - }, - "micromatch": { - "version": "3.1.10", - "bundled": true, - "dev": true, - "requires": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" - } - } - } - }, - "to-fast-properties": { - "version": "1.0.3", - "bundled": true, - "dev": true - }, - "to-object-path": { - "version": "0.3.0", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^3.0.2" - } - }, - "to-regex": { - "version": "3.0.2", - "bundled": true, - "dev": true, - "requires": { - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "regex-not": "^1.0.2", - "safe-regex": "^1.1.0" - } - }, - "to-regex-range": { - "version": "2.1.1", - "bundled": true, - "dev": true, - "requires": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" - }, - "dependencies": { - "is-number": { - "version": "3.0.0", - "bundled": true, - "dev": true, - "requires": { - "kind-of": "^3.0.2" - } - } - } - }, - "trim-right": { - "version": "1.0.1", - "bundled": true, - "dev": true - }, - "uglify-js": { - "version": "2.8.29", - "bundled": true, - "dev": true, - "optional": true, "requires": { - "source-map": "~0.5.1", - "uglify-to-browserify": "~1.0.0", - "yargs": "~3.10.0" - }, - "dependencies": { - "yargs": { - "version": "3.10.0", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "camelcase": "^1.0.2", - "cliui": "^2.1.0", - "decamelize": "^1.0.0", - "window-size": "0.1.0" - } - } + "invert-kv": "^2.0.0" } }, - "uglify-to-browserify": { - "version": "1.0.2", - "bundled": true, + "locate-path": { + "version": "3.0.0", + "resolved": false, + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", "dev": true, - "optional": true + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } }, - "union-value": { - "version": "1.0.0", - "bundled": true, + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", "dev": true, "requires": { - "arr-union": "^3.1.0", - "get-value": "^2.0.6", - "is-extendable": "^0.1.1", - "set-value": "^0.4.3" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - }, - "set-value": { - "version": "0.4.3", - "bundled": true, - "dev": true, - "requires": { - "extend-shallow": "^2.0.1", - "is-extendable": "^0.1.1", - "is-plain-object": "^2.0.1", - "to-object-path": "^0.3.0" - } - } + "pify": "^4.0.1", + "semver": "^5.6.0" } }, 
- "unset-value": { - "version": "1.0.0", - "bundled": true, + "mem": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/mem/-/mem-4.3.0.tgz", + "integrity": "sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w==", "dev": true, "requires": { - "has-value": "^0.3.1", - "isobject": "^3.0.0" - }, - "dependencies": { - "has-value": { - "version": "0.3.1", - "bundled": true, - "dev": true, - "requires": { - "get-value": "^2.0.3", - "has-values": "^0.1.4", - "isobject": "^2.0.0" - }, - "dependencies": { - "isobject": { - "version": "2.1.0", - "bundled": true, - "dev": true, - "requires": { - "isarray": "1.0.0" - } - } - } - }, - "has-values": { - "version": "0.1.4", - "bundled": true, - "dev": true - }, - "isobject": { - "version": "3.0.1", - "bundled": true, - "dev": true - } + "map-age-cleaner": "^0.1.1", + "mimic-fn": "^2.0.0", + "p-is-promise": "^2.0.0" } }, - "urix": { - "version": "0.1.0", - "bundled": true, + "mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", "dev": true }, - "use": { + "os-locale": { "version": "3.1.0", - "bundled": true, + "resolved": false, + "integrity": "sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q==", "dev": true, "requires": { - "kind-of": "^6.0.2" - }, - "dependencies": { - "kind-of": { - "version": "6.0.2", - "bundled": true, - "dev": true - } + "execa": "^1.0.0", + "lcid": "^2.0.0", + "mem": "^4.0.0" } }, - "validate-npm-package-license": { - "version": "3.0.3", - "bundled": true, + "p-limit": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz", + "integrity": "sha512-pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ==", "dev": true, "requires": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" + "p-try": "^2.0.0" } }, - "which": { - "version": "1.3.0", - "bundled": true, + "p-locate": { + "version": "3.0.0", + "resolved": false, + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", "dev": true, "requires": { - "isexe": "^2.0.0" + "p-limit": "^2.0.0" } }, - "which-module": { - "version": "2.0.0", - "bundled": true, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", "dev": true }, - "window-size": { - "version": "0.1.0", - "bundled": true, - "dev": true, - "optional": true + "pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true }, - "wordwrap": { - "version": "0.0.3", - "bundled": true, + "require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", "dev": true }, - "wrap-ansi": { - "version": "2.1.0", - "bundled": true, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": 
"sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", "dev": true, "requires": { - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1" - }, - "dependencies": { - "is-fullwidth-code-point": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "requires": { - "number-is-nan": "^1.0.0" - } - }, - "string-width": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - } - } + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" } }, - "wrappy": { - "version": "1.0.2", - "bundled": true, - "dev": true - }, - "write-file-atomic": { - "version": "1.3.4", - "bundled": true, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", "dev": true, "requires": { - "graceful-fs": "^4.1.11", - "imurmurhash": "^0.1.4", - "slide": "^1.1.5" + "ansi-regex": "^4.1.0" } }, - "y18n": { - "version": "3.2.1", - "bundled": true, - "dev": true - }, - "yallist": { - "version": "2.1.2", - "bundled": true, - "dev": true + "wrap-ansi": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", + "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.0", + "string-width": "^3.0.0", + "strip-ansi": "^5.0.0" + } }, "yargs": { - "version": "11.1.0", - "bundled": true, + "version": "13.2.4", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.2.4.tgz", + "integrity": "sha512-HG/DWAJa1PAnHT9JAhNa8AbAv3FPaiLzioSjCcmuXXhP8MlpHO5vwls4g4j6n30Z74GVQj8Xa62dWVx1QCGklg==", "dev": true, "requires": { - "cliui": "^4.0.0", - "decamelize": "^1.1.1", - "find-up": "^2.1.0", - "get-caller-file": "^1.0.1", - "os-locale": "^2.0.0", + "cliui": "^5.0.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "os-locale": "^3.1.0", "require-directory": "^2.1.1", - "require-main-filename": "^1.0.1", + "require-main-filename": "^2.0.0", "set-blocking": "^2.0.0", - "string-width": "^2.0.0", + "string-width": "^3.0.0", "which-module": "^2.0.0", - "y18n": "^3.2.1", - "yargs-parser": "^9.0.2" - }, - "dependencies": { - "ansi-regex": { - "version": "3.0.0", - "bundled": true, - "dev": true - }, - "camelcase": { - "version": "4.1.0", - "bundled": true, - "dev": true - }, - "cliui": { - "version": "4.1.0", - "bundled": true, - "dev": true, - "requires": { - "string-width": "^2.1.1", - "strip-ansi": "^4.0.0", - "wrap-ansi": "^2.0.0" - } - }, - "strip-ansi": { - "version": "4.0.0", - "bundled": true, - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" - } - }, - "yargs-parser": { - "version": "9.0.2", - "bundled": true, - "dev": true, - "requires": { - "camelcase": "^4.1.0" - } - } + "y18n": "^4.0.0", + "yargs-parser": "^13.1.0" } }, "yargs-parser": { - "version": "8.1.0", - "bundled": true, + "version": "13.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.1.tgz", + "integrity": "sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ==", "dev": true, "requires": { - "camelcase": "^4.1.0" - }, - "dependencies": { - "camelcase": { - "version": "4.1.0", - "bundled": true, - "dev": true - } + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" } } } @@ -5870,10 
+4237,18 @@ "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" }, "object-keys": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.0.11.tgz", - "integrity": "sha1-xUYBd4rVYPEULODgG8yotW0TQm0=", - "dev": true + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.0.12.tgz", + "integrity": "sha512-FTMyFUm2wBcGHnH2eXmz7tC6IwlqQZ6mVZ+6dm6vZ4IQIHjs6FdNsQBuKGPuUUUY6NfJw2PshC08Tn6LzLDOag==" + }, + "object.getownpropertydescriptors": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz", + "integrity": "sha1-h1jIRvW0B62rDyNuCYbxSwUcqhY=", + "requires": { + "define-properties": "^1.1.2", + "es-abstract": "^1.5.1" + } }, "on-finished": { "version": "2.3.0", @@ -5913,6 +4288,14 @@ "dev": true, "requires": { "wordwrap": "~0.0.2" + }, + "dependencies": { + "wordwrap": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", + "dev": true + } } }, "optionator": { @@ -5927,14 +4310,6 @@ "prelude-ls": "~1.1.2", "type-check": "~0.3.2", "wordwrap": "~1.0.0" - }, - "dependencies": { - "wordwrap": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", - "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=", - "dev": true - } } }, "os-homedir": { @@ -5943,13 +4318,41 @@ "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=" }, "os-locale": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-2.1.0.tgz", - "integrity": "sha512-3sslG3zJbEYcaC4YVAvDorjGxc7tv6KVATnLPZONiljsUncvihe9BQoVCEs0RZ1kmf4Hk9OBqlZfJZWI4GanKA==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-3.1.0.tgz", + "integrity": "sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q==", "requires": { - "execa": "^0.7.0", - "lcid": "^1.0.0", - "mem": "^1.1.0" + "execa": "^1.0.0", + "lcid": "^2.0.0", + "mem": "^4.0.0" + }, + "dependencies": { + "cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "requires": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "execa": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", + "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", + "requires": { + "cross-spawn": "^6.0.0", + "get-stream": "^4.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + } + } } }, "os-tmpdir": { @@ -5981,11 +4384,21 @@ "own-or": "^1.0.0" } }, + "p-defer": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-defer/-/p-defer-1.0.0.tgz", + "integrity": "sha1-n26xgvbJqozXQwBKfU+WsZaw+ww=" + }, "p-finally": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=" }, + "p-is-promise": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/p-is-promise/-/p-is-promise-2.1.0.tgz", + "integrity": "sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg==" 
+ }, "p-limit": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.2.0.tgz", @@ -6007,6 +4420,18 @@ "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=" }, + "package-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-3.0.0.tgz", + "integrity": "sha512-lOtmukMDVvtkL84rJHI7dpTYq+0rli8N2wlnqUcBuDWCfVhRUfOmnR9SsoHFMLpACvEV60dX7rd0rFaYDZI+FA==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.15", + "hasha": "^3.0.0", + "lodash.flattendeep": "^4.4.0", + "release-zalgo": "^1.0.0" + } + }, "package-json": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/package-json/-/package-json-4.0.1.tgz", @@ -6019,35 +4444,51 @@ } }, "pacote": { - "version": "8.1.6", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-8.1.6.tgz", - "integrity": "sha512-wTOOfpaAQNEQNtPEx92x9Y9kRWVu45v583XT8x2oEV2xRB74+xdqMZIeGW4uFvAyZdmSBtye+wKdyyLaT8pcmw==", - "requires": { - "bluebird": "^3.5.1", - "cacache": "^11.0.2", - "get-stream": "^3.0.0", - "glob": "^7.1.2", - "lru-cache": "^4.1.3", - "make-fetch-happen": "^4.0.1", + "version": "9.5.12", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-9.5.12.tgz", + "integrity": "sha512-BUIj/4kKbwWg4RtnBncXPJd15piFSVNpTzY0rysSr3VnMowTYgkGKcaHrbReepAkjTr8lH2CVWRi58Spg2CicQ==", + "requires": { + "bluebird": "^3.5.3", + "cacache": "^12.0.2", + "chownr": "^1.1.2", + "figgy-pudding": "^3.5.1", + "get-stream": "^4.1.0", + "glob": "^7.1.3", + "infer-owner": "^1.0.4", + "lru-cache": "^5.1.1", + "make-fetch-happen": "^5.0.0", "minimatch": "^3.0.4", - "minipass": "^2.3.3", + "minipass": "^2.3.5", "mississippi": "^3.0.0", "mkdirp": "^0.5.1", "normalize-package-data": "^2.4.0", + "npm-normalize-package-bin": "^1.0.0", "npm-package-arg": "^6.1.0", - "npm-packlist": "^1.1.10", - "npm-pick-manifest": "^2.1.0", + "npm-packlist": "^1.1.12", + "npm-pick-manifest": "^3.0.0", + "npm-registry-fetch": "^4.0.0", "osenv": "^0.1.5", "promise-inflight": "^1.0.1", "promise-retry": "^1.1.1", - "protoduck": "^5.0.0", + "protoduck": "^5.0.1", "rimraf": "^2.6.2", "safe-buffer": "^5.1.2", - "semver": "^5.5.0", - "ssri": "^6.0.0", - "tar": "^4.4.3", - "unique-filename": "^1.1.0", - "which": "^1.3.0" + "semver": "^5.6.0", + "ssri": "^6.0.1", + "tar": "^4.4.10", + "unique-filename": "^1.1.1", + "which": "^1.3.1" + }, + "dependencies": { + "minipass": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz", + "integrity": "sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==", + "requires": { + "safe-buffer": "^5.1.2", + "yallist": "^3.0.0" + } + } } }, "parallel-transform": { @@ -6058,6 +4499,30 @@ "cyclist": "~0.2.2", "inherits": "^2.0.3", "readable-stream": "^2.1.5" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": 
"sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + } } }, "parse-json": { @@ -6070,9 +4535,9 @@ } }, "parseurl": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.2.tgz", - "integrity": "sha1-/CidTtiZMRlGDBViUyYs3I3mW/M=", + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", "dev": true }, "path-exists": { @@ -6096,10 +4561,9 @@ "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=" }, "path-parse": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.5.tgz", - "integrity": "sha1-PBrfhx6pzWyUMbbqK9dKD/BVxME=", - "dev": true + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", + "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" }, "path-type": { "version": "2.0.0", @@ -6247,20 +4711,11 @@ "integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==" }, "progress": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.0.tgz", - "integrity": "sha1-ihvjZr+Pwj2yvSPxDG/pILQ4nR8=", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", + "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", "dev": true }, - "promise": { - "version": "7.3.1", - "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz", - "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==", - "dev": true, - "requires": { - "asap": "~2.0.3" - } - }, "promise-inflight": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/promise-inflight/-/promise-inflight-1.0.1.tgz", @@ -6291,12 +4746,11 @@ } }, "prop-types": { - "version": "15.6.1", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.6.1.tgz", - "integrity": "sha512-4ec7bY1Y66LymSUOH/zARVYObB23AT2h8cf6e/O6ZALB/N0sqZFEx7rq6EYPX2MkOdKORuooI/H5k9TlR4q7kQ==", + "version": "15.6.2", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.6.2.tgz", + "integrity": "sha512-3pboPvLiWD7dkI3qf3KbUe6hKFKa52w+AE0VCqECtf+QHAKgOL37tTaNCnuX1nAAQ4ZhyP+kYVKf8rLmJ/feDQ==", "dev": true, "requires": { - "fbjs": "^0.8.16", "loose-envify": "^1.3.1", "object-assign": "^4.1.1" } @@ -6307,11 +4761,11 @@ "integrity": "sha1-IS1b/hMYMGpCD2QCuOJv85ZHqEk=" }, "protoduck": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/protoduck/-/protoduck-5.0.0.tgz", - "integrity": "sha512-agsGWD8/RZrS4ga6v82Fxb0RHIS2RZnbsSue6A9/MBRhB/jcqOANAMNrqM9900b8duj+Gx+T/JMy5IowDoO/hQ==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/protoduck/-/protoduck-5.0.1.tgz", + "integrity": "sha512-WxoCeDCoCBY55BMvj4cAEjdVUFGRWed9ZxPlqTKYyw1nDDTQ4pqmnIMAGfJlg7Dx35uB/M+PHJPTmGOvaCaPTg==", "requires": { - "genfun": "^4.0.1" + "genfun": "^5.0.0" } }, "prr": { @@ -6375,11 +4829,12 @@ "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" }, "query-string": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/query-string/-/query-string-6.1.0.tgz", - "integrity": 
"sha512-pNB/Gr8SA8ff8KpUFM36o/WFAlthgaThka5bV19AD9PNTH20Pwq5Zxodif2YyHwrctp6SkL4GqlOot0qR/wGaw==", + "version": "6.8.2", + "resolved": "https://registry.npmjs.org/query-string/-/query-string-6.8.2.tgz", + "integrity": "sha512-J3Qi8XZJXh93t2FiKyd/7Ec6GNifsjKXUsVFkSBj/kjLsDylWhnCz4NT1bkPcKotttPW+QbKGqqPH8OoI2pdqw==", "requires": { "decode-uri-component": "^0.2.0", + "split-on-first": "^1.0.0", "strict-uri-encode": "^2.0.0" } }, @@ -6421,9 +4876,9 @@ } }, "read-cmd-shim": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/read-cmd-shim/-/read-cmd-shim-1.0.1.tgz", - "integrity": "sha1-LV0Vd4ajfAVdIgd8MsU/gynpHHs=", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/read-cmd-shim/-/read-cmd-shim-1.0.5.tgz", + "integrity": "sha512-v5yCqQ/7okKoZZkBQUAfTsQ3sVJtXdNfbPnI5cceppoxEVLYA3k+VtV2omkeo8MS94JCy4fSiUwlRBAwCVRPUA==", "requires": { "graceful-fs": "^4.1.2" } @@ -6443,27 +4898,25 @@ } }, "read-package-json": { - "version": "2.0.13", - "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-2.0.13.tgz", - "integrity": "sha512-/1dZ7TRZvGrYqE0UAfN6qQb5GYBsNcqS1C0tNK601CFOJmtHI7NIGXwetEPU/OtoFHZL3hDxm4rolFFVE9Bnmg==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-2.1.1.tgz", + "integrity": "sha512-dAiqGtVc/q5doFz6096CcnXhpYk0ZN8dEKVkGLU0CsASt8SrgF6SF7OTKAYubfvFhWaqofl+Y8HK19GR8jwW+A==", "requires": { "glob": "^7.1.1", "graceful-fs": "^4.1.2", "json-parse-better-errors": "^1.0.1", "normalize-package-data": "^2.0.0", - "slash": "^1.0.0" + "npm-normalize-package-bin": "^1.0.0" } }, "read-package-tree": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/read-package-tree/-/read-package-tree-5.2.1.tgz", - "integrity": "sha512-2CNoRoh95LxY47LvqrehIAfUVda2JbuFE/HaGYs42bNrGG+ojbw1h3zOcPcQ+1GQ3+rkzNndZn85u1XyZ3UsIA==", + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/read-package-tree/-/read-package-tree-5.3.1.tgz", + "integrity": "sha512-mLUDsD5JVtlZxjSlPPx1RETkNjjvQYuweKwNVt1Sn8kP5Jh44pvYuUHCp6xSVDZWbNxVxG5lyZJ921aJH61sTw==", "requires": { - "debuglog": "^1.0.1", - "dezalgo": "^1.0.0", - "once": "^1.3.0", "read-package-json": "^2.0.0", - "readdir-scoped-modules": "^1.0.0" + "readdir-scoped-modules": "^1.0.0", + "util-promisify": "^2.1.0" } }, "read-pkg": { @@ -6488,23 +4941,19 @@ } }, "readable-stream": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" } }, "readdir-scoped-modules": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/readdir-scoped-modules/-/readdir-scoped-modules-1.0.2.tgz", - "integrity": "sha1-n6+jfShr5dksuuve4DDcm19AZ0c=", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/readdir-scoped-modules/-/readdir-scoped-modules-1.1.0.tgz", + "integrity": "sha512-asaikDeqAQg7JifRsZn1NJZXo9E+VwlyCfbkZhwyISinqk5zNS6266HS5kah6P0SaQKGF6SkNnZVHUzHFYxYDw==", 
"requires": { "debuglog": "^1.0.1", "dezalgo": "^1.0.0", @@ -6529,6 +4978,15 @@ "rc": "^1.0.1" } }, + "release-zalgo": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz", + "integrity": "sha1-CXALflB0Mpc5Mw5TXFqQ+2eFFzA=", + "dev": true, + "requires": { + "es6-error": "^4.0.1" + } + }, "request": { "version": "2.88.0", "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", @@ -6562,9 +5020,9 @@ "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=" }, "require-inject": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/require-inject/-/require-inject-1.4.3.tgz", - "integrity": "sha512-AwIeUR1jA8ARt20brFU2DfW62z5e3rzMZC/Q1I1lKoOlxtroSUqjRGKyOL8m+7qmnFGbZvV8lUjtcHgtuLDl6Q==", + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/require-inject/-/require-inject-1.4.4.tgz", + "integrity": "sha512-5Y5ctRN84+I4iOZO61gm+48tgP/6Hcd3VZydkaEM3MCuOvnHRsTJYQBOc01faI/Z9at5nsCAJVHhlfPA6Pc0Og==", "dev": true, "requires": { "caller": "^1.0.1" @@ -6594,9 +5052,9 @@ } }, "resolve": { - "version": "1.7.1", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.7.1.tgz", - "integrity": "sha512-c7rwLofp8g1U+h1KNyHL/jicrKg1Ek4q+Lr33AL65uZTinUZHe30D5HlyN5V9NW0JX1D5dXQ4jqW5l7Sy/kGfw==", + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.8.1.tgz", + "integrity": "sha512-AicPrAC7Qu1JxPCZ9ZgCZlY35QgFnNqc+0LtbRNxnVw4TXvjQ72wnuL9JQcEBgXkI9JM8MsT9kaQoHcpCRJOYA==", "dev": true, "requires": { "path-parse": "^1.0.5" @@ -6623,11 +5081,11 @@ "integrity": "sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs=" }, "rimraf": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.2.tgz", - "integrity": "sha512-lreewLK/BlghmxtfH36YYVg1i8IAce4TI7oao75I1g245+6BctqTVQiBP3YUJ9C6DQOXJmkYR9X9fCLtCOJc5w==", + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", + "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", "requires": { - "glob": "^7.0.5" + "glob": "^7.1.3" } }, "run-async": { @@ -6651,6 +5109,13 @@ "integrity": "sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec=", "requires": { "aproba": "^1.1.1" + }, + "dependencies": { + "aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==" + } } }, "rx-lite": { @@ -6679,9 +5144,9 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "semver": { - "version": "5.5.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.5.1.tgz", - "integrity": "sha512-PqpAxfrEhlSUWge8dwIp4tZnQ25DIOthpiaHNIthsjEFQD6EvqUKUDM7L8O2rShkFccYo1VjJR0coWfNkCubRw==" + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" }, "semver-diff": { "version": "2.1.0", @@ -6696,19 +5161,12 @@ "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" }, - "setimmediate": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", - "integrity": "sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU=", - "dev": true - }, "sha": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/sha/-/sha-2.0.1.tgz", - "integrity": 
"sha1-YDCCL70smCOUn49y7WQR7lzyWq4=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/sha/-/sha-3.0.0.tgz", + "integrity": "sha512-DOYnM37cNsLNSGIG/zZWch5CKIRNoLdYUQTQlcgkRkoYIUwDYjqDyye16YcDZg/OPdcbUgTKMjc4SY6TB7ZAPw==", "requires": { - "graceful-fs": "^4.1.2", - "readable-stream": "^2.0.2" + "graceful-fs": "^4.1.2" } }, "shebang-command": { @@ -6735,11 +5193,6 @@ "integrity": "sha1-c0TLuLbib7J9ZrL8hvn21Zl1IcY=", "dev": true }, - "slash": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz", - "integrity": "sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU=" - }, "slice-ansi": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-1.0.0.tgz", @@ -6763,26 +5216,36 @@ "integrity": "sha1-VusCfWW00tzmyy4tMsTUr8nh1wc=" }, "smart-buffer": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.0.1.tgz", - "integrity": "sha512-RFqinRVJVcCAL9Uh1oVqE6FZkqsyLiVOYEZ20TqIOjuX7iFVJ+zsbs4RIghnw/pTs7mZvt8ZHhvm1ZUrR4fykg==" + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.1.0.tgz", + "integrity": "sha512-iVICrxOzCynf/SNaBQCw34eM9jROU/s5rzIhpOvzhzuYHfJR/DhZfDkXiZSgKXfgv26HT3Yni3AV/DGw0cGnnw==" }, "socks": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/socks/-/socks-2.2.0.tgz", - "integrity": "sha512-uRKV9uXQ9ytMbGm2+DilS1jB7N3AC0mmusmW5TVWjNuBZjxS8+lX38fasKVY9I4opv/bY/iqTbcpFFaTwpfwRg==", + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/socks/-/socks-2.3.3.tgz", + "integrity": "sha512-o5t52PCNtVdiOvzMry7wU4aOqYWL0PeCXRWBEiJow4/i/wr+wpsJQ9awEu1EonLIqsfGd5qSgDdxEOvCdmBEpA==", "requires": { - "ip": "^1.1.5", - "smart-buffer": "^4.0.1" + "ip": "1.1.5", + "smart-buffer": "^4.1.0" } }, "socks-proxy-agent": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-4.0.1.tgz", - "integrity": "sha512-Kezx6/VBguXOsEe5oU3lXYyKMi4+gva72TwJ7pQY5JfqUx2nMk7NXA6z/mpNqIlfQjWYVfeuNvQjexiTaTn6Nw==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-4.0.2.tgz", + "integrity": "sha512-NT6syHhI9LmuEMSK6Kd2V7gNv5KFZoLE7V5udWmn0de+3Mkj3UMA/AJPLyeNUVmElCurSHtUdM3ETpR3z770Wg==", "requires": { - "agent-base": "~4.2.0", - "socks": "~2.2.0" + "agent-base": "~4.2.1", + "socks": "~2.3.2" + }, + "dependencies": { + "agent-base": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.2.1.tgz", + "integrity": "sha512-JVwXMr9nHYTUXsBFKUqhJwvlcYU/blreOEUkhNR2eXZIvwd+c+o5V4MgDPKWnMS/56awN3TRzIP+KoPn+roQtg==", + "requires": { + "es6-promisify": "^5.0.0" + } + } } }, "sorted-object": { @@ -6831,20 +5294,42 @@ } } }, - "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true - }, - "source-map-support": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.6.tgz", - "integrity": "sha512-N4KXEz7jcKqPf2b2vZF11lQIz9W5ZMuUcIOGj243lduidkf2fjkVKJS9vNxVWn3u/uxX38AcE8U9nnH9FPcq+g==", + "source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": true + }, + "source-map-support": { + "version": "0.5.12", + "resolved": 
"https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.12.tgz", + "integrity": "sha512-4h2Pbvyy15EE02G+JOZpUCmqWJuqrs+sEkzewTm++BPi7Hvn/HwcqLAcNxYAyI0x13CpPPn+kMjl+hplXMHITQ==", + "dev": true, + "requires": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } + } + }, + "spawn-wrap": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-1.4.2.tgz", + "integrity": "sha512-vMwR3OmmDhnxCVxM8M+xO/FtIp6Ju/mNaDfCMMW7FDcLRTPFWUswec4LXJHTJE2hwTI9O0YBfygu4DalFl7Ylg==", "dev": true, "requires": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" + "foreground-child": "^1.5.6", + "mkdirp": "^0.5.0", + "os-homedir": "^1.0.1", + "rimraf": "^2.6.2", + "signal-exit": "^3.0.2", + "which": "^1.3.0" } }, "spdx-compare": { @@ -6882,48 +5367,50 @@ } }, "spdx-expression-validate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/spdx-expression-validate/-/spdx-expression-validate-1.0.2.tgz", - "integrity": "sha1-Wk5NdhbtHJuIFQNmtCF/dnwn6eM=", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/spdx-expression-validate/-/spdx-expression-validate-2.0.0.tgz", + "integrity": "sha512-b3wydZLM+Tc6CFvaRDBOF9d76oGIHNCLYFeHbftFXUWjnfZWganmDmvtM5sm1cRwJc/VDBMLyGGrsLFd1vOxbg==", "dev": true, "requires": { - "spdx-expression-parse": "^1.0.0" - }, - "dependencies": { - "spdx-expression-parse": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-1.0.4.tgz", - "integrity": "sha1-m98vIOH0DtRH++JzJmGR/O1RYmw=", - "dev": true - } + "spdx-expression-parse": "^3.0.0" } }, "spdx-license-ids": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.3.tgz", + "integrity": "sha512-uBIcIl3Ih6Phe3XHK1NqboJLdGfwr1UN3k6wSD1dZpmPsIkb8AGNbZYJ1fOBk834+Gxy8rpfDxrS6XLEMZMY2g==" + }, + "spdx-osi": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.0.tgz", - "integrity": "sha512-2+EPwgbnmOIl8HjGBXXMd9NAu02vLjOO1nWw4kmeRDFyHn+M/ETfHxQUK0oXg8ctgVnl9t3rosNVsZ1jG61nDA==" + "resolved": "https://registry.npmjs.org/spdx-osi/-/spdx-osi-3.0.0.tgz", + "integrity": "sha512-7DZMaD/rNHWGf82qWOazBsLXQsaLsoJb9RRjhEUQr5o86kw3A1ErGzSdvaXl+KalZyKkkU5T2a5NjCCutAKQSw==", + "dev": true }, "spdx-ranges": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/spdx-ranges/-/spdx-ranges-2.0.0.tgz", - "integrity": "sha512-AUUXLfqkwD7GlzZkXv8ePPCpPjeVWI9xJCfysL8re/uKb6H10umMnC7bFRsHmLJan4fslUtekAgpHlSgLc/7mA==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/spdx-ranges/-/spdx-ranges-2.1.0.tgz", + "integrity": "sha512-OOWghvosfmECc9edy/A9j7GabERmn8bJWHc0J1knVytQtO5Rw7VfxK6CDqmivJhfMJqWhWWUfffNNMPYvyvyQA==", "dev": true }, - "spdx-satisfies": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/spdx-satisfies/-/spdx-satisfies-4.0.0.tgz", - "integrity": "sha512-OcARj6U1OuVv98SVrRqgrR30sVocONtoPpnX8Xz4vXNrFVedqtbgkA+0KmQoXIQ2xjfltPPRVIMeNzKEFLWWKQ==", + "spdx-whitelisted": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/spdx-whitelisted/-/spdx-whitelisted-1.0.0.tgz", + "integrity": "sha512-X4FOpUCvZuo42MdB1zAZ/wdX4N0lLcWDozf2KYFVDgtLv8Lx+f31LOYLP2/FcwTzsPi64bS/VwKqklI4RBletg==", "dev": true, "requires": 
{ "spdx-compare": "^1.0.0", - "spdx-expression-parse": "^3.0.0", "spdx-ranges": "^2.0.0" } }, + "split-on-first": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/split-on-first/-/split-on-first-1.1.0.tgz", + "integrity": "sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw==" + }, "sprintf-js": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.1.tgz", - "integrity": "sha1-Nr54Mgr+WAH2zqPueLblqrlA6gw=", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.2.tgz", + "integrity": "sha512-VE0SOVEHCk7Qc8ulkWw3ntAzXuqf7S2lvwQaDLRnUeIEaKNQJzV6BwmLKhOqT61aGhfUMrXeaBk+oDGCzvhcug==", "dev": true }, "sshpk": { @@ -6951,9 +5438,9 @@ } }, "stack-utils": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-1.0.1.tgz", - "integrity": "sha1-1PM6tU6OOHeLDKXP07OvsS22hiA=", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-1.0.2.tgz", + "integrity": "sha512-MTX+MeG5U994cazkjd/9KNAapsHnibjMLnfXodlkXw76JEea0UiNzrqidzo1emMwk7w5Qhc9jd4Bn9TBb1MFwA==", "dev": true }, "standard": { @@ -6994,9 +5481,9 @@ } }, "statuses": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.3.1.tgz", - "integrity": "sha1-+vUbnrdKrvOzrPStX2Gr8ky3uT4=", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=", "dev": true }, "stream-each": { @@ -7015,6 +5502,30 @@ "requires": { "readable-stream": "^2.1.5", "stream-shift": "^1.0.0" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + } } }, "stream-shift": { @@ -7057,17 +5568,24 @@ } }, "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", "requires": { - "safe-buffer": "~5.1.0" + "safe-buffer": "~5.2.0" + }, + "dependencies": { + "safe-buffer": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz", + "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==" + } } }, "stringify-package": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/stringify-package/-/stringify-package-1.0.0.tgz", - "integrity": "sha512-JIQqiWmLiEozOC0b0BtxZ/AOUtdUZHCBPgqIZ2kSJJqGwgb9neo44XdTHUC4HZSGqi03hOeB7W/E8rAlKnGe9g==" + "version": 
"1.0.1", + "resolved": "https://registry.npmjs.org/stringify-package/-/stringify-package-1.0.1.tgz", + "integrity": "sha512-sa4DUQsYciMP1xhKWGuFM04fB0LG/9DlluZoSVywUMRNvzid6XucHK0/90xGxRoHrAaROrcHK1aPKaijCtSrhg==" }, "strip-ansi": { "version": "3.0.1", @@ -7113,20 +5631,12 @@ "lodash": "^4.17.4", "slice-ansi": "1.0.0", "string-width": "^2.1.1" - }, - "dependencies": { - "lodash": { - "version": "4.17.10", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.10.tgz", - "integrity": "sha512-UejweD1pDoXu+AD825lWwp4ZGtSwgnpZxb3JDViD7StjQz+Nb/6l093lx4OQ0foGWNRoc19mWy7BzL+UAK2iVg==", - "dev": true - } } }, "tacks": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/tacks/-/tacks-1.2.7.tgz", - "integrity": "sha512-gaIFMB31ql8FX9wZylfw68BXj0NlB+e1LUmmQsy3aOXUA8DO2MpiO3mua87sZHfpnp95jrnNzGcoW9iYTTsJtg==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/tacks/-/tacks-1.3.0.tgz", + "integrity": "sha512-jpQhdKlYom3cohYmjSYqny9Ie8QJr6OYe4wdyu/nEgfiIQpOvLwq72vCRucg9+jUlnspnKvL5HASnmkxOf3DXQ==", "dev": true, "requires": { "@iarna/cli": "^2.0.0", @@ -7248,46 +5758,79 @@ } }, "tap": { - "version": "12.0.1", - "resolved": "https://registry.npmjs.org/tap/-/tap-12.0.1.tgz", - "integrity": "sha512-iEJytWaZy8risvfRjuV4+ST+Lrrui/MW2ZCWn01ZaMn0NKFej4+PpBy6bXGOg9+cEGNmI7d3Sdka/zTUZUGidA==", + "version": "12.7.0", + "resolved": "https://registry.npmjs.org/tap/-/tap-12.7.0.tgz", + "integrity": "sha512-SjglJmRv0pqrQQ7d5ZBEY8ZOqv3nYDBXEX51oyycOH7piuhn82JKT/yDNewwmOsodTD/RZL9MccA96EjDgK+Eg==", "dev": true, "requires": { "bind-obj-methods": "^2.0.0", - "bluebird": "^3.5.1", + "browser-process-hrtime": "^1.0.0", + "capture-stack-trace": "^1.0.0", "clean-yaml-object": "^0.1.0", "color-support": "^1.1.0", - "coveralls": "^3.0.1", + "coveralls": "^3.0.2", + "domain-browser": "^1.2.0", + "esm": "^3.2.5", "foreground-child": "^1.3.3", "fs-exists-cached": "^1.0.0", "function-loop": "^1.0.1", - "glob": "^7.0.0", + "glob": "^7.1.3", "isexe": "^2.0.0", - "js-yaml": "^3.11.0", - "minipass": "^2.3.0", + "js-yaml": "^3.13.1", + "minipass": "^2.3.5", "mkdirp": "^0.5.1", - "nyc": "^11.8.0", - "opener": "^1.4.1", + "nyc": "^14.0.0", + "opener": "^1.5.1", "os-homedir": "^1.0.2", "own-or": "^1.0.0", "own-or-env": "^1.0.1", - "rimraf": "^2.6.2", + "rimraf": "^2.6.3", "signal-exit": "^3.0.0", - "source-map-support": "^0.5.6", - "stack-utils": "^1.0.0", - "tap-mocha-reporter": "^3.0.7", + "source-map-support": "^0.5.10", + "stack-utils": "^1.0.2", + "tap-mocha-reporter": "^3.0.9", "tap-parser": "^7.0.0", "tmatch": "^4.0.0", "trivial-deferred": "^1.0.1", - "tsame": "^2.0.0", - "write-file-atomic": "^2.3.0", + "ts-node": "^8.0.2", + "tsame": "^2.0.1", + "typescript": "^3.3.3", + "write-file-atomic": "^2.4.2", "yapool": "^1.0.0" + }, + "dependencies": { + "js-yaml": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, + "minipass": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.3.5.tgz", + "integrity": "sha512-Gi1W4k059gyRbyVUZQ4mEqLm0YIUiGYfvxhF6SIlk3ui1WVxMTGfGdQ2SInh3PDrRTVvPKgULkpJtT4RH10+VA==", + "dev": true, + "requires": { + "safe-buffer": "^5.1.2", + "yallist": "^3.0.0" + } + }, + "yallist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", + "integrity": 
"sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==", + "dev": true + } } }, "tap-mocha-reporter": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/tap-mocha-reporter/-/tap-mocha-reporter-3.0.7.tgz", - "integrity": "sha512-GHVXJ38C3oPRpM3YUc43JlGdpVZYiKeT1fmAd3HH2+J+ZWwsNAUFvRRdoGsXLw9+gU9o+zXpBqhS/oXyRQYwlA==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/tap-mocha-reporter/-/tap-mocha-reporter-3.0.9.tgz", + "integrity": "sha512-VO07vhC9EG27EZdOe7bWBj1ldbK+DL9TnRadOgdQmiQOVZjFpUEQuuqO7+rNSO2kfmkq5hWeluYXDWNG/ytXTQ==", "dev": true, "requires": { "color-support": "^1.1.0", @@ -7316,6 +5859,32 @@ "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", "dev": true }, + "readable-stream": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "dev": true, + "optional": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "optional": true, + "requires": { + "safe-buffer": "~5.1.0" + } + }, "tap-parser": { "version": "5.4.0", "resolved": "https://registry.npmjs.org/tap-parser/-/tap-parser-5.4.0.tgz", @@ -7341,40 +5910,43 @@ } }, "tar": { - "version": "4.4.8", - "resolved": "https://registry.npmjs.org/tar/-/tar-4.4.8.tgz", - "integrity": "sha512-LzHF64s5chPQQS0IYBn9IN5h3i98c12bo4NCO7e0sGM2llXQ3p2FGC5sdENN4cTW48O915Sh+x+EXx7XW96xYQ==", + "version": "4.4.13", + "resolved": "https://registry.npmjs.org/tar/-/tar-4.4.13.tgz", + "integrity": "sha512-w2VwSrBoHa5BsSyH+KxEqeQBAllHhccyMFVHtGtdMpF4W7IRWfZjFiQceJPChOeTsSDVUpER2T8FA93pr0L+QA==", "requires": { "chownr": "^1.1.1", "fs-minipass": "^1.2.5", - "minipass": "^2.3.4", - "minizlib": "^1.1.1", + "minipass": "^2.8.6", + "minizlib": "^1.2.1", "mkdirp": "^0.5.0", "safe-buffer": "^5.1.2", - "yallist": "^3.0.2" + "yallist": "^3.0.3" }, "dependencies": { - "chownr": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.1.tgz", - "integrity": "sha512-j38EvO5+LHX84jlo6h4UzmOwi0UgW61WRyPtJz4qaadK5eY3BTS5TY/S1Stc3Uk2lIM6TPevAlULiEJwie860g==" - }, "minipass": { - "version": "2.3.5", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.3.5.tgz", - "integrity": "sha512-Gi1W4k059gyRbyVUZQ4mEqLm0YIUiGYfvxhF6SIlk3ui1WVxMTGfGdQ2SInh3PDrRTVvPKgULkpJtT4RH10+VA==", + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz", + "integrity": "sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==", "requires": { "safe-buffer": "^5.1.2", "yallist": "^3.0.0" } - }, - "yallist": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", - "integrity": "sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==" } } }, + "tar-stream": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.1.0.tgz", + "integrity": 
"sha512-+DAn4Nb4+gz6WZigRzKEZl1QuJVOLtAwwF+WUxy1fJ6X63CaGaUAxJRD2KEn1OMfcbCjySTYpNC6WmfQoIEOdw==", + "dev": true, + "requires": { + "bl": "^3.0.0", + "end-of-stream": "^1.4.1", + "fs-constants": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1" + } + }, "term-size": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/term-size/-/term-size-1.2.0.tgz", @@ -7383,6 +5955,121 @@ "execa": "^0.7.0" } }, + "test-exclude": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-5.2.3.tgz", + "integrity": "sha512-M+oxtseCFO3EDtAaGH7iiej3CBkzXqFMbzqYAACdzKui4eZA+pq3tZEwChvOdNfa7xxy8BfbmgJSIr43cC/+2g==", + "dev": true, + "requires": { + "glob": "^7.1.3", + "minimatch": "^3.0.4", + "read-pkg-up": "^4.0.0", + "require-main-filename": "^2.0.0" + }, + "dependencies": { + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "load-json-file": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", + "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "parse-json": "^4.0.0", + "pify": "^3.0.0", + "strip-bom": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz", + "integrity": "sha512-pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", + "dev": true, + "requires": { + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" + } + }, + "path-type": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", + "dev": true, + "requires": { + "pify": "^3.0.0" + } + }, + "read-pkg": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", + "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", + "dev": true, + "requires": { + "load-json-file": "^4.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^3.0.0" + } + }, + "read-pkg-up": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-4.0.0.tgz", + 
"integrity": "sha512-6etQSH7nJGsK0RbG/2TeDzZFa8shjQ1um+SwQQ5cwKy0dhSXdOncEhb1CPpvQG4h7FyOV6EB6YlV0yJvZQNAkA==", + "dev": true, + "requires": { + "find-up": "^3.0.0", + "read-pkg": "^3.0.0" + } + }, + "require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + } + } + }, "text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", @@ -7400,6 +6087,30 @@ "requires": { "readable-stream": "^2.1.5", "xtend": "~4.0.1" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + } } }, "timed-out": { @@ -7427,6 +6138,12 @@ "os-tmpdir": "~1.0.2" } }, + "to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", + "dev": true + }, "tough-cookie": { "version": "2.4.3", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", @@ -7436,16 +6153,43 @@ "punycode": "^1.4.1" } }, + "trim-right": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/trim-right/-/trim-right-1.0.1.tgz", + "integrity": "sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM=", + "dev": true + }, "trivial-deferred": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/trivial-deferred/-/trivial-deferred-1.0.1.tgz", "integrity": "sha1-N21NKdlR1jaKb3oK6FwvTV4GWPM=", "dev": true }, + "ts-node": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-8.3.0.tgz", + "integrity": "sha512-dyNS/RqyVTDcmNM4NIBAeDMpsAdaQ+ojdf0GOLqE6nwJOgzEkdRNzJywhDfwnuvB10oa6NLVG1rUJQCpRN7qoQ==", + "dev": true, + "requires": { + "arg": "^4.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "source-map-support": "^0.5.6", + "yn": "^3.0.0" + }, + "dependencies": { + "diff": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.1.tgz", + "integrity": "sha512-s2+XdvhPCOF01LRQBC8hf4vhbVmI2CGS5aZnxLJlT5FtdhPCDFq80q++zK2KlrVorVDdL5BOGZ/VfLrVtYNF+Q==", + "dev": true + } + } + }, "tsame": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/tsame/-/tsame-2.0.0.tgz", - "integrity": "sha512-dAuzcnOPdqZYojylFQzEes95UDjve3HqKrlTCeLZKSDPMTsn3smzHZqsJj/sWD8wOUkg0RD++B11evyLn2+bIw==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/tsame/-/tsame-2.0.1.tgz", + "integrity": "sha512-jxyxgKVKa4Bh5dPcO42TJL22lIvfd9LOVJwdovKOnJa4TLLrHxquK+DlGm4rkGmrcur+GRx+x4oW00O2pY/fFw==", "dev": true }, "tunnel-agent": { @@ -7476,12 +6220,32 @@ "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=" }, - 
"ua-parser-js": { - "version": "0.7.18", - "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.18.tgz", - "integrity": "sha512-LtzwHlVHwFGTptfNSgezHp7WUlwiqb0gA9AALRbKaERfxwJoiX0A73QbTToxteIAuIaFshhgIZfqK8s7clqgnA==", + "typescript": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.5.2.tgz", + "integrity": "sha512-7KxJovlYhTX5RaRbUdkAXN1KUZ8PwWlTzQdHV6xNqvuFOs7+WBo10TQUqT19Q/Jz2hk5v9TQDIhyLhhJY4p5AA==", "dev": true }, + "uglify-js": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.6.0.tgz", + "integrity": "sha512-W+jrUHJr3DXKhrsS7NUVxn3zqMOFn0hL/Ei6v0anCIMoKC93TjcflTagwIHLW7SfMFfiQuktQyFVCFHGUE0+yg==", + "dev": true, + "optional": true, + "requires": { + "commander": "~2.20.0", + "source-map": "~0.6.1" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "optional": true + } + } + }, "uid-number": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/uid-number/-/uid-number-0.0.6.tgz", @@ -7509,9 +6273,9 @@ "dev": true }, "unique-filename": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-1.1.0.tgz", - "integrity": "sha1-0F8v5AMlYIcfMOk8vnNe6iAVFPM=", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-1.1.1.tgz", + "integrity": "sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==", "requires": { "unique-slug": "^2.0.0" } @@ -7595,6 +6359,14 @@ "resolved": "https://registry.npmjs.org/util-extend/-/util-extend-1.0.3.tgz", "integrity": "sha1-p8IW0mdUUWljeztu3GypEZ4v+T8=" }, + "util-promisify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/util-promisify/-/util-promisify-2.1.0.tgz", + "integrity": "sha1-PCI2R2xNMsX/PEcAKt18E7moKlM=", + "requires": { + "object.getownpropertydescriptors": "^2.0.3" + } + }, "utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", @@ -7602,9 +6374,9 @@ "dev": true }, "uuid": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.3.tgz", + "integrity": "sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ==" }, "validate-npm-package-license": { "version": "3.0.4", @@ -7647,12 +6419,6 @@ "defaults": "^1.0.3" } }, - "whatwg-fetch": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-2.0.4.tgz", - "integrity": "sha512-dcQ1GWpOD/eEQ97k66aiEVpNnapVj90/+R+SXTPYGHpYBBypfKJEQjLrvMZ7YXbKm21gXd4NcuxUTjiv1YtLng==", - "dev": true - }, "which": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", @@ -7695,15 +6461,15 @@ } }, "wordwrap": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", - "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", + "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=", "dev": true }, "worker-farm": { - "version": "1.6.0", - "resolved": 
"https://registry.npmjs.org/worker-farm/-/worker-farm-1.6.0.tgz", - "integrity": "sha512-6w+3tHbM87WnSWnENBUvA2pxJPLhQUg5LKwUQHq3r+XPhIM+Gh2R5ycbwPCyuGbNg+lPgdcnQUhuC02kJCvffQ==", + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/worker-farm/-/worker-farm-1.7.0.tgz", + "integrity": "sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw==", "requires": { "errno": "~0.1.7" } @@ -7744,9 +6510,9 @@ } }, "write-file-atomic": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.3.0.tgz", - "integrity": "sha512-xuPeK4OdjWqtfi59ylvVL0Yn35SF3zgcAcv7rBPFHVaEapaDr4GdGgm3j7ckTwH9wHL7fGmgfAnb0+THrHb8tA==", + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", + "integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==", "requires": { "graceful-fs": "^4.1.11", "imurmurhash": "^0.1.4", @@ -7769,9 +6535,9 @@ "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==" }, "yallist": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=" + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", + "integrity": "sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==" }, "yapool": { "version": "1.0.0", @@ -7780,15 +6546,15 @@ "dev": true }, "yargs": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-11.0.0.tgz", - "integrity": "sha512-Rjp+lMYQOWtgqojx1dEWorjCofi1YN7AoFvYV7b1gx/7dAAeuI4kN5SZiEvr0ZmsZTOpDRcCqrpI10L31tFkBw==", + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-11.1.1.tgz", + "integrity": "sha512-PRU7gJrJaXv3q3yQZ/+/X6KBswZiaQ+zOmdprZcouPYtQgvNU35i+68M4b1ZHLZtYFT5QObFLV+ZkmJYcwKdiw==", "requires": { "cliui": "^4.0.0", "decamelize": "^1.1.1", "find-up": "^2.1.0", "get-caller-file": "^1.0.1", - "os-locale": "^2.0.0", + "os-locale": "^3.1.0", "require-directory": "^2.1.1", "require-main-filename": "^1.0.1", "set-blocking": "^2.0.0", @@ -7812,6 +6578,12 @@ "requires": { "camelcase": "^4.1.0" } + }, + "yn": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.0.tgz", + "integrity": "sha512-kKfnnYkbTfrAdd0xICNFw7Atm8nKpLcLv9AZGEt+kczL/WQVai4e2V6ZN8U/O+iI6WrNuJjNNOyu4zfhl9D3Hg==", + "dev": true } } } diff --git a/package.json b/package.json index 35f4e956169ff..39d76af655d16 100644 --- a/package.json +++ b/package.json @@ -1,5 +1,5 @@ { - "version": "6.5.0", + "version": "6.14.2", "name": "npm", "description": "a package manager for JavaScript", "keywords": [ @@ -33,22 +33,22 @@ "npx": "./bin/npx-cli.js" }, "dependencies": { - "JSONStream": "^1.3.4", + "JSONStream": "^1.3.5", "abbrev": "~1.1.1", "ansicolors": "~0.3.2", "ansistyles": "~0.1.3", - "aproba": "~1.2.0", + "aproba": "^2.0.0", "archy": "~1.0.0", - "bin-links": "^1.1.2", - "bluebird": "^3.5.3", - "byte-size": "^4.0.3", - "cacache": "^11.2.0", - "call-limit": "~1.1.0", - "chownr": "~1.0.1", - "ci-info": "^1.6.0", + "bin-links": "^1.1.7", + "bluebird": "^3.5.5", + "byte-size": "^5.0.1", + "cacache": "^12.0.3", + "call-limit": "^1.1.1", + "chownr": "^1.1.4", + "ci-info": "^2.0.0", "cli-columns": "^3.1.2", - "cli-table3": "^0.5.0", - "cmd-shim": "~2.0.2", + "cli-table3": "^0.5.1", + "cmd-shim": "^3.0.3", "columnify": "~1.5.4", 
"config-chain": "^1.1.12", "detect-indent": "~5.0.0", @@ -59,89 +59,95 @@ "find-npm-prefix": "^1.0.2", "fs-vacuum": "~1.2.10", "fs-write-stream-atomic": "~1.0.10", - "gentle-fs": "^2.0.1", - "glob": "^7.1.3", - "graceful-fs": "^4.1.15", + "gentle-fs": "^2.3.0", + "glob": "^7.1.6", + "graceful-fs": "^4.2.3", "has-unicode": "~2.0.1", - "hosted-git-info": "^2.7.1", + "hosted-git-info": "^2.8.8", "iferr": "^1.0.2", + "infer-owner": "^1.0.4", "inflight": "~1.0.6", - "inherits": "~2.0.3", + "inherits": "^2.0.4", "ini": "^1.3.5", "init-package-json": "^1.10.3", - "is-cidr": "^2.0.6", + "is-cidr": "^3.0.0", "json-parse-better-errors": "^1.0.2", "lazy-property": "~1.0.0", - "libcipm": "^2.0.2", - "libnpmhook": "^4.0.1", - "libnpx": "^10.2.0", - "lock-verify": "^2.0.2", + "libcipm": "^4.0.7", + "libnpm": "^3.0.1", + "libnpmaccess": "^3.0.2", + "libnpmhook": "^5.0.3", + "libnpmorg": "^1.0.1", + "libnpmsearch": "^2.0.2", + "libnpmteam": "^1.0.2", + "libnpx": "^10.2.2", + "lock-verify": "^2.1.0", "lockfile": "^1.0.4", "lodash._baseuniq": "~4.6.0", "lodash.clonedeep": "~4.5.0", "lodash.union": "~4.6.0", "lodash.uniq": "~4.5.0", "lodash.without": "~4.4.0", - "lru-cache": "^4.1.3", + "lru-cache": "^5.1.1", "meant": "~1.0.1", "mississippi": "^3.0.0", "mkdirp": "~0.5.1", "move-concurrently": "^1.0.1", - "node-gyp": "^3.8.0", + "node-gyp": "^5.0.7", "nopt": "~4.0.1", - "normalize-package-data": "~2.4.0", - "npm-audit-report": "^1.3.1", + "normalize-package-data": "^2.5.0", + "npm-audit-report": "^1.3.2", "npm-cache-filename": "~1.0.2", - "npm-install-checks": "~3.0.0", - "npm-lifecycle": "^2.1.0", - "npm-package-arg": "^6.1.0", - "npm-packlist": "^1.1.12", - "npm-pick-manifest": "^2.1.0", - "npm-profile": "^3.0.2", - "npm-registry-client": "^8.6.0", - "npm-registry-fetch": "^1.1.0", + "npm-install-checks": "^3.0.2", + "npm-lifecycle": "^3.1.4", + "npm-package-arg": "^6.1.1", + "npm-packlist": "^1.4.8", + "npm-pick-manifest": "^3.0.2", + "npm-profile": "^4.0.4", + "npm-registry-fetch": "^4.0.3", "npm-user-validate": "~1.0.0", "npmlog": "~4.1.2", "once": "~1.4.0", "opener": "^1.5.1", "osenv": "^0.1.5", - "pacote": "^8.1.6", + "pacote": "^9.5.12", "path-is-inside": "~1.0.2", "promise-inflight": "~1.0.1", "qrcode-terminal": "^0.12.0", - "query-string": "^6.1.0", + "query-string": "^6.8.2", "qw": "~1.0.1", "read": "~1.0.7", - "read-cmd-shim": "~1.0.1", + "read-cmd-shim": "^1.0.5", "read-installed": "~4.0.3", - "read-package-json": "^2.0.13", - "read-package-tree": "^5.2.1", - "readable-stream": "^2.3.6", + "read-package-json": "^2.1.1", + "read-package-tree": "^5.3.1", + "readable-stream": "^3.6.0", + "readdir-scoped-modules": "^1.1.0", "request": "^2.88.0", "retry": "^0.12.0", - "rimraf": "~2.6.2", + "rimraf": "^2.6.3", "safe-buffer": "^5.1.2", - "semver": "^5.5.1", - "sha": "~2.0.1", + "semver": "^5.7.1", + "sha": "^3.0.0", "slide": "~1.1.6", "sorted-object": "~2.0.1", "sorted-union-stream": "~2.1.3", "ssri": "^6.0.1", - "stringify-package": "^1.0.0", - "tar": "^4.4.8", + "stringify-package": "^1.0.1", + "tar": "^4.4.13", "text-table": "~0.2.0", "tiny-relative-date": "^1.3.0", "uid-number": "0.0.6", "umask": "~1.1.0", - "unique-filename": "~1.1.0", + "unique-filename": "^1.1.1", "unpipe": "~1.0.0", "update-notifier": "^2.5.0", - "uuid": "^3.3.2", + "uuid": "^3.3.3", "validate-npm-package-license": "^3.0.4", "validate-npm-package-name": "~3.0.0", "which": "^1.3.1", - "worker-farm": "^1.6.0", - "write-file-atomic": "^2.3.0" + "worker-farm": "^1.7.0", + "write-file-atomic": "^2.4.3" }, "bundleDependencies": [ 
"abbrev", @@ -149,13 +155,15 @@ "ansistyles", "aproba", "archy", + "bin-links", + "bluebird", "byte-size", "cacache", "call-limit", - "bluebird", - "bin-links", "chownr", "ci-info", + "cli-columns", + "cli-table3", "cmd-shim", "columnify", "config-chain", @@ -175,6 +183,7 @@ "hosted-git-info", "iferr", "imurmurhash", + "infer-owner", "inflight", "inherits", "ini", @@ -184,8 +193,14 @@ "JSONStream", "lazy-property", "libcipm", + "libnpm", + "libnpmaccess", "libnpmhook", + "libnpmorg", + "libnpmsearch", + "libnpmteam", "libnpx", + "lock-verify", "lockfile", "lodash._baseindexof", "lodash._baseuniq", @@ -200,20 +215,20 @@ "lodash.without", "lru-cache", "meant", - "mkdirp", "mississippi", + "mkdirp", "move-concurrently", + "node-gyp", "nopt", "normalize-package-data", "npm-audit-report", "npm-cache-filename", - "npm-lifecycle", "npm-install-checks", + "npm-lifecycle", "npm-package-arg", "npm-packlist", "npm-pick-manifest", "npm-profile", - "npm-registry-client", "npm-registry-fetch", "npm-user-validate", "npmlog", @@ -223,27 +238,30 @@ "pacote", "path-is-inside", "promise-inflight", - "query-string", "qrcode-terminal", + "query-string", "qw", - "read", "read-cmd-shim", "read-installed", "read-package-json", "read-package-tree", + "read", "readable-stream", "readdir-scoped-modules", "request", "retry", "rimraf", + "safe-buffer", "semver", "sha", "slide", "sorted-object", "sorted-union-stream", "ssri", + "stringify-package", "tar", "text-table", + "tiny-relative-date", "uid-number", "umask", "unique-filename", @@ -253,40 +271,44 @@ "validate-npm-package-license", "validate-npm-package-name", "which", - "write-file-atomic", - "safe-buffer", "worker-farm", - "tiny-relative-date", - "cli-columns", - "cli-table3", - "node-gyp", - "lock-verify", - "stringify-package" + "write-file-atomic" ], "devDependencies": { - "deep-equal": "~1.0.1", - "licensee": "^5.0.0", - "marked": "^0.5.0", - "marked-man": "~0.2.1", - "npm-registry-couchapp": "^2.7.1", - "npm-registry-mock": "~1.1.0", - "require-inject": "^1.4.3", - "sprintf-js": "~1.1.1", + "deep-equal": "^1.0.1", + "get-stream": "^4.1.0", + "licensee": "^7.0.3", + "marked": "^0.6.3", + "marked-man": "^0.6.0", + "npm-registry-couchapp": "^2.7.3", + "npm-registry-mock": "^1.3.0", + "require-inject": "^1.4.4", + "sprintf-js": "^1.1.2", "standard": "^11.0.1", - "tacks": "^1.2.7", - "tap": "^12.0.1" + "tacks": "^1.3.0", + "tap": "^12.7.0", + "tar-stream": "^2.1.0" }, "scripts": { "dumpconf": "env | grep npm | sort | uniq", - "prepare": "node bin/npm-cli.js --no-audit --no-timing prune --prefix=. --no-global && rimraf test/*/*/node_modules && make -j4 doc", + "prepare": "node bin/npm-cli.js rebuild && node bin/npm-cli.js --no-audit --no-timing prune --prefix=. 
--no-global && rimraf test/*/*/node_modules && make -j4 mandocs", "preversion": "bash scripts/update-authors.sh && git add AUTHORS && git commit -m \"update AUTHORS\" || true", "licenses": "licensee --production --errors-only", - "tap": "tap --reporter=classic --timeout 300", - "tap-cover": "tap --reporter=classic --nyc-arg='--cache' --coverage --timeout 600", - "test": "standard && npm run test-tap", - "test-coverage": "npm run tap-cover -- \"test/tap/*.js\" \"test/network/*.js\" \"test/broken-under-*/*.js\"", - "test-tap": "npm run tap -- \"test/tap/*.js\" \"test/network/*.js\" \"test/broken-under-*/*.js\"", - "test-node": "tap --timeout 240 \"test/tap/*.js\" \"test/network/*.js\" \"test/broken-under-nyc*/*.js\"" + "tap": "tap -J --timeout 300 --no-esm", + "tap-cover": "tap -J --nyc-arg=--cache --coverage --timeout 600 --no-esm", + "lint": "standard", + "pretest": "npm run lint", + "test": "npm run test-tap --", + "test:nocleanup": "NO_TEST_CLEANUP=1 npm run test --", + "sudotest": "sudo npm run tap -- \"test/tap/*.js\"", + "sudotest:nocleanup": "sudo NO_TEST_CLEANUP=1 npm run tap -- \"test/tap/*.js\"", + "posttest": "rimraf test/npm_cache*", + "test-coverage": "npm run tap-cover -- \"test/tap/*.js\" \"test/network/*.js\"", + "test-tap": "npm run tap -- \"test/tap/*.js\" \"test/network/*.js\"", + "test-node": "tap --timeout 240 \"test/tap/*.js\" \"test/network/*.js\"" }, - "license": "Artistic-2.0" + "license": "Artistic-2.0", + "engines": { + "node": "6 >=6.2.0 || 8 || >=9.3.0" + } } diff --git a/scripts/clean-old.sh b/scripts/clean-old.sh index cda80f2f4845d..32a203e4a5f7e 100755 --- a/scripts/clean-old.sh +++ b/scripts/clean-old.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # look for old 0.x cruft, and get rid of it. # Should already be sitting in the npm folder. diff --git a/scripts/dep-update b/scripts/dep-update index 52abd518c317b..006de17c7203a 100755 --- a/scripts/dep-update +++ b/scripts/dep-update @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash node . install --save $1@$2 &&\ node scripts/gen-dev-ignores.js &&\ git add node_modules package.json package-lock.json &&\ diff --git a/scripts/dev-dep-update b/scripts/dev-dep-update index c8c9604759165..cb0b783a837f4 100755 --- a/scripts/dev-dep-update +++ b/scripts/dev-dep-update @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash node . install --save --save-dev $1@$2 &&\ node scripts/gen-dev-ignores.js &&\ git add package.json package-lock.json &&\ diff --git a/scripts/doc-build.sh b/scripts/doc-build.sh deleted file mode 100755 index a37a5e2618fa8..0000000000000 --- a/scripts/doc-build.sh +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env bash - -if [[ $DEBUG != "" ]]; then - set -x -fi -set -o errexit -set -o pipefail - -src=$1 -dest=$2 -name=$(basename ${src%.*}) -date=$(date -u +'%Y-%m-%d %H:%M:%S') -version=$(node bin/npm-cli.js -v) - -mkdir -p $(dirname $dest) - -html_replace_tokens () { - local url=$1 - sed "s|@NAME@|$name|g" \ - | sed "s|@DATE@|$date|g" \ - | sed "s|@URL@|$url|g" \ - | sed "s|@VERSION@|$version|g" \ - | perl -p -e 's/]*)>([^\(]*\([0-9]\)) -- (.*?)<\/h1>/
<h1\1>\2<\/h1> <p>
    \3<\/p>/g' \ - | perl -p -e 's/npm-npm/npm/g' \ - | perl -p -e 's/([^"-])(npm-)?README(?!\.html)(\(1\))?/\1README<\/a>/g' \ - | perl -p -e 's/<a href="[^"]+README.html">README<\/a><\/title>/<title>README<\/title>/g' \ - | perl -p -e 's/([^"-])([^\(> ]+)(\(1\))/\1<a href="..\/cli\/\2.html">\2\3<\/a>/g' \ - | perl -p -e 's/([^"-])([^\(> ]+)(\(3\))/\1<a href="..\/api\/\2.html">\2\3<\/a>/g' \ - | perl -p -e 's/([^"-])([^\(> ]+)(\(5\))/\1<a href="..\/files\/\2.html">\2\3<\/a>/g' \ - | perl -p -e 's/([^"-])([^\(> ]+)(\(7\))/\1<a href="..\/misc\/\2.html">\2\3<\/a>/g' \ - | perl -p -e 's/\([1357]\)<\/a><\/h1>/<\/a><\/h1>/g' \ - | (if [ $(basename $(dirname $dest)) == "doc" ]; then - perl -p -e 's/ href="\.\.\// href="/g' - else - cat - fi) -} - -man_replace_tokens () { - sed "s|@VERSION@|$version|g" \ - | perl -p -e 's/(npm\\-)?([a-zA-Z\\\.\-]*)\(1\)/npm help \2/g' \ - | perl -p -e 's/(npm\\-)?([a-zA-Z\\\.\-]*)\(([57])\)/npm help \3 \2/g' \ - | perl -p -e 's/(npm\\-)?([a-zA-Z\\\.\-]*)\(3\)/npm apihelp \2/g' \ - | perl -p -e 's/npm\(1\)/npm help npm/g' \ - | perl -p -e 's/npm\(3\)/npm apihelp npm/g' -} - -case $dest in - *.[1357]) - ./node_modules/.bin/marked-man --roff $src \ - | man_replace_tokens > $dest - exit $? - ;; - *.html) - url=${dest/html\//} - (cat html/dochead.html && \ - cat $src | ./node_modules/.bin/marked && - cat html/docfoot.html)\ - | html_replace_tokens $url \ - > $dest - exit $? - ;; - *) - echo "Invalid destination type: $dest" >&2 - exit 1 - ;; -esac diff --git a/scripts/docs-build.js b/scripts/docs-build.js new file mode 100644 index 0000000000000..390ea20be706e --- /dev/null +++ b/scripts/docs-build.js @@ -0,0 +1,28 @@ +#!/usr/bin/env node + +var fs = require('fs') +var marked = require('marked-man') +var npm = require('../lib/npm.js') +var args = process.argv.slice(2) +var src = args[0] +var dest = args[1] || src + +fs.readFile(src, 'utf8', function (err, data) { + if (err) return console.log(err) + + function replacer (match, p1) { + return 'npm help ' + p1.replace(/npm /, '') + } + + var result = data.replace(/@VERSION@/g, npm.version) + .replace(/^---([\s\S]+?)---/g, '') + .replace(/\[([^\]]+)\]\(\/cli-commands\/([^)]+)\)/g, replacer) + .replace(/\[([^\]]+)\]\(\/configuring-npm\/([^)]+)\)/g, replacer) + .replace(/\[([^\]]+)\]\(\/using-npm\/([^)]+)\)/g, replacer) + .replace(/(# .*)\s+(## (.*))/g, '$1 - $3') + .trim() + + fs.writeFile(dest, marked(result), 'utf8', function (err) { + if (err) return console.log(err) + }) +}) diff --git a/scripts/index-build.js b/scripts/index-build.js deleted file mode 100755 index e782716d77291..0000000000000 --- a/scripts/index-build.js +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/env node -var fs = require('fs') -var path = require('path') -var root = path.resolve(__dirname, '..') -var glob = require('glob') -var conversion = { 'cli': 1, 'api': 3, 'files': 5, 'misc': 7 } - -glob(root + '/{README.md,doc/*/*.md}', function (er, files) { - if (er) throw er - - output(files.map(function (f) { - var b = path.basename(f) - if (b === 'README.md') return [0, b] - if (b === 'index.md') return null - var s = conversion[path.basename(path.dirname(f))] - return [s, f] - }).filter(function (f) { - return f - }).sort(function (a, b) { - return (a[0] === b[0]) - ? (path.basename(a[1]) === 'npm.md' ? -1 - : path.basename(b[1]) === 'npm.md' ? 1 - : a[1] > b[1] ? 
1 : -1) - : a[0] - b[0] - })) -}) - -function output (files) { - console.log( - 'npm-index(7) -- Index of all npm documentation\n' + - '==============================================\n') - - writeLines(files, 0) - writeLines(files, 1, 'Command Line Documentation', 'Using npm on the command line') - writeLines(files, 3, 'API Documentation', 'Using npm in your Node programs') - writeLines(files, 5, 'Files', 'File system structures npm uses') - writeLines(files, 7, 'Misc', 'Various other bits and bobs') -} - -function writeLines (files, sxn, heading, desc) { - if (heading) { - console.log('## %s\n\n%s\n', heading, desc) - } - files.filter(function (f) { - return f[0] === sxn - }).forEach(writeLine) -} - -function writeLine (sd) { - var sxn = sd[0] || 1 - var doc = sd[1] - var d = path.basename(doc, '.md') - - var content = fs.readFileSync(doc, 'utf8').split('\n')[0].split('-- ')[1] - - console.log('### %s(%d)\n', d, sxn) - console.log(content + '\n') -} diff --git a/scripts/install.sh b/scripts/install.sh index e352e55caf042..7f66151daea03 100755 --- a/scripts/install.sh +++ b/scripts/install.sh @@ -23,12 +23,14 @@ if [ "x$0" = "xsh" ]; then if [ $ret -eq 0 ]; then (exit 0) else + echo "Uninstalling npm-install-$$.sh" >&2 rm npm-install-$$.sh echo "Failed to download script" >&2 exit $ret fi sh npm-install-$$.sh ret=$? + echo "Uninstalling npm-install-$$.sh" >&2 rm npm-install-$$.sh exit $ret fi @@ -98,8 +100,10 @@ fi if [ $ret -eq 0 ] && [ -x "$tar" ]; then echo "tar=$tar" - echo "version:" - $tar --version + if [ $tar --version > /dev/null 2>&1 ]; then + echo "version:" + $tar --version + fi ret=$? fi diff --git a/scripts/maketest b/scripts/maketest index 118eb5fbc7dc9..4ef1e9266fb04 100755 --- a/scripts/maketest +++ b/scripts/maketest @@ -24,7 +24,6 @@ function generateFromDir (dir) { return `'use strict' const path = require('path') const test = require('tap').test -const mr = require('npm-registry-mock') const Tacks = require('tacks') const File = Tacks.File const Symlink = Tacks.Symlink @@ -39,7 +38,7 @@ const tmpdir = path.join(basedir, 'tmp') const conf = { cwd: testdir, - env: Object.assign({}, process.env, { + env: common.newEnv().extend({ npm_config_cache: cachedir, npm_config_tmp: tmpdir, npm_config_prefix: globaldir, @@ -48,7 +47,6 @@ const conf = { }) } -let server const fixture = new Tacks(Dir({ cache: Dir(), global: Dir(), @@ -65,28 +63,22 @@ function cleanup () { fixture.remove(basedir) } -test('setup', function (t) { +test('setup', t => { setup() - mr({port: common.port, throwOnUnmatched: true}, function (err, s) { - if (err) throw err - server = s - t.done() - }) + return common.fakeRegistry.listen() }) -test('example', function (t) { - common.npm(['install'], conf, function (err, code, stdout, stderr) { - if (err) throw err +test('example', t => { + return common.npm(['install'], conf).then(([code, stdout, stderr]) => { t.is(code, 0, 'command ran ok') t.comment(stdout.trim()) t.comment(stderr.trim()) // your assertions here - t.done() }) }) -test('cleanup', function (t) { - server.close() +test('cleanup', t => { + common.fakeRegistry.close() cleanup() t.done() })\n` diff --git a/scripts/pr b/scripts/pr new file mode 100755 index 0000000000000..30a3b05ffe3b1 --- /dev/null +++ b/scripts/pr @@ -0,0 +1,167 @@ +#!/usr/bin/env bash + +# Land a pull request +# Creates a PR-### branch, pulls the commits, opens up an interactive rebase to +# squash, and then annotates the commit with the changelog goobers +# +# Usage: +# pr <url|number> [<upstream remote>=origin] + +main () { + if 
[ "$1" = "finish" ]; then + shift + finish "$@" + return $? + fi + + local url="$(prurl "$@")" + local num=$(basename $url) + local prpath="${url#git@github.com:}" + local repo=${prpath%/pull/$num} + local prweb="https://github.com/$prpath" + local root="$(prroot "$url")" + local api="https://api.github.com/repos/${repo}/pulls/${num}" + local user=$(curl -s $api | json user.login) + local ref="$(prref "$url" "$root")" + local curhead="$(git show --no-patch --pretty=%H HEAD)" + local curbranch="$(git rev-parse --abbrev-ref HEAD)" + local cleanlines + IFS=$'\n' cleanlines=($(git status -s -uno)) + if [ ${#cleanlines[@]} -ne 0 ]; then + echo "working dir not clean" >&2 + IFS=$'\n' echo "${cleanlines[@]}" >&2 + echo "aborting PR merge" >&2 + fi + + # ok, ready to rock + branch=PR-$num + if [ "$curbranch" == "$branch" ]; then + echo "already on $branch, you're on your own" >&2 + return 1 + fi + + me=$(git config github.user || git config user.name) + if [ "$me" == "" ]; then + echo "run 'git config --add github.user <username>'" >&2 + return 1 + fi + + exists=$(git show --no-patch --pretty=%H $branch 2>/dev/null) + if [ "$exists" == "" ]; then + git fetch origin pull/$num/head:$branch + git checkout $branch + else + git checkout $branch + git pull --rebase origin pull/$num/head + fi + + git rebase -i $curbranch # squash and test + + if [ $? -eq 0 ]; then + finish "${curbranch}" + else + echo "resolve conflicts and run: $0 finish "'"'${curbranch}'"' + fi +} + +# add the PR-URL to the last commit, after squashing +finish () { + if [ $# -eq 0 ]; then + echo "Usage: $0 finish <branch> (while on a PR-### branch)" >&2 + return 1 + fi + + local curbranch="$1" + local ref=$(cat .git/HEAD) + local prnum + case $ref in + "ref: refs/heads/PR-"*) + prnum=${ref#ref: refs/heads/PR-} + ;; + *) + echo "not on the PR-## branch any more!" >&2 + return 1 + ;; + esac + + local me=$(git config github.user || git config user.name) + if [ "$me" == "" ]; then + echo "run 'git config --add github.user <username>'" >&2 + return 1 + fi + + set -x + + local url="$(prurl "$prnum")" + local num=$prnum + local prpath="${url#git@github.com:}" + local repo=${prpath%/pull/$num} + local prweb="https://github.com/$prpath" + local root="$(prroot "$url")" + + local api="https://api.github.com/repos/${repo}/pulls/${num}" + local user=$(curl -s $api | json user.login) + + local lastmsg="$(git log -1 --pretty=%B)" + local newmsg="${lastmsg} + +PR-URL: ${prweb} +Credit: @${user} +Close: #${num} +Reviewed-by: @${me} +" + git commit --amend -m "$newmsg" + git checkout $curbranch + git merge PR-${prnum} --ff-only + set +x +} + + +prurl () { + local url="$1" + if [ "$url" == "" ] && type pbpaste &>/dev/null; then + url="$(pbpaste)" + fi + if [[ "$url" =~ ^[0-9]+$ ]]; then + local us="$2" + if [ "$us" == "" ]; then + us="origin" + fi + local num="$url" + local o="$(git config --get remote.${us}.url)" + url="${o}" + url="${url#(git:\/\/|https:\/\/)}" + url="${url#git@}" + url="${url#github.com[:\/]}" + url="${url%.git}" + url="https://github.com/${url}/pull/$num" + fi + url=${url%/commits} + url=${url%/files} + url="$(echo $url | perl -p -e 's/#issuecomment-[0-9]+$//g')" + + local p='^https:\/\/github.com\/[^\/]+\/[^\/]+\/pull\/[0-9]+$' + if ! 
[[ "$url" =~ $p ]]; then + echo "Usage:" + echo " $0 <pull req url>" + echo " $0 <pull req number> [<remote name>=origin]" + type pbpaste &>/dev/null && + echo "(will read url/id from clipboard if not specified)" + exit 1 + fi + url="${url/https:\/\/github\.com\//git@github.com:}" + echo "$url" +} + +prroot () { + local url="$1" + echo "${url/\/pull\/+([0-9])/}" +} + +prref () { + local url="$1" + local root="$2" + echo "refs${url:${#root}}/head" +} + +main "$@" diff --git a/scripts/release.sh b/scripts/release.sh index 705f21502adfb..31fb6fa194141 100644 --- a/scripts/release.sh +++ b/scripts/release.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # script for creating a zip and tarball for inclusion in node @@ -12,6 +12,8 @@ node ./bin/npm-cli.js pack --loglevel error >/dev/null mv *.tgz release cd release tar xzf *.tgz +cp ../.npmrc package/ +cp -r ../tap-snapshots package/ cp -r ../test package/ mkdir node_modules diff --git a/scripts/relocate.sh b/scripts/relocate.sh index b7483f2963aea..ff40f857a679e 100755 --- a/scripts/relocate.sh +++ b/scripts/relocate.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Change the cli shebang to point at the specified node # Useful for when the program is moved around after install. diff --git a/scripts/update-authors.sh b/scripts/update-authors.sh index 75a6e549b16dd..ac7b99137a987 100755 --- a/scripts/update-authors.sh +++ b/scripts/update-authors.sh @@ -1,6 +1,6 @@ #!/bin/sh -git log --reverse --format='%aN <%aE>' | perl -wnE ' +git log --use-mailmap --reverse --format='%aN <%aE>' | perl -wnE ' BEGIN { say "# Authors sorted by whether or not they\x27re me"; } diff --git a/scripts/update-dist-tags.js b/scripts/update-dist-tags.js new file mode 100644 index 0000000000000..f28bfd0b91739 --- /dev/null +++ b/scripts/update-dist-tags.js @@ -0,0 +1,123 @@ +'use strict' + +/** + * Usage: + * + * node scripts/update-dist-tags.js --otp <one-time password> + * node scripts/update-dist-tags.js --otp=<one-time password> + * node scripts/update-dist-tags.js --otp<one-time password> + */ + +const usage = ` +Usage: + +node scripts/update-dist-tags.js --otp <one-time password> +node scripts/update-dist-tags.js --otp=<one-time password> +node scripts/update-dist-tags.js --otp<one-time password> +` + +const { execSync } = require('child_process') +const semver = require('semver') +const path = require('path') + +const getMajorVersion = (input) => semver.parse(input).major +const getMinorVersion = (input) => semver.parse(input).minor + +// INFO: String templates to generate the tags to update +const LATEST_TAG = (strings, major) => `latest-${major}` +const NEXT_TAG = (strings, major) => `next-${major}` +const TAG_LIST = ['lts', 'next', 'latest'] +const REMOVE_TAG = (strings, major, minor) => `v${major}.${minor}-next` + +// INFO: Finds `--otp` and subsequently otp value (if present) +const PARSE_OTP_FLAG = new RegExp(/(--otp)(=|\s)?([0-9]{6})?/, 'gm') +// INFO: Used to validate otp value (if not found by other regexp) +const PARSE_OTP_VALUE = new RegExp(/^[0-9]{6}$/, 'g') + +const args = process.argv.slice(2) +const versionPath = path.resolve(__dirname, '..', 'package.json') +const { version } = require(versionPath) + +// Run Script +main() + +function main () { + const otp = parseOTP(args) + if (version) { + const major = getMajorVersion(version) + const minor = getMinorVersion(version) + const latestTag = LATEST_TAG`${major}` + const nextTag = NEXT_TAG`${major}` + const removeTag = REMOVE_TAG`${major}${minor}` + const updateList = [].concat(TAG_LIST, 
latestTag, nextTag) + + updateList.forEach((tag) => { + setDistTag(tag, version, otp) + }) + removeDistTag(removeTag, version, otp) + } else { + console.error('Invalid semver.') + process.exit(1) + } +} + +function parseOTP (args) { + // NOTE: making assumption first _thing_ is a string with "--otp" in it + const parsedArgs = PARSE_OTP_FLAG.exec(args[0]) + if (!parsedArgs) { + console.error('Invalid arguments supplied. Must supply --otp flag.') + console.error(usage) + process.exit(1) + } + // INFO: From the regexp, third group is the OTP code + const otp = parsedArgs[3] + switch (args.length) { + case 0: { + console.error('No arguments supplied.') + console.error(usage) + process.exit(1) + } + case 1: { + // --otp=123456 or --otp123456 + if (otp) { + return otp + } + console.error('Invalid otp value supplied. [CASE 1]') + process.exit(1) + } + case 2: { + // --otp 123456 + // INFO: validating the second argument is an otp code + const isValidOtp = PARSE_OTP_VALUE.test(args[1]) + if (isValidOtp) { + return args[1] + } + console.error('Invalid otp value supplied. [CASE 2]') + process.exit(1) + } + default: { + console.error('Invalid arguments supplied.') + process.exit(1) + } + } +} + +function setDistTag (tag, version, otp) { + try { + const result = execSync(`npm dist-tag set npm@${version} ${tag} --otp=${otp}`, { encoding: 'utf-8' }) + console.log('Result:', result) + } catch (err) { + console.error('Bad dist-tag command.') + process.exit(1) + } +} + +function removeDistTag (tag, version, otp) { + try { + const result = execSync(`npm dist-tag rm npm ${tag} --otp=${otp}`, { encoding: 'utf-8' }) + console.log('Result:', result) + } catch (err) { + console.error('Bad dist-tag command.') + process.exit(1) + } +} diff --git a/tap-snapshots/test-tap-fund.js-TAP.test.js b/tap-snapshots/test-tap-fund.js-TAP.test.js new file mode 100644 index 0000000000000..963c59f3e1286 --- /dev/null +++ b/tap-snapshots/test-tap-fund.js-TAP.test.js @@ -0,0 +1,74 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/tap/fund.js TAP fund containing multi-level nested deps with no funding > should omit dependencies with no funding declared 1`] = ` +nested-no-funding-packages@1.0.0 ++-- https://example.com/lorem +| \`-- lorem@1.0.0 ++-- http://example.com/donate +| \`-- bar@1.0.0 +\`-- https://example.com/sponsor + \`-- sub-bar@1.0.0 + + +` + +exports[`test/tap/fund.js TAP fund does not support global > should throw EFUNDGLOBAL error 1`] = ` + +` + +exports[`test/tap/fund.js TAP fund does not support global > should write error msgs to stderr 1`] = ` +npm ERR! code EFUNDGLOBAL +npm ERR! \`npm fund\` does not support global packages +` + +exports[`test/tap/fund.js TAP fund does not support global, using --json option > should write error msgs to stderr 1`] = ` +npm ERR! code EFUNDGLOBAL +npm ERR! 
\`npm fund\` does not support global packages +` + +exports[`test/tap/fund.js TAP fund in which same maintainer owns all its deps > should print stack packages together 1`] = ` +maintainer-owns-all-deps@1.0.0 +\`-- http://example.com/donate + \`-- dep-bar@1.0.0, dep-foo@1.0.0, dep-sub-foo@1.0.0 + + +` + +exports[`test/tap/fund.js TAP fund using nested packages with multiple sources > should prompt with all available URLs 1`] = ` +1: Funding available at the following URL: https://one.example.com +2: Funding available at the following URL: https://two.example.com +Run \`npm fund [<@scope>/]<pkg> --which=1\`, for example, to open the first funding URL listed in that package + +` + +exports[`test/tap/fund.js TAP fund using nested packages with multiple sources, with a source number > should open the numbered URL 1`] = ` +Funding available at the following URL: + +https://one.example.com + +` + +exports[`test/tap/fund.js TAP fund using package argument with no browser > should open funding url 1`] = ` +individual funding available at the following URL: + +http://example.com/donate + +` + +exports[`test/tap/fund.js TAP fund using string shorthand > should open string-only url 1`] = ` +Funding available at the following URL: + +https://example.com/sponsor + +` + +exports[`test/tap/fund.js TAP fund with no package containing funding > should print empty funding info 1`] = ` +no-funding-package@0.0.0 + + +` diff --git a/tap-snapshots/test-tap-repo.js-TAP.test.js b/tap-snapshots/test-tap-repo.js-TAP.test.js new file mode 100644 index 0000000000000..3fba79edb8d42 --- /dev/null +++ b/tap-snapshots/test-tap-repo.js-TAP.test.js @@ -0,0 +1,21 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! 
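+ * For example, assuming the repository's "tap" npm script, this snapshot file could be refreshed with: + * + *   TAP_SNAPSHOT=1 npm run tap -- test/tap/repo.js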
+ */ +'use strict' +exports[`test/tap/repo.js TAP npm repo underscore --json > should print json result 1`] = ` +{ + "title": "repository available at the following URL", + "url": "https://github.com/jashkenas/underscore" +} + +` + +exports[`test/tap/repo.js TAP npm repo underscore --no-browser > should print alternative msg 1`] = ` +repository available at the following URL: + +https://github.com/jashkenas/underscore + +` diff --git a/test/broken-under-nyc-and-travis/whoami.js b/test/broken-under-nyc-and-travis/whoami.js deleted file mode 100644 index a5668b121059c..0000000000000 --- a/test/broken-under-nyc-and-travis/whoami.js +++ /dev/null @@ -1,77 +0,0 @@ -var common = require('../common-tap.js') - -var fs = require('fs') -var path = require('path') -var createServer = require('http').createServer - -var test = require('tap').test -var rimraf = require('rimraf') - -var opts = { cwd: __dirname } - -var FIXTURE_PATH = path.resolve(__dirname, 'fixture_npmrc') - -test('npm whoami with basic auth', function (t) { - var s = '//registry.lvh.me/:username = wombat\n' + - '//registry.lvh.me/:_password = YmFkIHBhc3N3b3Jk\n' + - '//registry.lvh.me/:email = lindsay@wdu.org.au\n' - fs.writeFileSync(FIXTURE_PATH, s, 'ascii') - fs.chmodSync(FIXTURE_PATH, '0444') - - common.npm( - [ - 'whoami', - '--userconfig=' + FIXTURE_PATH, - '--registry=http://registry.lvh.me/' - ], - opts, - function (err, code, stdout, stderr) { - t.ifError(err) - - t.equal(stderr, '', 'got nothing on stderr') - t.equal(code, 0, 'exit ok') - t.equal(stdout, 'wombat\n', 'got username') - rimraf.sync(FIXTURE_PATH) - t.end() - } - ) -}) - -test('npm whoami with bearer auth', { timeout: 2 * 1000 }, function (t) { - var s = '//localhost:' + common.port + - '/:_authToken = wombat-developers-union\n' - fs.writeFileSync(FIXTURE_PATH, s, 'ascii') - fs.chmodSync(FIXTURE_PATH, '0444') - - function verify (req, res) { - t.equal(req.method, 'GET') - t.equal(req.url, '/-/whoami') - - res.setHeader('content-type', 'application/json') - res.writeHead(200) - res.end(JSON.stringify({ username: 'wombat' }), 'utf8') - } - - var server = createServer(verify) - - server.listen(common.port, function () { - common.npm( - [ - 'whoami', - '--userconfig=' + FIXTURE_PATH, - '--registry=http://localhost:' + common.port + '/' - ], - opts, - function (err, code, stdout, stderr) { - t.ifError(err) - - t.equal(stderr, '', 'got nothing on stderr') - t.equal(code, 0, 'exit ok') - t.equal(stdout, 'wombat\n', 'got username') - rimraf.sync(FIXTURE_PATH) - server.close() - t.end() - } - ) - }) -}) diff --git a/test/common-config.js b/test/common-config.js new file mode 100644 index 0000000000000..003150335483a --- /dev/null +++ b/test/common-config.js @@ -0,0 +1,90 @@ +if (module === require.main) { + console.log('1..1') + console.log('ok 1 setup done') + process.exit(0) +} + +var fs = require('graceful-fs') +var path = require('path') +var userconfigSrc = path.resolve(__dirname, 'fixtures', 'config', 'userconfig') +exports.userconfig = userconfigSrc + '-with-gc' +exports.globalconfig = path.resolve(__dirname, 'fixtures', 'config', 'globalconfig') + +// if this hasn't been written yet, then do it now. +try { + fs.statSync(exports.userconfig) +} catch (er) { + var uc = fs.readFileSync(userconfigSrc) + var gcini = 'globalconfig = ' + exports.globalconfig + '\n' + // atomic! + fs.writeFileSync(exports.userconfig + '.' + process.pid, gcini + uc) + fs.renameSync(exports.userconfig + '.' 
+ process.pid, exports.userconfig) +} + +exports.builtin = path.resolve(__dirname, 'fixtures', 'config', 'builtin') +exports.malformed = path.resolve(__dirname, 'fixtures', 'config', 'malformed') +exports.ucData = + { globalconfig: exports.globalconfig, + email: 'i@izs.me', + 'env-thing': 'asdf', + 'init.author.name': 'Isaac Z. Schlueter', + 'init.author.email': 'i@izs.me', + 'init.author.url': 'http://blog.izs.me/', + 'init.version': '1.2.3', + 'npm:publishtest': true, + '_npmjs.org:couch': 'https://admin:password@localhost:5984/registry', + 'npm-www:nocache': '1', + nodedir: '/Users/isaacs/dev/js/node-v0.8', + 'sign-git-tag': true, + message: 'v%s', + 'strict-ssl': false, + 'tmp': path.normalize(process.env.HOME + '/.tmp'), + _auth: 'dXNlcm5hbWU6cGFzc3dvcmQ=', + _token: + { AuthSession: 'yabba-dabba-doodle', + version: '1', + expires: '1345001053415', + path: '/', + httponly: true } } + +// set the userconfig in the env +// unset anything else that npm might be trying to foist on us +Object.keys(process.env).forEach(function (k) { + if (k.match(/^npm_config_/i)) { + delete process.env[k] + } +}) +process.env.npm_config_userconfig = exports.userconfig +process.env.npm_config_other_env_thing = '1000' +process.env.random_env_var = 'asdf' +process.env.npm_config__underbar_env_thing = 'underful' +process.env.NPM_CONFIG_UPPERCASE_ENV_THING = '42' + +exports.envData = { + userconfig: exports.userconfig, + '_underbar-env-thing': 'underful', + 'uppercase-env-thing': '42', + 'other-env-thing': '1000' +} +exports.envDataFix = { + userconfig: exports.userconfig, + '_underbar-env-thing': 'underful', + 'uppercase-env-thing': 42, + 'other-env-thing': 1000 +} + +var projectConf = path.resolve(__dirname, '..', '.npmrc') +try { + fs.statSync(projectConf) +} catch (er) { + // project conf not found, probably working with packed npm + fs.writeFileSync(projectConf, '') +} + +var projectRc = path.join(__dirname, 'fixtures', 'config', '.npmrc') +try { + fs.statSync(projectRc) +} catch (er) { + // project conf not found, probably working with packed npm + fs.writeFileSync(projectRc, 'just = testing') +} diff --git a/test/common-tap.js b/test/common-tap.js index 37d5efe9f86c8..9e38c7690fa71 100644 --- a/test/common-tap.js +++ b/test/common-tap.js @@ -1,11 +1,23 @@ 'use strict' /* eslint-disable camelcase */ +const configCommon = require('./common-config.js') var fs = require('graceful-fs') var readCmdShim = require('read-cmd-shim') var isWindows = require('../lib/utils/is-windows.js') var Bluebird = require('bluebird') +if (isWindows) { + var PATH = process.env.PATH ? 
'PATH' : 'Path' + process.env[PATH] += ';C:\\Program Files\\Git\\mingw64\\libexec\\git-core' +} + +// remove any git envs so that we don't mess with the main repo +// when running git subprocesses in tests +Object.keys(process.env).filter(k => /^GIT/.test(k)).forEach( + k => delete process.env[k] +) + // cheesy hackaround for test deps (read: nock) that rely on setImmediate if (!global.setImmediate || !require('timers').setImmediate) { require('timers').setImmediate = global.setImmediate = function () { @@ -15,11 +27,73 @@ if (!global.setImmediate || !require('timers').setImmediate) { } var spawn = require('child_process').spawn +const spawnSync = require('child_process').spawnSync var path = require('path') -var port = exports.port = 1337 +// space these out to help prevent collisions +const testId = 3 * (+process.env.TAP_CHILD_ID || 0) + +// provide a working dir unique to each test +const main = require.main.filename +const testName = path.basename(main, '.js') +exports.pkg = path.resolve(path.dirname(main), testName) +var commonCache = path.resolve(__dirname, 'npm_cache_' + testName) +exports.cache = commonCache + +const mkdirp = require('mkdirp') +const rimraf = require('rimraf') +rimraf.sync(exports.pkg) +rimraf.sync(commonCache) +mkdirp.sync(exports.pkg) +mkdirp.sync(commonCache) +// if we're in sudo mode, make sure that the cache is not root-owned +const isRoot = process.getuid && process.getuid() === 0 +const isSudo = isRoot && process.env.SUDO_UID && process.env.SUDO_GID +if (isSudo) { + const sudoUid = +process.env.SUDO_UID + const sudoGid = +process.env.SUDO_GID + fs.chownSync(commonCache, sudoUid, sudoGid) +} + +const returnCwd = path.dirname(__dirname) +const find = require('which').sync('find') +require('tap').teardown(() => { + // work around windows folder locking + process.chdir(returnCwd) + process.on('exit', () => { + try { + if (isSudo) { + // running tests as sudo. ensure we didn't leave any root-owned + // files in the cache by mistake. 
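+      // (roughly: find <cache dir> -uid 0 ; any output means a root-owned file was left behind)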
+ const args = [ commonCache, '-uid', '0' ] + const found = spawnSync(find, args) + const output = found && found.stdout && found.stdout.toString() + if (output.length) { + const er = new Error('Root-owned files left in cache!') + er.testName = main + er.files = output.trim().split('\n') + throw er + } + } + if (!process.env.NO_TEST_CLEANUP) { + rimraf.sync(exports.pkg) + rimraf.sync(commonCache) + } + } catch (e) { + if (process.platform !== 'win32') { + throw e + } + } + }) +}) + +var port = exports.port = 15443 + testId exports.registry = 'http://localhost:' + port +exports.altPort = 7331 + testId + +exports.gitPort = 4321 + testId + var fakeRegistry = require('./fake-registry.js') exports.fakeRegistry = fakeRegistry @@ -29,13 +103,14 @@ ourenv.npm_config_progress = 'false' ourenv.npm_config_metrics = 'false' ourenv.npm_config_audit = 'false' -var npm_config_cache = path.resolve(__dirname, 'npm_cache') -ourenv.npm_config_cache = exports.npm_config_cache = npm_config_cache -ourenv.npm_config_userconfig = exports.npm_config_userconfig = path.join(__dirname, 'fixtures', 'config', 'userconfig') -ourenv.npm_config_globalconfig = exports.npm_config_globalconfig = path.join(__dirname, 'fixtures', 'config', 'globalconfig') +ourenv.npm_config_unsafe_perm = 'true' +ourenv.npm_config_cache = commonCache +ourenv.npm_config_userconfig = exports.npm_config_userconfig = configCommon.userconfig +ourenv.npm_config_globalconfig = exports.npm_config_globalconfig = configCommon.globalconfig ourenv.npm_config_global_style = 'false' ourenv.npm_config_legacy_bundling = 'false' ourenv.npm_config_fetch_retries = '0' +ourenv.npm_config_update_notifier = 'false' ourenv.random_env_var = 'foo' // suppress warnings about using a prerelease version of node ourenv.npm_config_node_version = process.version.replace(/-.*$/, '') @@ -64,7 +139,10 @@ exports.npm = function (cmd, opts, cb) { opts.env = opts.env || process.env if (opts.env._storage) opts.env = Object.assign({}, opts.env._storage) if (!opts.env.npm_config_cache) { - opts.env.npm_config_cache = npm_config_cache + opts.env.npm_config_cache = commonCache + } + if (!opts.env.npm_config_unsafe_perm) { + opts.env.npm_config_unsafe_perm = 'true' } if (!opts.env.npm_config_send_metrics) { opts.env.npm_config_send_metrics = 'false' @@ -109,11 +187,15 @@ exports.makeGitRepo = function (params, cb) { var added = params.added || ['package.json'] var message = params.message || 'stub repo' - var opts = { cwd: root, env: { PATH: process.env.PATH } } + var opts = { cwd: root, env: { PATH: process.env.PATH || process.env.Path } } var commands = [ git.chainableExec(['init'], opts), git.chainableExec(['config', 'user.name', user], opts), git.chainableExec(['config', 'user.email', email], opts), + // don't time out tests waiting for a gpg passphrase or 2fa + git.chainableExec(['config', 'commit.gpgSign', 'false'], opts), + git.chainableExec(['config', 'tag.gpgSign', 'false'], opts), + git.chainableExec(['config', 'tag.forceSignAnnotated', 'false'], opts), git.chainableExec(['add'].concat(added), opts), git.chainableExec(['commit', '-m', message], opts) ] @@ -135,17 +217,15 @@ exports.readBinLink = function (path) { exports.skipIfWindows = function (why) { if (!isWindows) return - console.log('1..1') if (!why) why = 'this test not available on windows' - console.log('ok 1 # skip ' + why) + require('tap').plan(0, why) process.exit(0) } exports.pendIfWindows = function (why) { if (!isWindows) return - console.log('1..1') if (!why) why = 'this test is pending further changes on 
windows' - console.log('not ok 1 # todo ' + why) + require('tap').fail(' ', { todo: why, diagnostic: false }) process.exit(0) } diff --git a/test/fake-registry.js b/test/fake-registry.js index 7566443e64cf1..e09e154c76514 100644 --- a/test/fake-registry.js +++ b/test/fake-registry.js @@ -1,7 +1,14 @@ 'use strict' const common = require('./common-tap.js') const Bluebird = require('bluebird') -const log = require('npmlog') +const silentLogger = { + http: () => {}, + silly: () => {} +} + +const log = process.env.TAP_CHILD_ID + ? silentLogger + : require('npmlog') const http = require('http') const EventEmitter = require('events') diff --git a/test/fake-registry.md b/test/fake-registry.md index 6cebc0bb81154..604fda41670be 100644 --- a/test/fake-registry.md +++ b/test/fake-registry.md @@ -35,7 +35,7 @@ would be to use its singleton. ``` const common = require('../common-tap.js') -const mr = common.mockRegistry +const mr = common.fakeRegistry ``` If you have need of multiple registries at the same time, you can construct @@ -43,7 +43,7 @@ them by hand: ``` const common = require('../common-tap.js') -const FakeRegistry = common.mockRegistry.FakeRegistry +const FakeRegistry = common.fakeRegistry.FakeRegistry const mr = new FakeRegistry(opts) ``` @@ -150,7 +150,7 @@ If no route can be found then a `404` response will be provided. ## Construction const common = require('../common-tap.js') -const mr = common.mockRegistry.compat +const mr = common.fakeRegistry.compat ### mr(options[, callback]) → Promise(server) diff --git a/test/need-npm5-update/ignore-shrinkwrap.js b/test/need-npm5-update/ignore-shrinkwrap.js index 9468162eeef86..4980b659fd23f 100644 --- a/test/need-npm5-update/ignore-shrinkwrap.js +++ b/test/need-npm5-update/ignore-shrinkwrap.js @@ -24,7 +24,7 @@ var json = { name: 'ignore-shrinkwrap', version: '0.0.0', dependencies: { - 'npm-test-ignore-shrinkwrap-file': 'http://localhost:1337/package.js' + 'npm-test-ignore-shrinkwrap-file': 'http://localhost:' + common.port + '/package.js' } } @@ -34,8 +34,8 @@ var shrinkwrap = { dependencies: { 'npm-test-ignore-shrinkwrap-file': { version: '1.2.3', - from: 'http://localhost:1337/shrinkwrap.js', - resolved: 'http://localhost:1337/shrinkwrap.js', + from: 'http://localhost:' + common.port + '/shrinkwrap.js', + resolved: 'http://localhost:' + common.port + '/shrinkwrap.js', dependencies: { opener: { version: '1.3.0', diff --git a/test/need-npm5-update/peer-deps-invalid.js b/test/need-npm5-update/peer-deps-invalid.js index 39ad612e5a9be..15fda253ecc48 100644 --- a/test/need-npm5-update/peer-deps-invalid.js +++ b/test/need-npm5-update/peer-deps-invalid.js @@ -18,8 +18,8 @@ var json = { name: 'peer-deps-invalid', version: '0.0.0', dependencies: { - 'npm-test-peer-deps-file': 'http://localhost:1337/ok.js', - 'npm-test-peer-deps-file-invalid': 'http://localhost:1337/invalid.js' + 'npm-test-peer-deps-file': 'http://localhost:' + common.port + '/ok.js', + 'npm-test-peer-deps-file-invalid': 'http://localhost:' + common.port + '/invalid.js' } } diff --git a/test/network/registry.js b/test/network/registry.js index 3dbe23ffd25bd..d7e11c6007111 100644 --- a/test/network/registry.js +++ b/test/network/registry.js @@ -2,7 +2,7 @@ // This verifies that the server-side stuff still works. 
var common = require('../common-tap') -var test = require('tap').test +var t = require('tap') var npmExec = require.resolve('../../bin/npm-cli.js') var path = require('path') @@ -17,16 +17,15 @@ if (v[0] === 0 && v[1] < 10) { process.versions.node ) } else { - which('couchdb', function (er) { - if (er) { - console.error('WARNING: need couch to run test: ' + er.message) - } else { - runTests() - } - }) + try { + which.sync('couchdb') + t.test(runTests) + } catch (er) { + console.error('WARNING: need couch to run test: ' + er.message) + } } -function runTests () { +function runTests (t) { var env = Object.assign({ TAP: 1 }, process.env) env.npm = npmExec // TODO: fix tap and / or nyc to handle nested invocations properly @@ -39,10 +38,8 @@ function runTests () { common.npm(['install'], opts, function (err, code) { if (err) { throw err } if (code) { - return test('need install to work', function (t) { - t.fail('install failed with: ' + code) - t.end() - }) + t.fail('install failed with: ' + code) + return t.end() } else { opts = { cwd: ca, @@ -52,10 +49,8 @@ function runTests () { common.npm(['test', '--', '-Rtap', '--no-coverage'], opts, function (err, code) { if (err) { throw err } if (code) { - return test('need test to work', function (t) { - t.fail('test failed with: ' + code) - t.end() - }) + t.fail('test failed with: ' + code) + return t.end() } opts = { cwd: ca, @@ -63,8 +58,9 @@ function runTests () { stdio: 'inherit' } common.npm(['prune', '--production'], opts, function (err, code) { - if (err) { throw err } - process.exit(code || 0) + t.ifError(err) + t.equal(code, 0) + return t.end() }) }) } diff --git a/test/tap/404-parent.js b/test/tap/404-parent.js index 4321f7d829b25..ee9623c545505 100644 --- a/test/tap/404-parent.js +++ b/test/tap/404-parent.js @@ -1,33 +1,24 @@ var common = require('../common-tap.js') var test = require('tap').test var npm = require('../../') -var osenv = require('osenv') var path = require('path') var fs = require('fs') var rimraf = require('rimraf') -var mkdirp = require('mkdirp') -var pkg = path.resolve(__dirname, '404-parent') +const pkg = common.pkg var mr = require('npm-registry-mock') test('404-parent: if parent exists, specify parent in error message', function (t) { setup() - rimraf.sync(path.resolve(pkg, 'node_modules')) - performInstall(function (err) { - t.ok(err instanceof Error, 'error was returned') - t.ok(err.parent === '404-parent-test', "error's parent set") - t.end() + rimraf(path.resolve(pkg, 'node_modules'), () => { + performInstall(function (err) { + t.ok(err instanceof Error, 'error was returned') + t.equal(err.parent, '404-parent', "error's parent set") + t.end() + }) }) }) -test('cleanup', function (t) { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) - t.end() -}) - function setup () { - mkdirp.sync(pkg) - mkdirp.sync(path.resolve(pkg, 'cache')) fs.writeFileSync(path.resolve(pkg, 'package.json'), JSON.stringify({ author: 'Evan Lucas', name: '404-parent-test', @@ -42,6 +33,9 @@ function setup () { function performInstall (cb) { mr({port: common.port}, function (er, s) { // create mock registry. 
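+    // hand mock-registry startup errors back to the caller instead of letting the test crash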
+ if (er) { + return cb(er) + } s.get('/test-npm-404-parent-test') .many().reply(404, {'error': 'version not found'}) npm.load({ diff --git a/test/tap/404-private-registry-scoped.js b/test/tap/404-private-registry-scoped.js index 48889376caa37..0aa262f54628c 100644 --- a/test/tap/404-private-registry-scoped.js +++ b/test/tap/404-private-registry-scoped.js @@ -1,24 +1,9 @@ var test = require('tap').test -var path = require('path') -var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var common = require('../common-tap.js') -var mr = require('npm-registry-mock') +var mr = common.fakeRegistry.compat var server -var testdir = path.join(__dirname, path.basename(__filename, '.js')) - -function setup () { - cleanup() - mkdirp.sync(testdir) -} - -function cleanup () { - rimraf.sync(testdir) -} - test('setup', function (t) { - setup() mr({port: common.port, throwOnUnmatched: true}, function (err, s) { t.ifError(err, 'registry mocked successfully') server = s @@ -31,7 +16,7 @@ test('scoped package names not mangled on error with non-root registry', functio common.npm( [ '--registry=' + common.registry, - '--cache=' + testdir, + '--cache=' + common.cache, 'cache', 'add', '@scope/foo@*', @@ -44,14 +29,8 @@ test('scoped package names not mangled on error with non-root registry', functio t.match(stderr, /E404/, 'should notify the sort of error as a 404') t.match(stderr, /@scope(?:%2f|\/)foo/, 'should have package name in error') server.done() + server.close() t.end() } ) }) - -test('cleanup', function (t) { - server.close() - cleanup() - t.pass('cleaned up') - t.end() -}) diff --git a/test/tap/404-private-registry.js b/test/tap/404-private-registry.js index a38fa02c12536..0ca05105dc9f3 100644 --- a/test/tap/404-private-registry.js +++ b/test/tap/404-private-registry.js @@ -1,25 +1,12 @@ var test = require('tap').test var path = require('path') -var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var common = require('../common-tap.js') -var mr = require('npm-registry-mock') +var mr = common.fakeRegistry.compat var server var packageName = path.basename(__filename, '.js') -var testdir = path.join(__dirname, packageName) - -function setup () { - cleanup() - mkdirp.sync(testdir) -} - -function cleanup () { - rimraf.sync(testdir) -} test('setup', function (t) { - setup() mr({port: common.port, throwOnUnmatched: true}, function (err, s) { t.ifError(err, 'registry mocked successfully') server = s @@ -32,7 +19,7 @@ test('package names not mangled on error with non-root registry', function (t) { common.npm( [ '--registry=' + common.registry, - '--cache=' + testdir, + '--cache=' + common.cache, 'cache', 'add', packageName + '@*' @@ -43,14 +30,8 @@ test('package names not mangled on error with non-root registry', function (t) { t.equal(code, 1, 'exited with error') t.match(stderr, packageName, 'should have package name in error') server.done() + server.close() t.end() } ) }) - -test('cleanup', function (t) { - server.close() - cleanup() - t.pass('cleaned up') - t.end() -}) diff --git a/test/tap/404.js b/test/tap/404.js new file mode 100644 index 0000000000000..af146371c7933 --- /dev/null +++ b/test/tap/404.js @@ -0,0 +1,72 @@ +'use strict' +const path = require('path') +const test = require('tap').test +const Tacks = require('tacks') +const File = Tacks.File +const Dir = Tacks.Dir +const common = require('../common-tap.js') + +const e404 = /test-npm-404@latest' is not in the npm registry/ +const invalidPackage = /Your package name is not valid, because[\s\S]+1\. 
name can only contain URL-friendly characters/ + +const basedir = common.pkg +const testdir = path.join(basedir, 'testdir') +const cachedir = common.cache +const globaldir = path.join(basedir, 'global') +const tmpdir = path.join(basedir, 'tmp') + +const env = common.newEnv().extend({ + npm_config_cache: cachedir, + npm_config_tmp: tmpdir, + npm_config_prefix: globaldir, + npm_config_registry: common.registry, + npm_config_loglevel: 'error' +}) + +const fixture = new Tacks(Dir({ + cache: Dir(), + global: Dir(), + tmp: Dir(), + testdir: Dir({ + 'package.json': File({ + name: 'test', + version: '1.0.0' + }) + }) +})) + +function setup () { + cleanup() + fixture.create(basedir) +} + +function cleanup () { + fixture.remove(basedir) +} + +test('setup', function (t) { + setup() + return common.fakeRegistry.listen() +}) + +test('404 message for basic package', function (t) { + return common.npm(['install', 'test-npm-404'], {cwd: testdir, env}).then(([code, stdout, stderr]) => { + t.is(code, 1, 'error code') + t.match(stderr, e404, 'error output') + t.notMatch(stderr, invalidPackage, 'no invalidity error') + }) +}) + +test('404 message for scoped package', function (t) { + return common.npm(['install', '@npm/test-npm-404'], {cwd: testdir, env}).then(([code, stdout, stderr]) => { + t.is(code, 1, 'error code') + t.match(stderr, e404, 'error output') + t.notMatch(stderr, invalidPackage, 'no invalidity error') + }) +}) + +test('cleanup', function (t) { + common.fakeRegistry.close() + cleanup() + t.done() +}) diff --git a/test/tap/access.js b/test/tap/access.js index 4bed4b4b25797..f537db2586409 100644 --- a/test/tap/access.js +++ b/test/tap/access.js @@ -1,37 +1,36 @@ -var fs = require('fs') -var path = require('path') -var mkdirp = require('mkdirp') -var rimraf = require('rimraf') -var mr = require('npm-registry-mock') +'use strict' -var test = require('tap').test -var common = require('../common-tap.js') +const fs = require('fs') +const path = require('path') +const mkdirp = require('mkdirp') +const rimraf = require('rimraf') +const mr = require('npm-registry-mock') -var pkg = path.resolve(__dirname, 'access') -var server +const test = require('tap').test +const common = require('../common-tap.js') -var scoped = { +const pkg = common.pkg + +let server + +const scoped = { name: '@scoped/pkg', version: '1.1.1' } test('setup', function (t) { - mkdirp(pkg, function (er) { - t.ifError(er, pkg + ' made successfully') - - mr({port: common.port}, function (err, s) { - t.ifError(err, 'registry mocked successfully') - server = s - - fs.writeFile( - path.join(pkg, 'package.json'), - JSON.stringify(scoped), - function (er) { - t.ifError(er, 'wrote package.json') - t.end() - } - ) - }) + mr({port: common.port}, function (err, s) { + t.ifError(err, 'registry mocked successfully') + server = s + + fs.writeFile( + path.join(pkg, 'package.json'), + JSON.stringify(scoped), + function (er) { + t.ifError(er, 'wrote package.json') + t.end() + } + ) }) }) @@ -61,7 +60,7 @@ test('npm access public on current package', function (t) { test('npm access public when no package passed and no package.json', function (t) { // need to simulate a missing package.json - var missing = path.join(__dirname, 'access-public-missing-guard') + var missing = path.join(pkg, 'access-public-missing-guard') mkdirp.sync(path.join(missing, 'node_modules')) common.npm([ @@ -74,14 +73,13 @@ test('npm access public when no package passed and no package.json', function (t function (er, code, stdout, stderr) { t.ifError(er, 'npm access') 
t.match(stderr, /no package name passed to command and no package.json found/) - rimraf.sync(missing) - t.end() + rimraf(missing, t.end) }) }) test('npm access public when no package passed and invalid package.json', function (t) { // need to simulate a missing package.json - var invalid = path.join(__dirname, 'access-public-invalid-package') + var invalid = path.join(pkg, 'access-public-invalid-package') mkdirp.sync(path.join(invalid, 'node_modules')) // it's hard to force `read-package-json` to break w/o ENOENT, but this will do it fs.writeFileSync(path.join(invalid, 'package.json'), '{\n') @@ -96,8 +94,7 @@ test('npm access public when no package passed and invalid package.json', functi function (er, code, stdout, stderr) { t.ifError(er, 'npm access') t.match(stderr, /Failed to parse json/) - rimraf.sync(invalid) - t.end() + rimraf(invalid, t.end) }) }) @@ -160,19 +157,22 @@ test('npm change access on unscoped package', function (t) { function (er, code, stdout, stderr) { t.ok(code, 'exited with Error') t.matches( - stderr, /access commands are only accessible for scoped packages/) + stderr, /only available for scoped packages/) t.end() } ) }) test('npm access grant read-only', function (t) { - server.put('/-/team/myorg/myteam/package', { - permissions: 'read-only', - package: '@scoped/another' - }).reply(201, { - accessChaged: true + server.filteringRequestBody((body) => { + const data = JSON.parse(body) + t.deepEqual(data, { + permissions: 'read-only', + package: '@scoped/another' + }, 'got the right body') + return true }) + server.put('/-/team/myorg/myteam/package', true).reply(201) common.npm( [ 'access', @@ -191,12 +191,15 @@ test('npm access grant read-only', function (t) { }) test('npm access grant read-write', function (t) { - server.put('/-/team/myorg/myteam/package', { - permissions: 'read-write', - package: '@scoped/another' - }).reply(201, { - accessChaged: true + server.filteringRequestBody((body) => { + const data = JSON.parse(body) + t.deepEqual(data, { + permissions: 'read-write', + package: '@scoped/another' + }, 'got the right body') + return true }) + server.put('/-/team/myorg/myteam/package', true).reply(201) common.npm( [ 'access', @@ -214,6 +217,33 @@ test('npm access grant read-write', function (t) { ) }) +test('npm access grant read-write on unscoped package', function (t) { + server.filteringRequestBody((body) => { + const data = JSON.parse(body) + t.deepEqual(data, { + permissions: 'read-write', + package: 'another' + }, 'got the right body') + return true + }) + server.put('/-/team/myorg/myteam/package', true).reply(201) + common.npm( + [ + 'access', + 'grant', 'read-write', + 'myorg:myteam', + 'another', + '--registry', common.registry + ], + { cwd: pkg }, + function (er, code, stdout, stderr) { + t.ifError(er, 'npm access grant') + t.equal(code, 0, 'exited with Error') + t.end() + } + ) +}) + test('npm access grant others', function (t) { common.npm( [ @@ -372,7 +402,7 @@ test('npm access ls-packages on user', function (t) { test('npm access ls-packages with no package specified or package.json', function (t) { // need to simulate a missing package.json - var missing = path.join(__dirname, 'access-missing-guard') + var missing = path.join(pkg, 'access-missing-guard') mkdirp.sync(path.join(missing, 'node_modules')) var serverPackages = { @@ -400,8 +430,7 @@ test('npm access ls-packages with no package specified or package.json', functio function (er, code, stdout, stderr) { t.ifError(er, 'npm access ls-packages') t.same(JSON.parse(stdout), clientPackages) - 
rimraf.sync(missing) - t.end() + rimraf(missing, t.end) } ) }) @@ -461,6 +490,34 @@ test('npm access ls-collaborators on package', function (t) { ) }) +test('npm access ls-collaborators on unscoped', function (t) { + var serverCollaborators = { + 'myorg:myteam': 'write', + 'myorg:anotherteam': 'read' + } + var clientCollaborators = { + 'myorg:myteam': 'read-write', + 'myorg:anotherteam': 'read-only' + } + server.get( + '/-/package/pkg/collaborators?format=cli' + ).reply(200, serverCollaborators) + common.npm( + [ + 'access', + 'ls-collaborators', + 'pkg', + '--registry', common.registry + ], + { cwd: pkg }, + function (er, code, stdout, stderr) { + t.ifError(er, 'npm access ls-collaborators') + t.same(JSON.parse(stdout), clientCollaborators) + t.end() + } + ) +}) + test('npm access ls-collaborators on current w/user filter', function (t) { var serverCollaborators = { 'myorg:myteam': 'write', @@ -524,7 +581,6 @@ test('npm access blerg', function (t) { test('cleanup', function (t) { t.pass('cleaned up') - rimraf.sync(pkg) server.done() server.close() t.end() diff --git a/test/tap/add-named-update-protocol-port.js b/test/tap/add-named-update-protocol-port.js index 37851e10686a1..2876b6cdbc480 100644 --- a/test/tap/add-named-update-protocol-port.js +++ b/test/tap/add-named-update-protocol-port.js @@ -15,7 +15,7 @@ var fooPkg = { name: packageName, version: '0.0.0', dist: { - tarball: 'https://localhost:1338/registry/' + packageName + '/-/' + packageName + '-0.0.0.tgz', + tarball: 'https://localhost:' + common.altPort + '/registry/' + packageName + '/-/' + packageName + '-0.0.0.tgz', shasum: '356a192b7913b04c54574d18c28d46e6395428ab' } } @@ -30,7 +30,7 @@ var fooiPkg = { name: iPackageName, version: '0.0.0', dist: { - tarball: 'http://127.0.0.1:1338/registry/' + iPackageName + '/-/' + iPackageName + '-0.0.0.tgz', + tarball: 'http://127.0.0.1:' + common.altPort + '/registry/' + iPackageName + '/-/' + iPackageName + '-0.0.0.tgz', shasum: '356a192b7913b04c54574d18c28d46e6395428ab' } } @@ -39,13 +39,13 @@ var fooiPkg = { test('setup', function (t) { mr({ - port: 1337, + port: common.port, throwOnUnmatched: true }, function (err, s) { t.ifError(err, 'registry mocked successfully') server1 = s mr({ - port: 1338, + port: common.altPort, throwOnUnmatched: true }, function (err, s) { t.ifError(err, 'registry mocked successfully') @@ -67,7 +67,7 @@ test('tarball paths should update port if updating protocol', function (t) { 'add', packageName + '@0.0.0', '--registry', - 'http://localhost:1337/registry' + 'http://localhost:' + common.port + '/registry' ], {}, function (er, code, stdout, stderr) { @@ -92,7 +92,7 @@ test('tarball paths should NOT update if different hostname', function (t) { 'add', iPackageName + '@0.0.0', '--registry', - 'http://localhost:1337/registry' + 'http://localhost:' + common.port + '/registry' ], {}, function (er, code, stdout, stderr) { diff --git a/test/tap/add-remote-git-file.js b/test/tap/add-remote-git-file.js index 20392af8770be..483c6368c6490 100644 --- a/test/tap/add-remote-git-file.js +++ b/test/tap/add-remote-git-file.js @@ -4,17 +4,16 @@ var fs = require('fs') var resolve = require('path').resolve var url = require('url') -var osenv = require('osenv') var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var test = require('tap').test var npm = require('../../lib/npm.js') var fetchPackageMetadata = require('../../lib/fetch-package-metadata.js') var common = require('../common-tap.js') -var pkg = resolve(__dirname, 'add-remote-git-file') -var repo = 
resolve(__dirname, 'add-remote-git-file-repo') +var pkg = resolve(common.pkg, 'package') +var repo = resolve(common.pkg, 'repo') +mkdirp.sync(pkg) var git var cloneURL = 'git+file://' + resolve(pkg, 'child.git') @@ -25,7 +24,6 @@ var pjChild = JSON.stringify({ }, null, 2) + '\n' test('setup', function (t) { - bootstrap() setup(function (er, r) { t.ifError(er, 'git started up successfully') @@ -70,16 +68,6 @@ test('save install', function (t) { }) }) -test('clean', function (t) { - cleanup() - t.end() -}) - -function bootstrap () { - cleanup() - mkdirp.sync(pkg) -} - function setup (cb) { mkdirp.sync(repo) fs.writeFileSync(resolve(repo, 'package.json'), pjChild) @@ -95,9 +83,3 @@ function setup (cb) { }, cb) }) } - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(repo) - rimraf.sync(pkg) -} diff --git a/test/tap/add-remote-git-shrinkwrap.js b/test/tap/add-remote-git-shrinkwrap.js index c7fb2f9b961aa..01a033e89451f 100644 --- a/test/tap/add-remote-git-shrinkwrap.js +++ b/test/tap/add-remote-git-shrinkwrap.js @@ -1,16 +1,14 @@ var fs = require('fs') var resolve = require('path').resolve -var osenv = require('osenv') var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var test = require('tap').test var npm = require('../../lib/npm.js') var common = require('../common-tap.js') -var pkg = resolve(__dirname, 'add-remote-git-shrinkwrap') -var repo = resolve(__dirname, 'add-remote-git-shrinkwrap-repo') +var pkg = resolve(common.pkg, 'package') +var repo = resolve(common.pkg, 'repo') var daemon var daemonPID @@ -20,7 +18,7 @@ var pjParent = JSON.stringify({ name: 'parent', version: '1.2.3', dependencies: { - 'child': 'git://localhost:1234/child.git#master' + 'child': 'git://localhost:' + common.gitPort + '/child.git#master' } }, null, 2) + '\n' @@ -30,7 +28,8 @@ var pjChild = JSON.stringify({ }, null, 2) + '\n' test('setup', function (t) { - bootstrap() + mkdirp.sync(pkg) + fs.writeFileSync(resolve(pkg, 'package.json'), pjParent) setup(function (er, r) { t.ifError(er, 'git started up successfully') @@ -73,7 +72,7 @@ test('shrinkwrap gets correct _from and _resolved (#7121)', function (t) { t.notOk(stderr, 'no error output') var treeish = stdout.trim() - t.like(shrinkwrap, {dependencies: {child: {version: 'git://localhost:1234/child.git#' + treeish}}}, + t.like(shrinkwrap, {dependencies: {child: {version: 'git://localhost:' + common.gitPort + '/child.git#' + treeish}}}, 'npm shrinkwrapped resolved correctly' ) @@ -85,19 +84,10 @@ test('shrinkwrap gets correct _from and _resolved (#7121)', function (t) { }) test('clean', function (t) { - daemon.on('close', function () { - cleanup() - t.end() - }) + daemon.on('close', t.end) process.kill(daemonPID) }) -function bootstrap () { - cleanup() - mkdirp.sync(pkg) - fs.writeFileSync(resolve(pkg, 'package.json'), pjParent) -} - function setup (cb) { mkdirp.sync(repo) fs.writeFileSync(resolve(repo, 'package.json'), pjChild) @@ -114,7 +104,7 @@ function setup (cb) { '--export-all', '--base-path=.', '--reuseaddr', - '--port=1234' + '--port=' + common.gitPort ], { cwd: pkg, @@ -145,9 +135,3 @@ function setup (cb) { }, cb) }) } - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(repo) - rimraf.sync(pkg) -} diff --git a/test/tap/add-remote-git-submodule.js b/test/tap/add-remote-git-submodule.js index 6e26712c56177..54f2819fb330e 100644 --- a/test/tap/add-remote-git-submodule.js +++ b/test/tap/add-remote-git-submodule.js @@ -1,7 +1,7 @@ var fs = require('fs') var resolve = require('path').resolve -var osenv = 
require('osenv') +var cwd = process.cwd() var mkdirp = require('mkdirp') var rimraf = require('rimraf') var test = require('tap').test @@ -9,12 +9,12 @@ var test = require('tap').test var npm = require('../../lib/npm.js') var common = require('../common-tap.js') -var pkg = resolve(__dirname, 'add-remote-git-submodule') -var repos = resolve(__dirname, 'add-remote-git-submodule-repos') +var pkg = resolve(common.pkg, 'package') +var repos = resolve(common.pkg, 'repos') var subwt = resolve(repos, 'subwt') var topwt = resolve(repos, 'topwt') -var suburl = 'git://localhost:1234/sub.git' -var topurl = 'git://localhost:1234/top.git' +var suburl = 'git://localhost:' + common.gitPort + '/sub.git' +var topurl = 'git://localhost:' + common.gitPort + '/top.git' var daemon var daemonPID @@ -62,14 +62,13 @@ test('has file in submodule', function (t) { test('clean', function (t) { daemon.on('close', function () { - cleanup() t.end() }) process.kill(daemonPID) }) function bootstrap (t) { - process.chdir(osenv.tmpdir()) + process.chdir(cwd) rimraf.sync(pkg) mkdirp.sync(pkg) process.chdir(pkg) @@ -97,7 +96,7 @@ function setup (cb) { '--export-all', '--base-path=.', '--reuseaddr', - '--port=1234' + '--port=' + common.gitPort ], { cwd: repos, @@ -141,9 +140,3 @@ function setup (cb) { }, cb) }) } - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(repos) - rimraf.sync(pkg) -} diff --git a/test/tap/add-remote-git.js b/test/tap/add-remote-git.js index 24abf33a773fd..2a61963439111 100644 --- a/test/tap/add-remote-git.js +++ b/test/tap/add-remote-git.js @@ -1,16 +1,14 @@ var fs = require('fs') var resolve = require('path').resolve -var osenv = require('osenv') var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var test = require('tap').test var npm = require('../../lib/npm.js') var common = require('../common-tap.js') -var pkg = resolve(__dirname, 'add-remote-git') -var repo = resolve(__dirname, 'add-remote-git-repo') +var pkg = resolve(common.pkg, 'package') +var repo = resolve(pkg, 'repo') var daemon var daemonPID @@ -20,7 +18,7 @@ var pjParent = JSON.stringify({ name: 'parent', version: '1.2.3', dependencies: { - child: 'git://localhost:1234/child.git' + child: 'git://localhost:' + common.gitPort + '/child.git' } }, null, 2) + '\n' @@ -30,7 +28,8 @@ var pjChild = JSON.stringify({ }, null, 2) + '\n' test('setup', function (t) { - bootstrap() + mkdirp.sync(pkg) + fs.writeFileSync(resolve(pkg, 'package.json'), pjParent) setup(function (er, r) { t.ifError(er, 'git started up successfully') @@ -47,24 +46,15 @@ test('install from repo', function (t) { process.chdir(pkg) npm.commands.install('.', [], function (er) { t.ifError(er, 'npm installed via git') - t.end() }) }) test('clean', function (t) { - daemon.on('close', function () { - cleanup() - t.end() - }) + daemon.on('close', t.end) process.kill(daemonPID) }) -function bootstrap () { - mkdirp.sync(pkg) - fs.writeFileSync(resolve(pkg, 'package.json'), pjParent) -} - function setup (cb) { mkdirp.sync(repo) fs.writeFileSync(resolve(repo, 'package.json'), pjChild) @@ -81,12 +71,12 @@ function setup (cb) { '--export-all', '--base-path=.', '--reuseaddr', - '--port=1234' + '--port=' + common.gitPort ], { cwd: pkg, env: process.env, - stdio: ['pipe', 'pipe', 'pipe'] + stdio: ['pipe', 1, 'pipe'] } ) d.stderr.on('data', childFinder) @@ -112,9 +102,3 @@ function setup (cb) { }, cb) }) } - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(repo) - rimraf.sync(pkg) -} diff --git a/test/tap/adduser-always-auth.js 
b/test/tap/adduser-always-auth.js index d327e35a9e547..26541eebbc7b6 100644 --- a/test/tap/adduser-always-auth.js +++ b/test/tap/adduser-always-auth.js @@ -6,8 +6,8 @@ var mr = require('npm-registry-mock') var test = require('tap').test var common = require('../common-tap.js') -var opts = {cwd: __dirname} -var outfile = path.resolve(__dirname, '_npmrc') +var opts = { cwd: common.pkg } +var outfile = path.resolve(common.pkg, '_npmrc') var responses = { 'Username': 'u\n', 'Password': 'p\n', @@ -98,7 +98,7 @@ test('npm login --scope <scope> uses <scope>:registry as its URI', function (t) ], opts, function (err, code) { - t.notOk(code, 'exited OK') + t.equal(code, 0, 'exited OK') t.notOk(err, 'no error output') var config = fs.readFileSync(outfile, 'utf8') t.like(config, new RegExp(scope + ':registry=' + uri), 'scope:registry is set') diff --git a/test/tap/adduser-legacy-auth.js b/test/tap/adduser-legacy-auth.js index 49015878f7d8e..e46901c2b95fe 100644 --- a/test/tap/adduser-legacy-auth.js +++ b/test/tap/adduser-legacy-auth.js @@ -7,8 +7,8 @@ var mr = require('npm-registry-mock') var test = require('tap').test var common = require('../common-tap.js') -var opts = { cwd: __dirname } -var pkg = path.resolve(__dirname, 'adduser-legacy-auth') +var opts = { cwd: common.pkg } +var pkg = common.pkg var outfile = path.resolve(pkg, '_npmrc') var contents = '_auth=' + Buffer.from('u:x').toString('base64') + '\n' + @@ -42,6 +42,7 @@ function mocks (server) { } test('setup', function (t) { + rimraf.sync(pkg) mkdirp(pkg, function (er) { t.ifError(er, pkg + ' made successfully') @@ -65,8 +66,8 @@ test('npm login', function (t) { opts, function (err, code, stdout, stderr) { if (err) throw err - t.is(code, 0, 'exited OK') t.is(stderr, '', 'no error output') + t.is(code, 0, 'exited OK') var config = fs.readFileSync(outfile, 'utf8') t.like(config, /:always-auth=false/, 'always-auth is scoped and false (by default)') s.close() diff --git a/test/tap/adduser-oauth.js b/test/tap/adduser-oauth.js index 04065048c425b..241fef2a2ed7f 100644 --- a/test/tap/adduser-oauth.js +++ b/test/tap/adduser-oauth.js @@ -7,8 +7,8 @@ var mr = require('npm-registry-mock') var test = require('tap').test var common = require('../common-tap.js') -var opts = { cwd: __dirname } -var pkg = path.resolve(__dirname, 'adduser-oauth') +var opts = { cwd: common.pkg } +var pkg = common.pkg var fakeBrowser = path.resolve(pkg, '_script.sh') var configfile = path.resolve(pkg, '_npmrc') var outfile = path.resolve(pkg, '_outfile') diff --git a/test/tap/adduser-saml.js b/test/tap/adduser-saml.js index 530ebb52b3c0e..17a1a9f7b3de6 100644 --- a/test/tap/adduser-saml.js +++ b/test/tap/adduser-saml.js @@ -7,8 +7,8 @@ var mr = require('npm-registry-mock') var test = require('tap').test var common = require('../common-tap.js') -var opts = { cwd: __dirname } -var pkg = path.resolve(__dirname, 'adduser-saml') +var opts = { cwd: common.pkg } +var pkg = common.pkg var fakeBrowser = path.resolve(pkg, '_script.sh') var configfile = path.resolve(pkg, '_npmrc') var outfile = path.resolve(pkg, '_outfile') diff --git a/test/tap/aliases.js b/test/tap/aliases.js new file mode 100644 index 0000000000000..21a68ac50f7a5 --- /dev/null +++ b/test/tap/aliases.js @@ -0,0 +1,268 @@ +'use strict' + +const BB = require('bluebird') + +const common = require('../common-tap.js') +const fs = require('graceful-fs') +const mockTar = require('../util/mock-tarball.js') +const mr = common.fakeRegistry.compat +const path = require('path') +const rimraf = BB.promisify(require('rimraf')) 
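+// The tests below exercise npm's "npm:" alias protocol: e.g. `npm install bar@npm:foo@1.2.3` installs foo under the name bar, which ends up in package.json roughly as "bar": "npm:foo@1.2.3" (foo and bar are just the fixture names used here).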
+const Tacks = require('tacks') +const { test } = require('tap') + +const { Dir, File } = Tacks +const readdirAsync = BB.promisify(fs.readdir) +const readFileAsync = BB.promisify(fs.readFile) + +const testDir = common.pkg + +let server +test('setup', t => { + mr({}, (err, s) => { + t.ifError(err, 'registry mocked successfully') + server = s + t.end() + }) +}) + +test('installs an npm: protocol alias package', t => { + const fixture = new Tacks(Dir({ + 'package.json': File({}) + })) + fixture.create(testDir) + const packument = { + name: 'foo', + 'dist-tags': { latest: '1.2.4' }, + versions: { + '1.2.3': { + name: 'foo', + version: '1.2.3', + dist: { + tarball: `${server.registry}/foo/-/foo-1.2.3.tgz` + } + }, + '1.2.4': { + name: 'foo', + version: '1.2.4', + dist: { + tarball: `${server.registry}/foo/-/foo-1.2.4.tgz` + } + } + } + } + server.get('/foo').reply(200, packument) + return mockTar({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.2.3' + }) + }).then(tarball => { + server.get('/foo/-/foo-1.2.3.tgz').reply(200, tarball) + server.get('/foo/-/foo-1.2.4.tgz').reply(200, tarball) + return common.npm([ + 'install', 'foo@1.2.3', + '--cache', path.join(testDir, 'npmcache'), + '--registry', server.registry + ], { cwd: testDir }) + }).then(([code, stdout, stderr]) => { + t.equal(code, 0) + t.comment(stdout) + t.comment(stderr) + return common.npm([ + 'install', 'bar@npm:foo@1.2.3', + '--cache', path.join(testDir, 'npmcache'), + '--registry', server.registry + ], { cwd: testDir }) + }).then(([code, stdout, stderr]) => { + t.equal(code, 0) + t.comment(stdout) + t.comment(stderr) + t.match(stdout, /\+ foo@1\.2\.3 \(as bar\)/, 'useful message') + return readFileAsync( + path.join(testDir, 'node_modules', 'bar', 'package.json'), + 'utf8' + ) + }).then(JSON.parse).then(pkg => { + t.similar(pkg, { + name: 'foo', + version: '1.2.3' + }, 'successfully installed foo as bar in node_modules') + return common.npm(['ls', '--json'], { cwd: testDir }) + }).then(([code, stdout, stderr]) => { + t.comment(stdout) + t.comment(stderr) + t.equal(code, 0, 'ls is clean') + t.deepEqual(JSON.parse(stdout), { + dependencies: { + bar: { + version: '1.2.3', + from: 'bar@npm:foo@1.2.3', + resolved: 'http://localhost:' + common.port + '/foo/-/foo-1.2.3.tgz' + }, + foo: { + version: '1.2.3', + from: 'foo@1.2.3', + resolved: 'http://localhost:' + common.port + '/foo/-/foo-1.2.3.tgz' + } + } + }, 'both dependencies listed correctly') + return common.npm([ + 'outdated', '--json', + '--registry', server.registry + ], { cwd: testDir }) + }).then(([code, stdout, stderr]) => { + t.equal(code, 1, 'non-zero because some packages outdated') + t.comment(stdout) + t.comment(stderr) + const parsed = JSON.parse(stdout) + t.match(parsed, { + foo: { + current: '1.2.3', + wanted: '1.2.4', + latest: '1.2.4', + location: /node_modules[/\\]foo/ + }, + bar: { + current: 'npm:foo@1.2.3', + wanted: 'npm:foo@1.2.4', + latest: 'npm:foo@1.2.4', + location: /node_modules[/\\]bar/ + } + }, 'both regular and aliased dependency reported') + return common.npm([ + 'update', + '--registry', server.registry + ], { cwd: testDir }) + }).then(([code, stdout, stderr]) => { + t.equal(code, 0, 'update succeeded') + t.comment(stdout) + t.comment(stderr) + return common.npm(['ls', '--json'], { cwd: testDir }) + }).then(([code, stdout, stderr]) => { + t.equal(code, 0, 'ls succeeded') + t.comment(stdout) + t.comment(stderr) + const parsed = JSON.parse(stdout) + t.deepEqual(parsed, { + dependencies: { + bar: { + version: '1.2.4', + from: 
'bar@npm:foo@1.2.4', + resolved: 'http://localhost:' + common.port + '/foo/-/foo-1.2.4.tgz' + }, + foo: { + version: '1.2.4', + from: 'foo@1.2.4', + resolved: 'http://localhost:' + common.port + '/foo/-/foo-1.2.4.tgz' + } + } + }, 'ls shows updated packages') + return common.npm([ + 'rm', 'bar', + '--registry', server.registry + ], { cwd: testDir }) + }).then(([code, stdout, stderr]) => { + t.equal(code, 0, 'rm succeeded') + t.comment(stdout) + t.comment(stderr) + t.match(stdout, 'removed 1 package', 'notified of removed package') + return readdirAsync(path.join(testDir, 'node_modules')) + }).then(dir => { + t.deepEqual(dir, ['foo'], 'regular foo left in place') + }).then(() => rimraf(testDir)) +}) + +test('installs a tarball dep as a different name than package.json', t => { + return mockTar({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.2.3' + }) + }).then(tarball => { + const fixture = new Tacks(Dir({ + 'package.json': File({}), + 'foo.tgz': File(tarball) + })) + fixture.create(testDir) + return common.npm([ + 'install', 'file:foo.tgz', + '--cache', path.join(testDir, 'npmcache'), + '--registry', server.registry + ], { cwd: testDir }) + }).then(([code, stdout, stderr]) => { + return common.npm([ + 'install', 'bar@file:foo.tgz', + '--cache', path.join(testDir, 'npmcache'), + '--registry', server.registry + ], { cwd: testDir }) + }).then(([code, stdout, stderr]) => { + t.comment(stdout) + t.comment(stderr) + t.match(stdout, /^\+ foo@1\.2\.3 \(as bar\)/, 'useful message') + return readFileAsync( + path.join(testDir, 'node_modules', 'bar', 'package.json'), + 'utf8' + ) + }).then(JSON.parse).then(pkg => { + t.similar(pkg, { + name: 'foo', + version: '1.2.3' + }, 'successfully installed foo as bar in node_modules') + return common.npm(['ls', '--json'], { cwd: testDir }) + }).then(([code, stdout, stderr]) => { + t.comment(stdout) + t.comment(stderr) + t.similar(JSON.parse(stdout), { + dependencies: { + bar: { + version: '1.2.3', + from: 'file:foo.tgz' + }, + foo: { + version: '1.2.3', + from: 'file:foo.tgz' + } + } + }, 'both dependencies present') + }).then(() => rimraf(testDir)) +}) + +test('installs a symlink dep as a different name than package.json', t => { + const fixture = new Tacks(Dir({ + 'package.json': File({}), + 'foo': Dir({ + 'package.json': File({ + name: 'foo', + version: '1.2.3' + }) + }) + })) + fixture.create(testDir) + return common.npm([ + 'install', 'bar@file:foo', + '--cache', path.join(testDir, 'npmcache'), + '--registry', server.registry + ], { cwd: testDir }).then(([code, stdout, stderr]) => { + t.comment(stdout) + t.comment(stderr) + t.match(stdout, /^\+ foo@1\.2\.3 \(as bar\)/, 'useful message') + return readFileAsync( + path.join(testDir, 'node_modules', 'bar', 'package.json'), + 'utf8' + ) + }).then(JSON.parse).then(pkg => { + t.similar(pkg, { + name: 'foo', + version: '1.2.3' + }, 'successfully installed foo as bar in node_modules') + }).then(() => rimraf(testDir)) +}) + +test('cleanup', t => { + server.close() + return rimraf(testDir) +}) + +test('npm audit supports aliases') +test('npm audit fix supports aliases') diff --git a/test/tap/all-package-metadata-cache-stream-unit.js b/test/tap/all-package-metadata-cache-stream-unit.js index 51be836769050..66b24c5e0c710 100644 --- a/test/tap/all-package-metadata-cache-stream-unit.js +++ b/test/tap/all-package-metadata-cache-stream-unit.js @@ -1,31 +1,39 @@ 'use strict' -require('../common-tap.js') -var test = require('tap').test -var mkdirp = require('mkdirp') -var rimraf = require('rimraf') -var 
path = require('path') -var ms = require('mississippi') -var Tacks = require('tacks') -var File = Tacks.File +const common = require('../common-tap.js') -var _createCacheEntryStream = require('../../lib/search/all-package-metadata.js')._createCacheEntryStream +const getStream = require('get-stream') +const mkdirp = require('mkdirp') +const path = require('path') +const Tacks = require('tacks') +const {test} = require('tap') -var PKG_DIR = path.resolve(__dirname, 'create-cache-entry-stream') -var CACHE_DIR = path.resolve(PKG_DIR, 'cache') +const {File} = Tacks +const _createCacheEntryStream = require('../../lib/search/all-package-metadata.js')._createCacheEntryStream + +// this test uses a fresh cache for each test block +// create them all in common.cache so that we can verify +// them for root-owned files in sudotest +let CACHE_DIR +let cacheCounter = 1 +const chownr = require('chownr') function setup () { + CACHE_DIR = common.cache + '/' + cacheCounter++ mkdirp.sync(CACHE_DIR) + fixOwner(CACHE_DIR) } -function cleanup () { - rimraf.sync(PKG_DIR) -} +const fixOwner = ( + process.getuid && process.getuid() === 0 && + process.env.SUDO_UID && process.env.SUDO_GID +) ? (path) => chownr.sync(path, +process.env.SUDO_UID, +process.env.SUDO_GID) + : () => {} -test('createCacheEntryStream basic', function (t) { +test('createCacheEntryStream basic', t => { setup() - var cachePath = path.join(CACHE_DIR, '.cache.json') - var fixture = new Tacks(File({ + const cachePath = path.join(CACHE_DIR, '.cache.json') + const fixture = new Tacks(File({ '_updated': 1234, bar: { name: 'bar', @@ -37,16 +45,14 @@ test('createCacheEntryStream basic', function (t) { } })) fixture.create(cachePath) - _createCacheEntryStream(cachePath, function (err, stream, latest) { - if (err) throw err + fixOwner(cachePath) + return _createCacheEntryStream(cachePath, {}).then(({ + updateStream: stream, + updatedLatest: latest + }) => { t.equals(latest, 1234, '`latest` correctly extracted') t.ok(stream, 'returned a stream') - var results = [] - stream.on('data', function (pkg) { - results.push(pkg) - }) - ms.finished(stream, function (err) { - if (err) throw err + return getStream.array(stream).then(results => { t.deepEquals(results, [{ name: 'bar', version: '1.0.0' @@ -54,83 +60,53 @@ test('createCacheEntryStream basic', function (t) { name: 'foo', version: '1.0.0' }]) - cleanup() - t.done() }) }) }) -test('createCacheEntryStream empty cache', function (t) { +test('createCacheEntryStream empty cache', t => { setup() - var cachePath = path.join(CACHE_DIR, '.cache.json') - var fixture = new Tacks(File({})) + const cachePath = path.join(CACHE_DIR, '.cache.json') + const fixture = new Tacks(File({})) fixture.create(cachePath) - _createCacheEntryStream(cachePath, function (err, stream, latest) { - t.ok(err, 'returned an error because there was no _updated') - t.match(err.message, /Empty or invalid stream/, 'useful error message') - t.notOk(stream, 'no stream returned') - t.notOk(latest, 'no latest returned') - cleanup() - t.done() - }) + fixOwner(cachePath) + return _createCacheEntryStream(cachePath, {}).then( + () => { throw new Error('should not succeed') }, + err => { + t.ok(err, 'returned an error because there was no _updated') + t.match(err.message, /Empty or invalid stream/, 'useful error message') + } + ) }) -test('createCacheEntryStream no entry cache', function (t) { +test('createCacheEntryStream no entry cache', t => { setup() - var cachePath = path.join(CACHE_DIR, '.cache.json') - var fixture = new Tacks(File({ + const 
cachePath = path.join(CACHE_DIR, '.cache.json') + const fixture = new Tacks(File({ '_updated': 1234 })) fixture.create(cachePath) - _createCacheEntryStream(cachePath, function (err, stream, latest) { - if (err) throw err + fixOwner(cachePath) + return _createCacheEntryStream(cachePath, {}).then(({ + updateStream: stream, + updatedLatest: latest + }) => { t.equals(latest, 1234, '`latest` correctly extracted') t.ok(stream, 'returned a stream') - var results = [] - stream.on('data', function (pkg) { - results.push(pkg) - }) - ms.finished(stream, function (err) { - if (err) throw err + return getStream.array(stream).then(results => { t.deepEquals(results, [], 'no results') - cleanup() - t.done() }) }) }) -test('createCacheEntryStream missing cache', function (t) { - setup() - var cachePath = path.join(CACHE_DIR, '.cache.json') - _createCacheEntryStream(cachePath, function (err, stream, latest) { - t.ok(err, 'returned an error because there was no cache') - t.equals(err.code, 'ENOENT', 'useful error message') - t.notOk(stream, 'no stream returned') - t.notOk(latest, 'no latest returned') - cleanup() - t.done() - }) -}) - -test('createCacheEntryStream bad syntax', function (t) { +test('createCacheEntryStream missing cache', t => { setup() - var cachePath = path.join(CACHE_DIR, '.cache.json') - var fixture = new Tacks(File('{"_updated": 1234, uh oh')) - fixture.create(cachePath) - _createCacheEntryStream(cachePath, function (err, stream, latest) { - if (err) throw err - t.equals(latest, 1234, '`latest` correctly extracted') - t.ok(stream, 'returned a stream') - var results = [] - stream.on('data', function (pkg) { - results.push(pkg) - }) - ms.finished(stream, function (err) { - t.ok(err, 'stream errored') - t.match(err.message, /Invalid JSON/i, 'explains there\'s a syntax error') - t.deepEquals(results, [], 'no results') - cleanup() - t.done() - }) - }) + const cachePath = path.join(CACHE_DIR, '.cache.json') + return _createCacheEntryStream(cachePath, {}).then( + () => { throw new Error('should not succeed') }, + err => { + t.ok(err, 'returned an error because there was no cache') + t.equals(err.code, 'ENOENT', 'useful error message') + } + ) }) diff --git a/test/tap/all-package-metadata-entry-stream-unit.js b/test/tap/all-package-metadata-entry-stream-unit.js index 0e02f848246ce..164a34f4c50c8 100644 --- a/test/tap/all-package-metadata-entry-stream-unit.js +++ b/test/tap/all-package-metadata-entry-stream-unit.js @@ -1,36 +1,42 @@ 'use strict' -var common = require('../common-tap.js') -var npm = require('../../') -var test = require('tap').test -var mkdirp = require('mkdirp') -var rimraf = require('rimraf') -var path = require('path') -var mr = require('npm-registry-mock') -var ms = require('mississippi') -var Tacks = require('tacks') -var File = Tacks.File +const common = require('../common-tap.js') +const getStream = require('get-stream') +const mkdirp = require('mkdirp') +const mr = require('npm-registry-mock') +const npm = require('../../') +const path = require('path') +const Tacks = require('tacks') +const test = require('tap').test -var _createEntryStream = require('../../lib/search/all-package-metadata.js')._createEntryStream +const {File} = Tacks -var ALL = common.registry + '/-/all' -var PKG_DIR = path.resolve(__dirname, 'create-entry-update-stream') -var CACHE_DIR = path.resolve(PKG_DIR, 'cache') +const _createEntryStream = require('../../lib/search/all-package-metadata.js')._createEntryStream -var server +let server +// this test uses a fresh cache for each test block +// create them 
all in common.cache so that we can verify +// them for root-owned files in sudotest +let CACHE_DIR +let cacheCounter = 1 function setup () { + CACHE_DIR = common.cache + '/' + cacheCounter++ mkdirp.sync(CACHE_DIR) + fixOwner(CACHE_DIR) } -function cleanup () { - rimraf.sync(PKG_DIR) -} +const chownr = require('chownr') +const fixOwner = ( + process.getuid && process.getuid() === 0 && + process.env.SUDO_UID && process.env.SUDO_GID +) ? (path) => chownr.sync(path, +process.env.SUDO_UID, +process.env.SUDO_GID) + : () => {} -test('setup', function (t) { - mr({port: common.port, throwOnUnmatched: true}, function (err, s) { +test('setup', t => { + mr({port: common.port, throwOnUnmatched: true}, (err, s) => { t.ifError(err, 'registry mocked successfully') - npm.load({ cache: CACHE_DIR, registry: common.registry }, function (err) { + npm.load({ cache: CACHE_DIR, registry: common.registry }, err => { t.ifError(err, 'npm loaded successfully') server = s t.pass('all set up') @@ -39,10 +45,10 @@ test('setup', function (t) { }) }) -test('createEntryStream full request', function (t) { +test('createEntryStream full request', t => { setup() - var cachePath = path.join(CACHE_DIR, '.cache.json') - var dataTime = +(new Date()) + const cachePath = path.join(CACHE_DIR, '.cache.json') + const dataTime = +(new Date()) server.get('/-/all').once().reply(200, { '_updated': dataTime, 'bar': { name: 'bar', version: '1.0.0' }, @@ -50,37 +56,35 @@ test('createEntryStream full request', function (t) { }, { date: 1234 // should never be used. }) - _createEntryStream(cachePath, ALL, {}, 600, function (err, stream, latest, newEntries) { - if (err) throw err + return _createEntryStream(cachePath, 600, { + registry: common.registry + }).then(({ + entryStream: stream, + latest, + newEntries + }) => { t.equals(latest, dataTime, '`latest` correctly extracted') t.ok(newEntries, 'new entries need to be written to cache') t.ok(stream, 'returned a stream') - var results = [] - stream.on('data', function (pkg) { - results.push(pkg) - }) - ms.finished(stream, function (err) { - if (err) throw err - t.deepEquals(results, [{ - name: 'bar', - version: '1.0.0' - }, { - name: 'foo', - version: '1.0.0' - }]) - server.done() - cleanup() - t.end() - }) + return getStream.array(stream) + }).then(results => { + t.deepEquals(results, [{ + name: 'bar', + version: '1.0.0' + }, { + name: 'foo', + version: '1.0.0' + }]) + server.done() }) }) test('createEntryStream cache only', function (t) { setup() - var now = Date.now() - var cacheTime = now - 100000 - var cachePath = path.join(CACHE_DIR, '.cache.json') - var fixture = new Tacks(File({ + const now = Date.now() + const cacheTime = now - 100000 + const cachePath = path.join(CACHE_DIR, '.cache.json') + const fixture = new Tacks(File({ '_updated': cacheTime, bar: { name: 'bar', version: '1.0.0' }, cool: { name: 'cool', version: '1.0.0' }, @@ -88,33 +92,32 @@ test('createEntryStream cache only', function (t) { other: { name: 'other', version: '1.0.0' } })) fixture.create(cachePath) - _createEntryStream(cachePath, ALL, {}, 600, function (err, stream, latest, newEntries) { - if (err) throw err + fixOwner(cachePath) + return _createEntryStream(cachePath, 600, { + registry: common.registry + }).then(({ + entryStream: stream, + latest, + newEntries + }) => { t.equals(latest, cacheTime, '`latest` is cache time') t.ok(stream, 'returned a stream') t.notOk(newEntries, 'cache only means no need to write to cache') - var results = [] - stream.on('data', function (pkg) { - results.push(pkg) - }) - 
ms.finished(stream, function (err) { - t.ifError(err, 'stream finished without error') - t.deepEquals( - results.map(function (pkg) { return pkg.name }), - ['bar', 'cool', 'foo', 'other'], - 'packages deduped and sorted' - ) - server.done() - cleanup() - t.end() - }) + return getStream.array(stream) + }).then(results => { + t.deepEquals( + results.map(function (pkg) { return pkg.name }), + ['bar', 'cool', 'foo', 'other'], + 'packages deduped and sorted' + ) + server.done() }) }) test('createEntryStream merged stream', function (t) { setup() - var now = Date.now() - var cacheTime = now - 6000000 + const now = Date.now() + const cacheTime = now - 6000000 server.get('/-/all/since?stale=update_after&startkey=' + cacheTime).once().reply(200, { 'bar': { name: 'bar', version: '2.0.0' }, 'car': { name: 'car', version: '1.0.0' }, @@ -122,8 +125,8 @@ test('createEntryStream merged stream', function (t) { }, { date: (new Date(now)).toISOString() }) - var cachePath = path.join(CACHE_DIR, '.cache.json') - var fixture = new Tacks(File({ + const cachePath = path.join(CACHE_DIR, '.cache.json') + const fixture = new Tacks(File({ '_updated': cacheTime, bar: { name: 'bar', version: '1.0.0' }, cool: { name: 'cool', version: '1.0.0' }, @@ -131,56 +134,57 @@ test('createEntryStream merged stream', function (t) { other: { name: 'other', version: '1.0.0' } })) fixture.create(cachePath) - _createEntryStream(cachePath, ALL, {}, 600, function (err, stream, latest, newEntries) { - if (err) throw err + fixOwner(cachePath) + return _createEntryStream(cachePath, 600, { + registry: common.registry + }).then(({ + entryStream: stream, + latest, + newEntries + }) => { t.equals(latest, now, '`latest` correctly extracted from header') t.ok(stream, 'returned a stream') t.ok(newEntries, 'cache update means entries should be written') - var results = [] - stream.on('data', function (pkg) { - results.push(pkg) - }) - ms.finished(stream, function (err) { - t.ifError(err, 'stream finished without error') - t.deepEquals( - results.map(function (pkg) { return pkg.name }), - ['bar', 'car', 'cool', 'foo', 'other'], - 'packages deduped and sorted' - ) - t.deepEquals(results[0], { - name: 'bar', - version: '2.0.0' - }, 'update stream version wins on dedupe') - t.deepEquals(results[3], { - name: 'foo', - version: '1.0.0' - }, 'update stream version wins on dedupe even when the newer one has a lower semver.') - server.done() - cleanup() - t.end() - }) + return getStream.array(stream) + }).then(results => { + t.deepEquals( + results.map(function (pkg) { return pkg.name }), + ['bar', 'car', 'cool', 'foo', 'other'], + 'packages deduped and sorted' + ) + t.deepEquals(results[0], { + name: 'bar', + version: '2.0.0' + }, 'update stream version wins on dedupe') + t.deepEquals(results[3], { + name: 'foo', + version: '1.0.0' + }, 'update stream version wins on dedupe even when the newer one has a lower semver.') + server.done() }) }) test('createEntryStream no sources', function (t) { setup() - var cachePath = path.join(CACHE_DIR, '.cache.json') + const cachePath = path.join(CACHE_DIR, '.cache.json') server.get('/-/all').once().reply(404, {}) - _createEntryStream(cachePath, ALL, {}, 600, function (err, stream, latest, newEntries) { + return _createEntryStream(cachePath, 600, { + registry: common.registry + }).then(({ + entryStream: stream, + latest, + newEntries + }) => { + throw new Error('should not succeed') + }, err => { t.ok(err, 'no sources, got an error') - t.notOk(stream, 'no stream returned') - t.notOk(latest, 'no latest returned') - 
t.notOk(newEntries, 'no entries need to be written') t.match(err.message, /No search sources available/, 'useful error message') + }).then(() => { server.done() - cleanup() - t.end() }) }) test('cleanup', function (t) { - cleanup() server.close() - t.pass('all done') t.done() }) diff --git a/test/tap/all-package-metadata-update-stream-unit.js b/test/tap/all-package-metadata-update-stream-unit.js index b9cf337eb9a08..126fe9d398593 100644 --- a/test/tap/all-package-metadata-update-stream-unit.js +++ b/test/tap/all-package-metadata-update-stream-unit.js @@ -1,29 +1,34 @@ 'use strict' -var common = require('../common-tap.js') -var npm = require('../../') -var test = require('tap').test -var mkdirp = require('mkdirp') -var rimraf = require('rimraf') -var path = require('path') -var mr = require('npm-registry-mock') -var ms = require('mississippi') +const common = require('../common-tap.js') +const getStream = require('get-stream') +const npm = require('../../') +const test = require('tap').test +const mkdirp = require('mkdirp') +const mr = require('npm-registry-mock') var _createEntryUpdateStream = require('../../lib/search/all-package-metadata.js')._createEntryUpdateStream -var ALL = common.registry + '/-/all' -var PKG_DIR = path.resolve(__dirname, 'create-entry-update-stream') -var CACHE_DIR = path.resolve(PKG_DIR, 'cache') - var server +// this test uses a fresh cache for each test block +// create them all in common.cache so that we can verify +// them for root-owned files in sudotest +let CACHE_DIR +let cacheCounter = 1 function setup () { + CACHE_DIR = common.cache + '/' + cacheCounter++ mkdirp.sync(CACHE_DIR) + fixOwner(CACHE_DIR) } -function cleanup () { - rimraf.sync(PKG_DIR) -} +const chownr = require('chownr') + +const fixOwner = ( + process.getuid && process.getuid() === 0 && + process.env.SUDO_UID && process.env.SUDO_GID +) ? (path) => chownr.sync(path, +process.env.SUDO_UID, +process.env.SUDO_GID) + : () => {} test('setup', function (t) { mr({port: common.port, throwOnUnmatched: true}, function (err, s) { @@ -46,27 +51,24 @@ test('createEntryUpdateStream full request', function (t) { }, { date: Date.now() // should never be used. 
}) - _createEntryUpdateStream(ALL, {}, 600, 0, function (err, stream, latest) { - if (err) throw err + return _createEntryUpdateStream(600, 0, { + registry: common.registry + }).then(({ + updateStream: stream, + updatedLatest: latest + }) => { t.equals(latest, 1234, '`latest` correctly extracted') t.ok(stream, 'returned a stream') - var results = [] - stream.on('data', function (pkg) { - results.push(pkg) - }) - ms.finished(stream, function (err) { - if (err) throw err - t.deepEquals(results, [{ - name: 'bar', - version: '1.0.0' - }, { - name: 'foo', - version: '1.0.0' - }]) - server.done() - cleanup() - t.end() - }) + return getStream.array(stream) + }).then(results => { + t.deepEquals(results, [{ + name: 'bar', + version: '1.0.0' + }, { + name: 'foo', + version: '1.0.0' + }]) + server.done() }) }) @@ -79,27 +81,24 @@ test('createEntryUpdateStream partial update', function (t) { }, { date: (new Date(now)).toISOString() }) - _createEntryUpdateStream(ALL, {}, 600, 1234, function (err, stream, latest) { - if (err) throw err + return _createEntryUpdateStream(600, 1234, { + registry: common.registry + }).then(({ + updateStream: stream, + updatedLatest: latest + }) => { t.equals(latest, now, '`latest` correctly extracted from header') t.ok(stream, 'returned a stream') - var results = [] - stream.on('data', function (pkg) { - results.push(pkg) - }) - ms.finished(stream, function (err) { - if (err) throw err - t.deepEquals(results, [{ - name: 'bar', - version: '1.0.0' - }, { - name: 'foo', - version: '1.0.0' - }]) - server.done() - cleanup() - t.end() - }) + return getStream.array(stream) + }).then(results => { + t.deepEquals(results, [{ + name: 'bar', + version: '1.0.0' + }, { + name: 'foo', + version: '1.0.0' + }]) + server.done() }) }) @@ -113,27 +112,25 @@ test('createEntryUpdateStream authed request', function (t) { }, { date: Date.now() // should never be used. 
}) - _createEntryUpdateStream(ALL, { token: token }, 600, 0, function (err, stream, latest) { - if (err) throw err + return _createEntryUpdateStream(600, 0, { + registry: common.registry, + token + }).then(({ + updateStream: stream, + updatedLatest: latest + }) => { t.equals(latest, 1234, '`latest` correctly extracted') t.ok(stream, 'returned a stream') - var results = [] - stream.on('data', function (pkg) { - results.push(pkg) - }) - ms.finished(stream, function (err) { - if (err) throw err - t.deepEquals(results, [{ - name: 'bar', - version: '1.0.0' - }, { - name: 'foo', - version: '1.0.0' - }]) - server.done() - cleanup() - t.end() - }) + return getStream.array(stream) + }).then(results => { + t.deepEquals(results, [{ + name: 'bar', + version: '1.0.0' + }, { + name: 'foo', + version: '1.0.0' + }]) + server.done() }) }) @@ -143,14 +140,16 @@ test('createEntryUpdateStream bad auth', function (t) { server.get('/-/all', { authorization: 'Bearer ' + token }).once().reply(401, { error: 'unauthorized search request' }) - _createEntryUpdateStream(ALL, { token: token }, 600, 0, function (err, stream, latest) { + return _createEntryUpdateStream(600, 0, { + registry: common.registry, + token + }).then(() => { + throw new Error('should not succeed') + }, err => { t.ok(err, 'got an error from auth failure') - t.notOk(stream, 'no stream returned') - t.notOk(latest, 'no latest returned') t.match(err, /unauthorized/, 'failure message from request used') + }).then(() => { server.done() - cleanup() - t.end() }) }) @@ -158,19 +157,20 @@ test('createEntryUpdateStream not stale', function (t) { setup() var now = Date.now() var staleness = 600 - _createEntryUpdateStream(ALL, {}, staleness, now, function (err, stream, latest) { - t.ifError(err, 'completed successfully') + return _createEntryUpdateStream(staleness, now, { + registry: common.registry + }).then(({ + updateStream: stream, + updatedLatest: latest + }) => { t.notOk(stream, 'no stream returned') t.notOk(latest, 'no latest returned') server.done() - cleanup() t.end() }) }) test('cleanup', function (t) { - cleanup() server.close() - t.pass('all done') t.done() }) diff --git a/test/tap/all-package-metadata-write-stream-unit.js b/test/tap/all-package-metadata-write-stream-unit.js index 410f7f9e9d927..8cdfe96da05e2 100644 --- a/test/tap/all-package-metadata-write-stream-unit.js +++ b/test/tap/all-package-metadata-write-stream-unit.js @@ -1,26 +1,33 @@ 'use strict' -var common = require('../common-tap.js') -var npm = require('../../') -var test = require('tap').test -var mkdirp = require('mkdirp') -var rimraf = require('rimraf') -var path = require('path') -var fs = require('fs') -var ms = require('mississippi') +const common = require('../common-tap.js') +const getStream = require('get-stream') +const npm = require('../../') +const test = require('tap').test +const mkdirp = require('mkdirp') +const path = require('path') +const fs = require('fs') +const ms = require('mississippi') -var _createCacheWriteStream = require('../../lib/search/all-package-metadata.js')._createCacheWriteStream - -var PKG_DIR = path.resolve(__dirname, 'create-cache-write-stream') -var CACHE_DIR = path.resolve(PKG_DIR, 'cache') +const _createCacheWriteStream = require('../../lib/search/all-package-metadata.js')._createCacheWriteStream +// this test uses a fresh cache for each test block +// create them all in common.cache so that we can verify +// them for root-owned files in sudotest +let CACHE_DIR +let cacheCounter = 1 function setup () { + CACHE_DIR = common.cache + '/' + 
cacheCounter++ mkdirp.sync(CACHE_DIR) + fixOwner(CACHE_DIR) } -function cleanup () { - rimraf.sync(PKG_DIR) -} +const chownr = require('chownr') +const fixOwner = ( + process.getuid && process.getuid() === 0 && + process.env.SUDO_UID && process.env.SUDO_GID +) ? (path) => chownr.sync(path, +process.env.SUDO_UID, +process.env.SUDO_GID) + : () => {} function fromArray (array) { var idx = 0 @@ -46,60 +53,52 @@ test('createCacheEntryStream basic', function (t) { { name: 'foo', version: '1.0.0' } ] var srcStream = fromArray(src) - _createCacheWriteStream(cachePath, latest, function (err, stream) { - if (err) throw err + return _createCacheWriteStream(cachePath, latest, { + cache: CACHE_DIR + }).then(stream => { t.ok(stream, 'returned a stream') stream = ms.pipeline.obj(srcStream, stream) - var results = [] - stream.on('data', function (pkg) { - results.push(pkg) - }) - ms.finished(stream, function (err) { - if (err) throw err - t.deepEquals(results, [{ + return getStream.array(stream) + }).then(results => { + t.deepEquals(results, [{ + name: 'bar', + version: '1.0.0' + }, { + name: 'foo', + version: '1.0.0' + }]) + var fileData = JSON.parse(fs.readFileSync(cachePath)) + t.ok(fileData, 'cache contents written to the right file') + t.deepEquals(fileData, { + '_updated': latest, + bar: { name: 'bar', version: '1.0.0' - }, { + }, + foo: { name: 'foo', version: '1.0.0' - }]) - var fileData = JSON.parse(fs.readFileSync(cachePath)) - t.ok(fileData, 'cache contents written to the right file') - t.deepEquals(fileData, { - '_updated': latest, - bar: { - name: 'bar', - version: '1.0.0' - }, - foo: { - name: 'foo', - version: '1.0.0' - } - }, 'cache contents based on what was written') - cleanup() - t.done() - }) + } + }, 'cache contents based on what was written') }) }) test('createCacheEntryStream no entries', function (t) { - cleanup() // wipe out the cache dir - var cachePath = path.join(CACHE_DIR, '.cache.json') + setup() + const cachePath = path.join(CACHE_DIR, '.cache.json') var latest = 12345 - var src = [] - var srcStream = fromArray(src) - _createCacheWriteStream(cachePath, latest, function (err, stream) { - if (err) throw err + const src = [] + const srcStream = fromArray(src) + return _createCacheWriteStream(cachePath, latest, { + cache: CACHE_DIR + }).then(stream => { t.ok(stream, 'returned a stream') stream = ms.pipeline.obj(srcStream, stream) stream.resume() - ms.finished(stream, function (err) { - if (err) throw err - var fileData = JSON.parse(fs.readFileSync(cachePath)) - t.ok(fileData, 'cache file exists and has stuff in it') - cleanup() - t.done() - }) + return getStream(stream) + }).then(() => { + const fileData = JSON.parse(fs.readFileSync(cachePath)) + t.ok(fileData, 'cache file exists and has stuff in it') }) }) @@ -109,22 +108,18 @@ test('createCacheEntryStream missing cache dir', function (t) { var latest = 12345 var src = [] var srcStream = fromArray(src) - _createCacheWriteStream(cachePath, latest, function (err, stream) { - if (err) throw err + return _createCacheWriteStream(cachePath, latest, { + cache: CACHE_DIR + }).then(stream => { t.ok(stream, 'returned a stream') stream = ms.pipeline.obj(srcStream, stream) - stream.on('data', function (pkg) { - t.notOk(pkg, 'stream should not have output any data') - }) - ms.finished(stream, function (err) { - if (err) throw err - var fileData = JSON.parse(fs.readFileSync(cachePath)) - t.ok(fileData, 'cache contents written to the right file') - t.deepEquals(fileData, { - '_updated': latest - }, 'cache still contains `_updated`') - 
cleanup() - t.done() - }) + return getStream.array(stream) + }).then(res => { + t.deepEqual(res, [], 'no data returned') + var fileData = JSON.parse(fs.readFileSync(cachePath)) + t.ok(fileData, 'cache contents written to the right file') + t.deepEquals(fileData, { + '_updated': latest + }, 'cache still contains `_updated`') }) }) diff --git a/test/tap/all-package-metadata.js b/test/tap/all-package-metadata.js index 9b60822e4eb00..75afa9bad2c0c 100644 --- a/test/tap/all-package-metadata.js +++ b/test/tap/all-package-metadata.js @@ -1,41 +1,41 @@ 'use strict' -var common = require('../common-tap.js') -var npm = require('../../') -var test = require('tap').test -var mkdirp = require('mkdirp') -var rimraf = require('rimraf') -var path = require('path') -var fs = require('fs') -var cacheFile = require('npm-cache-filename') -var mr = require('npm-registry-mock') -var ms = require('mississippi') -var Tacks = require('tacks') -var File = Tacks.File +const common = require('../common-tap.js') +const npm = require('../../') +const test = require('tap').test +const mkdirp = require('mkdirp') +const rimraf = require('rimraf') +const path = require('path') +const fs = require('fs') +const cacheFile = require('npm-cache-filename') +const mr = require('npm-registry-mock') +const ms = require('mississippi') +const Tacks = require('tacks') +const File = Tacks.File -var allPackageMetadata = require('../../lib/search/all-package-metadata.js') +const allPackageMetadata = require('../../lib/search/all-package-metadata.js') -var PKG_DIR = path.resolve(__dirname, 'update-index') -var CACHE_DIR = path.resolve(PKG_DIR, 'cache') -var cacheBase -var cachePath +const PKG_DIR = path.resolve(common.cache, 'update-index') +const CACHE_DIR = path.resolve(PKG_DIR, 'cache', '_cacache') +let cacheBase +let cachePath -var server +let server function setup () { mkdirp.sync(cacheBase) } -function cleanup () { - rimraf.sync(PKG_DIR) +function cleanup (cb) { + rimraf(PKG_DIR, cb) } test('setup', function (t) { mr({port: common.port, throwOnUnmatched: true}, function (err, s) { t.ifError(err, 'registry mocked successfully') - npm.load({ cache: CACHE_DIR, registry: common.registry }, function (err) { + npm.load({ cache: path.dirname(CACHE_DIR), registry: common.registry }, function (err) { t.ifError(err, 'npm loaded successfully') - npm.config.set('cache', CACHE_DIR) + npm.config.set('cache', path.dirname(CACHE_DIR)) cacheBase = cacheFile(npm.config.get('cache'))(common.registry + '/-/all') cachePath = path.join(cacheBase, '.cache.json') server = s @@ -55,7 +55,11 @@ test('allPackageMetadata full request', function (t) { }, { date: updated }) - var stream = allPackageMetadata(600) + var stream = allPackageMetadata({ + cache: CACHE_DIR, + registry: common.registry, + staleness: 600 + }) t.ok(stream, 'returned a stream') var results = [] stream.on('data', function (pkg) { @@ -84,8 +88,7 @@ test('allPackageMetadata full request', function (t) { } }, 'cache contents based on what was written') server.done() - cleanup() - t.end() + cleanup(t.end) }) }) @@ -101,7 +104,11 @@ test('allPackageMetadata cache only', function (t) { } var fixture = new Tacks(File(cacheContents)) fixture.create(cachePath) - var stream = allPackageMetadata(10000000) + var stream = allPackageMetadata({ + cache: CACHE_DIR, + registry: common.registry, + staleness: 10000000 + }) t.ok(stream, 'returned a stream') var results = [] stream.on('data', function (pkg) { @@ -118,8 +125,7 @@ test('allPackageMetadata cache only', function (t) { t.ok(fileData, 'cache contents 
written to the right file') t.deepEquals(fileData, cacheContents, 'cacheContents written directly') server.done() - cleanup() - t.end() + cleanup(t.end) }) }) @@ -143,7 +149,11 @@ test('createEntryStream merged stream', function (t) { other: { name: 'other', version: '1.0.0' } })) fixture.create(cachePath) - var stream = allPackageMetadata(600) + var stream = allPackageMetadata({ + cache: CACHE_DIR, + registry: common.registry, + staleness: 600 + }) t.ok(stream, 'returned a stream') var results = [] stream.on('data', function (pkg) { @@ -176,27 +186,27 @@ test('createEntryStream merged stream', function (t) { t.ok(fileData, 'cache contents written to the right file') t.deepEquals(fileData, cacheContents, 'cache updated correctly') server.done() - cleanup() - t.end() + cleanup(t.end) }) }) test('allPackageMetadata no sources', function (t) { setup() server.get('/-/all').once().reply(404, {}) - var stream = allPackageMetadata(600) + var stream = allPackageMetadata({ + cache: CACHE_DIR, + registry: common.registry, + staleness: 600 + }) ms.finished(stream, function (err) { t.ok(err, 'no sources, got an error') t.match(err.message, /No search sources available/, 'useful error message') server.done() - cleanup() - t.end() + cleanup(t.end) }) }) test('cleanup', function (t) { - cleanup() server.close() - t.pass('all done') - t.done() + cleanup(t.end) }) diff --git a/test/tap/anon-cli-metrics.js b/test/tap/anon-cli-metrics.js index 100ca526cf168..729d9e607a4a0 100644 --- a/test/tap/anon-cli-metrics.js +++ b/test/tap/anon-cli-metrics.js @@ -9,9 +9,9 @@ var File = Tacks.File var Dir = Tacks.Dir var common = require('../common-tap.js') -var basedir = path.join(__dirname, path.basename(__filename, '.js')) +var basedir = common.pkg var testdir = path.join(basedir, 'testdir') -var cachedir = path.join(basedir, 'cache') +var cachedir = common.cache var globaldir = path.join(basedir, 'global') var tmpdir = path.join(basedir, 'tmp') var metricsFile = path.join(cachedir, 'anonymous-cli-metrics.json') @@ -54,7 +54,7 @@ var fixture = new Tacks(Dir({ name: 'slow', version: '1.0.0', scripts: { - preinstall: "node -e 'setTimeout(function(){}, 500)'" + preinstall: 'node -e "setTimeout(function(){}, 500)"' } }) }), diff --git a/test/tap/audit-fix.js b/test/tap/audit-fix.js index 7e95568289005..a832078ae9369 100644 --- a/test/tap/audit-fix.js +++ b/test/tap/audit-fix.js @@ -13,7 +13,7 @@ const test = tap.test const Dir = Tacks.Dir const File = Tacks.File -const testDir = path.join(__dirname, path.basename(__filename, '.js')) +const testDir = common.pkg const EXEC_OPTS = { cwd: testDir } diff --git a/test/tap/audit.js b/test/tap/audit.js index 3384579f77ba7..ca3da87a3af62 100644 --- a/test/tap/audit.js +++ b/test/tap/audit.js @@ -12,21 +12,10 @@ const test = tap.test const Dir = Tacks.Dir const File = Tacks.File -const testDir = path.join(__dirname, path.basename(__filename, '.js')) +const testDir = common.pkg const EXEC_OPTS = { cwd: testDir } -tap.tearDown(function () { - process.chdir(__dirname) - try { - rimraf.sync(testDir) - } catch (e) { - if (process.platform !== 'win32') { - throw e - } - } -}) - function tmock (t) { return mr({port: common.port}).then(s => { t.tearDown(function () { @@ -38,6 +27,66 @@ function tmock (t) { }) } +const quickAuditResult = { + actions: [], + advisories: { + '1316': { + findings: [ + { + version: '1.0.0', + paths: [ + 'baddep' + ] + } + ], + 'id': 1316, + 'created': '2019-11-14T15:29:41.991Z', + 'updated': '2019-11-14T19:35:30.677Z', + 'deleted': null, + 'title': 'Arbitrary 
Code Execution', + 'found_by': { + 'link': '', + 'name': 'François Lajeunesse-Robert', + 'email': '' + }, + 'reported_by': { + 'link': '', + 'name': 'François Lajeunesse-Robert', + 'email': '' + }, + 'module_name': 'baddep', + 'cves': [], + 'vulnerable_versions': '<4.5.2', + 'patched_versions': '>=4.5.2', + 'overview': 'a nice overview of the advisory', + 'recommendation': 'how you should fix it', + 'references': '', + 'access': 'public', + 'severity': 'high', + 'cwe': 'CWE-79', + 'metadata': { + 'module_type': '', + 'exploitability': 6, + 'affected_components': '' + }, + 'url': 'https://npmjs.com/advisories/1234542069' + } + }, + 'muted': [], + 'metadata': { + 'vulnerabilities': { + 'info': 0, + 'low': 0, + 'moderate': 0, + 'high': 1, + 'critical': 0 + }, + 'dependencies': 1, + 'devDependencies': 0, + 'totalDependencies': 1 + } +} + test('exits with zero exit code for vulnerabilities below the `audit-level` flag', t => { const fixture = new Tacks(new Dir({ 'package.json': new File({ @@ -51,7 +100,7 @@ test('exits with zero exit code for vulnerabilities below the `audit-level` flag fixture.create(testDir) return tmock(t).then(srv => { srv.filteringRequestBody(req => 'ok') - srv.post('/-/npm/v1/security/audits/quick', 'ok').reply(200, 'yeah') + srv.post('/-/npm/v1/security/audits/quick', 'ok').reply(200, quickAuditResult) srv.get('/baddep').twice().reply(200, { name: 'baddep', 'dist-tags': { @@ -86,6 +135,8 @@ test('exits with zero exit code for vulnerabilities below the `audit-level` flag '--registry', common.registry, '--cache', path.join(testDir, 'npm-cache') ], EXEC_OPTS).then(([code, stdout, stderr]) => { + const result = JSON.parse(stdout) + t.same(result.audit, quickAuditResult, 'printed quick audit result') srv.filteringRequestBody(req => 'ok') srv.post('/-/npm/v1/security/audits', 'ok').reply(200, { actions: [{ @@ -113,6 +164,62 @@ test('exits with zero exit code for vulnerabilities below the `audit-level` flag }) }) +test('shows quick audit results summary for human', t => { + const fixture = new Tacks(new Dir({ + 'package.json': new File({ + name: 'foo', + version: '1.0.0', + dependencies: { + baddep: '1.0.0' + } + }) + })) + fixture.create(testDir) + return tmock(t).then(srv => { + srv.filteringRequestBody(req => 'ok') + srv.post('/-/npm/v1/security/audits/quick', 'ok').reply(200, quickAuditResult) + srv.get('/baddep').twice().reply(200, { + name: 'baddep', + 'dist-tags': { + 'latest': '1.2.3' + }, + versions: { + '1.0.0': { + name: 'baddep', + version: '1.0.0', + _hasShrinkwrap: false, + dist: { + shasum: 'deadbeef', + tarball: common.registry + '/idk/-/idk-1.0.0.tgz' + } + }, + '1.2.3': { + name: 'baddep', + version: '1.2.3', + _hasShrinkwrap: false, + dist: { + shasum: 'deadbeef', + tarball: common.registry + '/idk/-/idk-1.2.3.tgz' + } + } + } + }) + return common.npm([ + 'install', + '--audit', + '--no-json', + '--package-lock-only', + '--registry', common.registry, + '--cache', path.join(testDir, 'npm-cache') + ], EXEC_OPTS).then(([code, stdout, stderr]) => { + t.match(stdout, new RegExp('added 1 package and audited 1 package in .*\\n' + + 'found 1 high severity vulnerability\\n' + + ' run `npm audit fix` to fix them, or `npm audit` for details\\n'), + 'shows quick audit result') + }) + }) +}) + test('exits with non-zero exit code for vulnerabilities at the `audit-level` flag', t => { const fixture = new Tacks(new Dir({ 'package.json': new File({ @@ -263,6 +370,209 @@ test('exits with non-zero exit code for vulnerabilities at the `audit-level` fla }) }) +test('exits with 
zero exit code for vulnerabilities in devDependencies when running with production flag', t => { + const fixture = new Tacks(new Dir({ + 'package.json': new File({ + name: 'foo', + version: '1.0.0', + dependencies: { + gooddep: '1.0.0' + }, + devDependencies: { + baddep: '1.0.0' + } + }) + })) + fixture.create(testDir) + return tmock(t).then(srv => { + srv.filteringRequestBody(req => 'ok') + srv.post('/-/npm/v1/security/audits/quick', 'ok').reply(200, 'yeah') + srv.get('/gooddep').twice().reply(200, { + name: 'gooddep', + 'dist-tags': { + 'latest': '1.2.3' + }, + versions: { + '1.0.0': { + name: 'gooddep', + version: '1.0.0', + _hasShrinkwrap: false, + dist: { + shasum: 'deadbeef', + tarball: common.registry + '/idk/-/idk-1.0.0.tgz' + } + }, + '1.2.3': { + name: 'gooddep', + version: '1.2.3', + _hasShrinkwrap: false, + dist: { + shasum: 'deadbeef', + tarball: common.registry + '/idk/-/idk-1.2.3.tgz' + } + } + } + }) + srv.get('/baddep').twice().reply(200, { + name: 'baddep', + 'dist-tags': { + 'latest': '1.2.3' + }, + versions: { + '1.0.0': { + name: 'baddep', + version: '1.0.0', + _hasShrinkwrap: false, + dist: { + shasum: 'deadbeef', + tarball: common.registry + '/idk/-/idk-1.0.0.tgz' + } + }, + '1.2.3': { + name: 'baddep', + version: '1.2.3', + _hasShrinkwrap: false, + dist: { + shasum: 'deadbeef', + tarball: common.registry + '/idk/-/idk-1.2.3.tgz' + } + } + } + }) + return common.npm([ + 'install', + '--audit', + '--json', + '--production', + '--package-lock-only', + '--registry', common.registry, + '--cache', path.join(testDir, 'npm-cache') + ], EXEC_OPTS).then(([code, stdout, stderr]) => { + srv.filteringRequestBody(req => 'ok') + srv.post('/-/npm/v1/security/audits', 'ok').reply(200, { + actions: [], + metadata: { + vulnerabilities: {} + } + }) + return common.npm([ + 'audit', + '--json', + '--production', + '--registry', common.registry, + '--cache', path.join(testDir, 'npm-cache') + ], EXEC_OPTS).then(([code, stdout, stderr]) => { + t.equal(code, 0, 'exited OK') + }) + }) + }) +}) + +test('exits with non-zero exit code for vulnerabilities in dependencies when running with production flag', t => { + const fixture = new Tacks(new Dir({ + 'package.json': new File({ + name: 'foo', + version: '1.0.0', + dependencies: { + baddep: '1.0.0' + }, + devDependencies: { + gooddep: '1.0.0' + } + }) + })) + fixture.create(testDir) + return tmock(t).then(srv => { + srv.filteringRequestBody(req => 'ok') + srv.post('/-/npm/v1/security/audits/quick', 'ok').reply(200, 'yeah') + srv.get('/baddep').twice().reply(200, { + name: 'baddep', + 'dist-tags': { + 'latest': '1.2.3' + }, + versions: { + '1.0.0': { + name: 'baddep', + version: '1.0.0', + _hasShrinkwrap: false, + dist: { + shasum: 'deadbeef', + tarball: common.registry + '/idk/-/idk-1.0.0.tgz' + } + }, + '1.2.3': { + name: 'baddep', + version: '1.2.3', + _hasShrinkwrap: false, + dist: { + shasum: 'deadbeef', + tarball: common.registry + '/idk/-/idk-1.2.3.tgz' + } + } + } + }) + srv.get('/gooddep').twice().reply(200, { + name: 'gooddep', + 'dist-tags': { + 'latest': '1.2.3' + }, + versions: { + '1.0.0': { + name: 'gooddep', + version: '1.0.0', + _hasShrinkwrap: false, + dist: { + shasum: 'deadbeef', + tarball: common.registry + '/idk/-/idk-1.0.0.tgz' + } + }, + '1.2.3': { + name: 'gooddep', + version: '1.2.3', + _hasShrinkwrap: false, + dist: { + shasum: 'deadbeef', + tarball: common.registry + '/idk/-/idk-1.2.3.tgz' + } + } + } + }) + return common.npm([ + 'install', + '--audit', + '--json', + '--production', + '--package-lock-only', + 
'--registry', common.registry, + '--cache', path.join(testDir, 'npm-cache') + ], EXEC_OPTS).then(([code, stdout, stderr]) => { + srv.filteringRequestBody(req => 'ok') + srv.post('/-/npm/v1/security/audits', 'ok').reply(200, { + actions: [{ + action: 'update', + module: 'baddep', + target: '1.2.3', + resolves: [{path: 'baddep'}] + }], + metadata: { + vulnerabilities: { + low: 1 + } + } + }) + return common.npm([ + 'audit', + '--json', + '--production', + '--registry', common.registry, + '--cache', path.join(testDir, 'npm-cache') + ], EXEC_OPTS).then(([code, stdout, stderr]) => { + t.equal(code, 1, 'exited OK') + }) + }) + }) +}) + test('cleanup', t => { return rimraf(testDir) }) diff --git a/test/tap/auto-prune.js b/test/tap/auto-prune.js index 3fa5d5d05b360..aab3692a3b242 100644 --- a/test/tap/auto-prune.js +++ b/test/tap/auto-prune.js @@ -7,9 +7,9 @@ const File = Tacks.File const Dir = Tacks.Dir const common = require('../common-tap.js') -const basedir = path.join(__dirname, path.basename(__filename, '.js')) +const basedir = common.pkg const testdir = path.join(basedir, 'testdir') -const cachedir = path.join(basedir, 'cache') +const cachedir = common.cache const globaldir = path.join(basedir, 'global') const tmpdir = path.join(basedir, 'tmp') diff --git a/test/tap/bearer-token-check.js b/test/tap/bearer-token-check.js index e3af793e602e5..86602b303c9f0 100644 --- a/test/tap/bearer-token-check.js +++ b/test/tap/bearer-token-check.js @@ -4,14 +4,12 @@ var writeFileSync = require('graceful-fs').writeFileSync var fs = require('fs') var mkdirp = require('mkdirp') var http = require('http') -var osenv = require('osenv') -var rimraf = require('rimraf') -var test = require('tap').test +const t = require('tap') var common = require('../common-tap.js') var toNerfDart = require('../../lib/config/nerf-dart.js') -var pkg = resolve(__dirname, 'install-bearer-check') +var pkg = common.pkg var outfile = resolve(pkg, '_npmrc') var modules = resolve(pkg, 'node_modules') var tarballPath = '/scoped-underscore/-/scoped-underscore-1.3.1.tgz' @@ -38,14 +36,42 @@ server.on('request', (req, res) => { } }) -test('setup', function (t) { - server.listen(common.port, () => { - setup() - t.done() - }) +var contents = '@scoped:registry=' + common.registry + '\n' + + toNerfDart(common.registry) + ':_authToken=0xabad1dea\n' + +var json = { + name: 'test-package-install', + version: '1.0.0', + dependencies: { + '@scoped/underscore': '1.3.1' + } +} + +var shrinkwrap = { + name: 'test-package-install', + version: '1.0.0', + dependencies: { + '@scoped/underscore': { + resolved: tarballURL, + version: '1.3.1' + } + } +} + +t.teardown(() => server.close()) + +t.test('setup', function (t) { + mkdirp.sync(modules) + writeFileSync(resolve(pkg, 'package.json'), JSON.stringify(json, null, 2) + '\n') + writeFileSync(outfile, contents) + writeFileSync( + resolve(pkg, 'npm-shrinkwrap.json'), + JSON.stringify(shrinkwrap, null, 2) + '\n' + ) + server.listen(common.port, t.end) }) -test('authed npm install with tarball not on registry', function (t) { +t.test('authed npm install with tarball not on registry', function (t) { common.npm( [ 'install', @@ -81,48 +107,3 @@ test('authed npm install with tarball not on registry', function (t) { } ) }) - -test('cleanup', function (t) { - server.close(() => { - cleanup() - t.end() - }) -}) - -var contents = '@scoped:registry=' + common.registry + '\n' + - toNerfDart(common.registry) + ':_authToken=0xabad1dea\n' - -var json = { - name: 'test-package-install', - version: '1.0.0', - dependencies: { 
- '@scoped/underscore': '1.3.1' - } -} - -var shrinkwrap = { - name: 'test-package-install', - version: '1.0.0', - dependencies: { - '@scoped/underscore': { - resolved: tarballURL, - version: '1.3.1' - } - } -} - -function setup () { - cleanup() - mkdirp.sync(modules) - writeFileSync(resolve(pkg, 'package.json'), JSON.stringify(json, null, 2) + '\n') - writeFileSync(outfile, contents) - writeFileSync( - resolve(pkg, 'npm-shrinkwrap.json'), - JSON.stringify(shrinkwrap, null, 2) + '\n' - ) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/bin-overwriting.js b/test/tap/bin-overwriting.js new file mode 100644 index 0000000000000..faaa78745a742 --- /dev/null +++ b/test/tap/bin-overwriting.js @@ -0,0 +1,108 @@ +const t = require('tap') +const common = require('../common-tap.js') +const pkg = common.pkg + +const { writeFileSync, readFileSync, readlink } = require('fs') +const readCmdShim = require('read-cmd-shim') +const path = require('path') +const readBinCb = process.platform === 'win32' ? readCmdShim : readlink +const readBin = bin => new Promise((resolve, reject) => { + readBinCb(bin, (er, target) => { + if (er) { + reject(er) + } else { + resolve(path.resolve(path.dirname(bin), target)) + } + }) +}) + +// verify that we can't overwrite bins that we shouldn't be able to + +const mkdirp = require('mkdirp').sync + +process.env.npm_config_prefix = pkg + '/global' +process.env.npm_config_global = true + +const globalBin = process.platform === 'win32' + ? path.resolve(pkg, 'global') + : path.resolve(pkg, 'global/bin') + +const globalDir = process.platform === 'win32' + ? path.resolve(pkg, 'global/node_modules') + : path.resolve(pkg, 'global/lib/node_modules') + +const beep = path.resolve(globalBin, 'beep') +const firstBin = path.resolve(globalDir, 'first/first.js') +const secondBin = path.resolve(globalDir, 'second/second.js') + +t.test('setup', { bail: true }, t => { + // set up non-link bin in place + mkdirp(globalBin) + writeFileSync(beep, 'beep boop') + + // create first package + mkdirp(pkg + '/first') + writeFileSync(pkg + '/first/package.json', JSON.stringify({ + name: 'first', + version: '1.0.0', + bin: { beep: 'first.js' } + })) + writeFileSync(pkg + '/first/first.js', `#!/usr/bin/env node + console.log('first')`) + + // create second package + mkdirp(pkg + '/second') + writeFileSync(pkg + '/second/package.json', JSON.stringify({ + name: 'second', + version: '1.0.0', + bin: { beep: 'second.js' } + })) + writeFileSync(pkg + '/second/second.js', `#!/usr/bin/env node + console.log('second')`) + + // pack both to install globally + return common.npm(['pack'], { cwd: pkg + '/first' }) + .then(() => common.npm(['pack'], { cwd: pkg + '/second' })) +}) + +t.test('installing first fails, because pre-existing bin in place', t => { + return common.npm([ + 'install', + pkg + '/first/first-1.0.0.tgz' + ]).then(([code, stdout, stderr]) => { + t.notEqual(code, 0) + t.match(stderr, 'EEXIST') + t.equal(readFileSync(beep, 'utf8'), 'beep boop') + }) +}) + +t.test('installing first with --force succeeds', t => { + return common.npm([ + 'install', + pkg + '/first/first-1.0.0.tgz', + '--force' + ]).then(() => { + return t.resolveMatch(readBin(beep), firstBin, 'bin written to first.js') + }) +}) + +t.test('installing second fails, because bin links to other package', t => { + return common.npm([ + 'install', + pkg + '/second/second-1.0.0.tgz' + ]).then(([code, stdout, stderr]) => { + t.notEqual(code, 0) + t.match(stderr, 'EEXIST') + return 
t.resolveMatch(readBin(beep), firstBin, 'bin still linked to first') + }) +}) + +t.test('installing second with --force succeeds', t => { + return common.npm([ + 'install', + pkg + '/second/second-1.0.0.tgz', + '--force' + ]).then(() => { + return t.resolveMatch(readBin(beep), secondBin, 'bin written to second.js') + }) +}) diff --git a/test/tap/bin.js b/test/tap/bin.js index 33320bc21f59a..bf2397777ce40 100644 --- a/test/tap/bin.js +++ b/test/tap/bin.js @@ -2,12 +2,12 @@ var path = require('path') var test = require('tap').test var rimraf = require('rimraf') var common = require('../common-tap.js') -var opts = { cwd: __dirname } -var binDir = '../../node_modules/.bin' -var fixture = path.resolve(__dirname, binDir) +var opts = { cwd: common.pkg } +var binDir = '../../../node_modules/.bin' +var fixture = path.resolve(common.pkg, binDir) test('setup', function (t) { - rimraf.sync(path.join(__dirname, 'node_modules')) + rimraf.sync(path.join(common.pkg, 'node_modules')) t.end() }) diff --git a/test/tap/bitbucket-https-url-with-creds-package.js b/test/tap/bitbucket-https-url-with-creds-package.js index ce65f15b28fba..f0f14dcb34475 100644 --- a/test/tap/bitbucket-https-url-with-creds-package.js +++ b/test/tap/bitbucket-https-url-with-creds-package.js @@ -5,15 +5,12 @@ const BB = require('bluebird') var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') -var osenv = require('osenv') var requireInject = require('require-inject') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'bitbucket-https-url-with-creds-package') +var pkg = common.pkg var json = { name: 'bitbucket-https-url-with-creds-package', @@ -50,7 +47,7 @@ test('bitbucket-https-url-with-creds-package', function (t) { }) var opts = { - cache: path.resolve(pkg, 'cache'), + cache: common.cache, prefix: pkg, registry: common.registry, loglevel: 'silent' @@ -64,22 +61,10 @@ test('bitbucket-https-url-with-creds-package', function (t) { }) }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - function setup () { - cleanup() - mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) ) process.chdir(pkg) } - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/bitbucket-https-url-with-creds.js b/test/tap/bitbucket-https-url-with-creds.js index ae87d85848752..703d0d9a6ab60 100644 --- a/test/tap/bitbucket-https-url-with-creds.js +++ b/test/tap/bitbucket-https-url-with-creds.js @@ -6,14 +6,12 @@ var fs = require('graceful-fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') var requireInject = require('require-inject') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'bitbucket-https-url-with-creds') +var pkg = common.pkg var json = { name: 'bitbucket-https-url-with-creds', @@ -47,7 +45,7 @@ test('bitbucket-https-url-with-creds', function (t) { }) var opts = { - cache: path.resolve(pkg, 'cache'), + cache: common.cache, prefix: pkg, registry: common.registry, loglevel: 'silent' @@ -61,13 +59,7 @@ test('bitbucket-https-url-with-creds', function (t) { }) }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - function setup () { - cleanup() mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), @@ -75,8 +67,3 @@ function setup () { ) process.chdir(pkg) } - -function cleanup () { - 
process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/bitbucket-shortcut-package.js b/test/tap/bitbucket-shortcut-package.js index 8c418d37dcd0a..a148c598c6870 100644 --- a/test/tap/bitbucket-shortcut-package.js +++ b/test/tap/bitbucket-shortcut-package.js @@ -6,14 +6,12 @@ var fs = require('graceful-fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') var requireInject = require('require-inject') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'bitbucket-shortcut-package') +var pkg = common.pkg var json = { name: 'bitbucket-shortcut-package', @@ -51,7 +49,7 @@ test('bitbucket-shortcut', function (t) { }) var opts = { - cache: path.resolve(pkg, 'cache'), + cache: common.cache, prefix: pkg, registry: common.registry, loglevel: 'silent' @@ -65,13 +63,7 @@ test('bitbucket-shortcut', function (t) { }) }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - function setup () { - cleanup() mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), @@ -79,8 +71,3 @@ function setup () { ) process.chdir(pkg) } - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/bitbucket-shortcut.js b/test/tap/bitbucket-shortcut.js index f8167fdd2dce2..6d750f869a306 100644 --- a/test/tap/bitbucket-shortcut.js +++ b/test/tap/bitbucket-shortcut.js @@ -6,14 +6,12 @@ var fs = require('graceful-fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') var requireInject = require('require-inject') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'bitbucket-shortcut') +var pkg = common.pkg var json = { name: 'bitbucket-shortcut', @@ -48,7 +46,7 @@ test('bitbucket-shortcut', function (t) { }) var opts = { - cache: path.resolve(pkg, 'cache'), + cache: common.cache, prefix: pkg, registry: common.registry, loglevel: 'silent' @@ -62,13 +60,7 @@ test('bitbucket-shortcut', function (t) { }) }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - function setup () { - cleanup() mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), @@ -76,8 +68,3 @@ function setup () { ) process.chdir(pkg) } - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/bugs.js b/test/tap/bugs.js index 4d2ee37e132d1..8e2685220dd93 100644 --- a/test/tap/bugs.js +++ b/test/tap/bugs.js @@ -8,15 +8,15 @@ var rimraf = require('rimraf') var fs = require('fs') var path = require('path') var join = path.join -var outFile = path.join(__dirname, '/_output') +var outFile = path.join(common.pkg, '/_output') -var opts = { cwd: __dirname } +var opts = { cwd: common.pkg } test('setup', function (t) { var s = '#!/usr/bin/env bash\n' + - 'echo "$@" > ' + JSON.stringify(__dirname) + '/_output\n' - fs.writeFileSync(join(__dirname, '/_script.sh'), s, 'ascii') - fs.chmodSync(join(__dirname, '/_script.sh'), '0755') + 'echo "$@" > ' + JSON.stringify(common.pkg) + '/_output\n' + fs.writeFileSync(join(common.pkg, '/_script.sh'), s, 'ascii') + fs.chmodSync(join(common.pkg, '/_script.sh'), '0755') t.pass('made script') t.end() }) @@ -28,7 +28,7 @@ test('npm bugs underscore', function (t) { 'bugs', 'underscore', '--registry=' + common.registry, '--loglevel=silent', - '--browser=' + join(__dirname, '/_script.sh') + '--browser=' + join(common.pkg, '/_script.sh') ], opts, function (err, 
code, stdout, stderr) { @@ -52,7 +52,7 @@ test('npm bugs optimist - github (https://)', function (t) { 'bugs', 'optimist', '--registry=' + common.registry, '--loglevel=silent', - '--browser=' + join(__dirname, '/_script.sh') + '--browser=' + join(common.pkg, '/_script.sh') ], opts, function (err, code, stdout, stderr) { @@ -76,7 +76,7 @@ test('npm bugs npm-test-peer-deps - no repo', function (t) { 'bugs', 'npm-test-peer-deps', '--registry=' + common.registry, '--loglevel=silent', - '--browser=' + join(__dirname, '/_script.sh') + '--browser=' + join(common.pkg, '/_script.sh') ], opts, function (err, code, stdout, stderr) { @@ -100,7 +100,7 @@ test('npm bugs test-repo-url-http - non-github (http://)', function (t) { 'bugs', 'test-repo-url-http', '--registry=' + common.registry, '--loglevel=silent', - '--browser=' + join(__dirname, '/_script.sh') + '--browser=' + join(common.pkg, '/_script.sh') ], opts, function (err, code, stdout, stderr) { @@ -124,7 +124,7 @@ test('npm bugs test-repo-url-https - gitlab (https://)', function (t) { 'bugs', 'test-repo-url-https', '--registry=' + common.registry, '--loglevel=silent', - '--browser=' + join(__dirname, '/_script.sh') + '--browser=' + join(common.pkg, '/_script.sh') ], opts, function (err, code, stdout, stderr) { @@ -148,7 +148,7 @@ test('npm bugs test-repo-url-ssh - gitlab (ssh://)', function (t) { 'bugs', 'test-repo-url-ssh', '--registry=' + common.registry, '--loglevel=silent', - '--browser=' + join(__dirname, '/_script.sh') + '--browser=' + join(common.pkg, '/_script.sh') ], opts, function (err, code, stdout, stderr) { @@ -166,7 +166,7 @@ test('npm bugs test-repo-url-ssh - gitlab (ssh://)', function (t) { }) test('cleanup', function (t) { - fs.unlinkSync(join(__dirname, '/_script.sh')) + rimraf.sync(common.pkg) t.pass('cleaned up') t.end() }) diff --git a/test/tap/build-already-built.js b/test/tap/build-already-built.js index 8a66f66e69f93..3410432ab2244 100644 --- a/test/tap/build-already-built.js +++ b/test/tap/build-already-built.js @@ -2,22 +2,15 @@ // message "already built" should not be error var test = require('tap').test var path = require('path') -var osenv = require('osenv') -var rimraf = require('rimraf') var npmlog = require('npmlog') var mkdirp = require('mkdirp') var requireInject = require('require-inject') var npm = require('../../lib/npm.js') -var PKG_DIR = path.resolve(__dirname, 'build-already-built') -var fakePkg = 'foo' - -test('setup', function (t) { - cleanup() - - t.end() -}) +const common = require('../common-tap.js') +var PKG_DIR = common.pkg +var fakePkg = path.resolve(PKG_DIR, 'foo') test("issue #6735 build 'already built' message", function (t) { npm.load({ loglevel: 'warn' }, function () { @@ -65,14 +58,3 @@ test("issue #6735 build 'already built' message", function (t) { t.end() }) }) - -test('cleanup', function (t) { - cleanup() - - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(PKG_DIR) -} diff --git a/test/tap/builtin-config.js b/test/tap/builtin-config.js index b960ed0383b7b..dddd40565101a 100644 --- a/test/tap/builtin-config.js +++ b/test/tap/builtin-config.js @@ -10,7 +10,7 @@ var common = require('../common-tap.js') var path = require('path') var rimraf = require('rimraf') var mkdirp = require('mkdirp') -var folder = path.resolve(__dirname, 'builtin-config') +var folder = common.pkg var test = require('tap').test var npm = path.resolve(__dirname, '../..') var spawn = require('child_process').spawn diff --git a/test/tap/bundled-dependencies-nonarray.js 
b/test/tap/bundled-dependencies-nonarray.js index 924e1b27b60a3..fdc32c2883af1 100644 --- a/test/tap/bundled-dependencies-nonarray.js +++ b/test/tap/bundled-dependencies-nonarray.js @@ -9,7 +9,7 @@ var test = require('tap').test var common = require('../common-tap.js') -var dir = path.resolve(__dirname, path.basename(__filename, '.js')) +var dir = common.pkg var pkg = path.resolve(dir, 'pkg-with-bundled') var dep = path.resolve(dir, 'a-bundled-dep') diff --git a/test/tap/bundled-dependencies.js b/test/tap/bundled-dependencies.js index c6d67e3c503a5..6dbfa8cb08d74 100644 --- a/test/tap/bundled-dependencies.js +++ b/test/tap/bundled-dependencies.js @@ -6,7 +6,7 @@ var rimraf = require('rimraf') var mkdirp = require('mkdirp') var fs = require('graceful-fs') var tar = require('tar') -var basepath = path.resolve(__dirname, path.basename(__filename, '.js')) +var basepath = common.pkg var fixturepath = path.resolve(basepath, 'npm-test-bundled-deps') var targetpath = path.resolve(basepath, 'target') var Tacks = require('tacks') diff --git a/test/tap/bundled-transitive-deps.js b/test/tap/bundled-transitive-deps.js index fe59149062636..d3f296ab1612f 100644 --- a/test/tap/bundled-transitive-deps.js +++ b/test/tap/bundled-transitive-deps.js @@ -9,7 +9,7 @@ var common = require('../common-tap.js') var npm = require('../../lib/npm.js') var tar = require('tar') var mkdirp = require('mkdirp') -var testdir = path.join(__dirname, path.basename(__filename, '.js')) +var testdir = common.pkg var packed = path.join(testdir, 'packed') var fixture = new Tacks( diff --git a/test/tap/cache-add-unpublished.js b/test/tap/cache-add-unpublished.js index 8966e43ae40a2..08592a50deebb 100644 --- a/test/tap/cache-add-unpublished.js +++ b/test/tap/cache-add-unpublished.js @@ -11,14 +11,14 @@ test('cache add', function (t) { 'cache', 'add', 'superfoo', - '--registry=http://localhost:1337/' + '--registry=http://localhost:' + common.port + '/' ], {}, function (er, c, so, se) { if (er) throw er t.ok(c, 'got non-zero exit code') t.equal(so, '', 'nothing printed to stdout') - t.similar(se, /404 Not Found: superfoo/, 'got expected error') + t.similar(se, /404 Not Found.*superfoo/, 'got expected error') s.close() t.end() } @@ -31,7 +31,7 @@ function setup (cb) { res.statusCode = 404 res.end('{"error":"not_found"}\n') }) - s.listen(1337, function () { + s.listen(common.port, function () { cb(null, s) }) } diff --git a/test/tap/cache-eacces-error-message.js b/test/tap/cache-eacces-error-message.js new file mode 100644 index 0000000000000..fe76875c3e024 --- /dev/null +++ b/test/tap/cache-eacces-error-message.js @@ -0,0 +1,35 @@ +const npm = require('../../lib/npm.js') +const t = require('tap') + +const common = require('../common-tap.js') + +common.skipIfWindows('this is a unix-only thing') + +const errorMessage = require('../../lib/utils/error-message.js') + +t.plan(1) + +npm.load({ cache: common.cache }, () => { + npm.config.set('cache', common.cache) + const er = new Error('access is e, i am afraid') + er.code = 'EACCES' + er.errno = -13 + er.path = common.cache + '/src' + er.dest = common.cache + '/to' + + t.match(errorMessage(er), { + summary: [ + [ + '', + new RegExp('\n' + + 'Your cache folder contains root-owned files, due to a bug in\n' + + 'previous versions of npm which has since been addressed.\n' + + '\n' + + 'To permanently fix this problem, please run:\n' + + ' sudo chown -R [0-9]+:[0-9]+ ".*npm_cache_cache-eacces-error-message"' + ) + ] + ], + detail: [] + }, 'get the helpful error message') +}) diff --git 
a/test/tap/cache-shasum-fork.js b/test/tap/cache-shasum-fork.js index 1e92d437560d7..fade5ffb646c5 100644 --- a/test/tap/cache-shasum-fork.js +++ b/test/tap/cache-shasum-fork.js @@ -3,8 +3,6 @@ var path = require('path') var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') @@ -14,12 +12,13 @@ var common = require('../common-tap.js') var forkPath = path.resolve( __dirname, '..', 'fixtures', 'forked-underscore-1.5.1.tgz' ) -var pkg = path.resolve(__dirname, 'cache-shasum-fork') -var cache = path.join(pkg, 'cache') +var pkg = common.pkg +var cache = common.cache var server test('setup', function (t) { - setup() + mkdirp.sync(path.join(pkg, 'node_modules')) + process.chdir(pkg) t.comment('test for https://github.com/npm/npm/issues/3265') mr({ port: common.port }, function (er, s) { server = s @@ -28,7 +27,6 @@ test('setup', function (t) { }) test('npm cache - install from fork', function (t) { - setup() common.npm( [ '--loglevel', 'silent', @@ -42,7 +40,7 @@ test('npm cache - install from fork', function (t) { }, function (err, code, stdout, stderr) { t.ifErr(err, 'install finished without error') - t.notOk(stderr, 'Should not get data on stderr: ' + stderr) + t.equal(stderr, '', 'Should not get data on stderr') t.equal(code, 0, 'install finished successfully') var deps = {} @@ -60,7 +58,6 @@ test('npm cache - install from fork', function (t) { // Now install the real 1.5.1. test('npm cache - install from origin', function (t) { - setup() common.npm( [ '--loglevel', 'silent', @@ -91,17 +88,5 @@ test('npm cache - install from origin', function (t) { test('cleanup', function (t) { server.close() - cleanup() t.end() }) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} - -function setup () { - mkdirp.sync(cache) - mkdirp.sync(path.join(pkg, 'node_modules')) - process.chdir(pkg) -} diff --git a/test/tap/check-cpu-reqs.js b/test/tap/check-cpu-reqs.js index 4d8c3dc6551c4..d70660b05887f 100644 --- a/test/tap/check-cpu-reqs.js +++ b/test/tap/check-cpu-reqs.js @@ -2,12 +2,10 @@ var path = require('path') var fs = require('fs') var test = require('tap').test -var osenv = require('osenv') var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var common = require('../common-tap.js') -var base = path.join(__dirname, path.basename(__filename, '.js')) +var base = common.pkg var installFrom = path.join(base, 'from') var installIn = path.join(base, 'in') @@ -41,18 +39,7 @@ test('force install bad cpu', function (t) { }) }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(base) -} - function setup () { - cleanup() mkdirp.sync(path.resolve(installFrom, 'node_modules')) fs.writeFileSync( path.join(installFrom, 'package.json'), diff --git a/test/tap/check-engine-reqs.js b/test/tap/check-engine-reqs.js index 8dd9b8231fb5e..eec07562885c9 100644 --- a/test/tap/check-engine-reqs.js +++ b/test/tap/check-engine-reqs.js @@ -2,12 +2,10 @@ var path = require('path') var fs = require('fs') var test = require('tap').test -var osenv = require('osenv') var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var common = require('../common-tap.js') -var base = path.join(__dirname, path.basename(__filename, '.js')) +var base = common.pkg var installFrom = path.join(base, 'from') var installIn = path.join(base, 'in') @@ -27,7 +25,6 @@ test('setup', function (t) 
{ var INSTALL_OPTS = ['--loglevel', 'silly'] var EXEC_OPTS = {cwd: installIn} - test('install bad engine', function (t) { common.npm(['install', '--engine-strict', installFrom].concat(INSTALL_OPTS), EXEC_OPTS, function (err, code) { t.ifError(err, 'npm ran without issue') @@ -43,18 +40,19 @@ test('force install bad engine', function (t) { }) }) -test('cleanup', function (t) { - cleanup() - t.end() +test('warns on bad engine not strict', function (t) { + common.npm(['install', '--json', installFrom], EXEC_OPTS, function (err, code, stdout, stderr) { + t.ifError(err, 'npm ran without issue') + t.is(code, 0, 'result code') + var result = JSON.parse(stdout) + t.match(result.warnings[0], /Unsupported engine/, 'reason for optional failure in JSON') + t.match(result.warnings[0], /1.0.0-not-a-real-version/, 'should print mismatch version info') + t.match(result.warnings[0], /Not compatible with your version of node/, 'incompatibility message') + t.done() + }) }) -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(base) -} - function setup () { - cleanup() mkdirp.sync(path.resolve(installFrom, 'node_modules')) fs.writeFileSync( path.join(installFrom, 'package.json'), diff --git a/test/tap/check-install-self.js b/test/tap/check-install-self.js index 2cde1606a91e0..63901a12df671 100644 --- a/test/tap/check-install-self.js +++ b/test/tap/check-install-self.js @@ -2,12 +2,10 @@ var path = require('path') var fs = require('fs') var test = require('tap').test -var osenv = require('osenv') var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var common = require('../common-tap.js') -var base = path.join(__dirname, path.basename(__filename, '.js')) +var base = common.pkg var installFrom = path.join(base, 'from') var installIn = path.join(base, 'in') @@ -43,18 +41,7 @@ test('force install self', function (t) { }) }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(base) -} - function setup () { - cleanup() mkdirp.sync(path.resolve(installFrom, 'node_modules')) fs.writeFileSync( path.join(installFrom, 'package.json'), diff --git a/test/tap/check-os-reqs.js b/test/tap/check-os-reqs.js index 5d0c1ecf78b07..66dcbd5328e06 100644 --- a/test/tap/check-os-reqs.js +++ b/test/tap/check-os-reqs.js @@ -2,12 +2,10 @@ var path = require('path') var fs = require('fs') var test = require('tap').test -var osenv = require('osenv') var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var common = require('../common-tap.js') -var base = path.join(__dirname, path.basename(__filename, '.js')) +var base = common.pkg var installFrom = path.join(base, 'from') var installIn = path.join(base, 'in') @@ -41,18 +39,7 @@ test('force install bad os', function (t) { }) }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(base) -} - function setup () { - cleanup() mkdirp.sync(path.resolve(installFrom, 'node_modules')) fs.writeFileSync( path.join(installFrom, 'package.json'), diff --git a/test/tap/check-permissions.js b/test/tap/check-permissions.js index 0a5f2e038adf1..b8238891d9075 100644 --- a/test/tap/check-permissions.js +++ b/test/tap/check-permissions.js @@ -8,7 +8,8 @@ var writableFallback = require('../../lib/install/writable.js').fsOpenImplementa var exists = require('../../lib/install/exists.js').fsAccessImplementation var existsFallback = require('../../lib/install/exists.js').fsStatImplementation -var testBase = path.resolve(__dirname, 
'check-permissions') +const common = require('../common-tap.js') +var testBase = common.pkg var existingDir = path.resolve(testBase, 'exists') var nonExistingDir = path.resolve(testBase, 'does-not-exist') var writableDir = path.resolve(testBase, 'writable') @@ -78,13 +79,14 @@ function writableTests (t, writable) { writable(writableDir, function (er) { t.error(er, 'writable dir is writable') }) - if (process.platform !== 'win32') { - // Windows folders cannot be set to be read-only. + if (process.platform === 'win32') { + t.pass('windows folders cannot be read-only') + } else if (process.getuid && process.getuid() === 0) { + t.pass('root is not blocked by read-only dirs') + } else { writable(nonWritableDir, function (er) { t.ok(er, 'non-writable dir resulted in an error') }) - } else { - t.pass('windows folders cannot be read-only') } } diff --git a/test/tap/ci-header.js b/test/tap/ci-header.js index dc20cc53c305a..fc791c6e6710d 100644 --- a/test/tap/ci-header.js +++ b/test/tap/ci-header.js @@ -8,9 +8,9 @@ var Dir = Tacks.Dir var chain = require('slide').chain var common = require('../common-tap.js') -var basedir = path.join(__dirname, path.basename(__filename, '.js')) +var basedir = common.pkg var testdir = path.join(basedir, 'testdir') -var cachedir = path.join(basedir, 'cache') +var cachedir = common.cache var globaldir = path.join(basedir, 'global') var tmpdir = path.join(basedir, 'tmp') diff --git a/test/tap/ci-permissions.js b/test/tap/ci-permissions.js new file mode 100644 index 0000000000000..c73d464236540 --- /dev/null +++ b/test/tap/ci-permissions.js @@ -0,0 +1,53 @@ +const t = require('tap') +const tar = require('tar') +const common = require('../common-tap.js') +const pkg = common.pkg +const rimraf = require('rimraf') +const { writeFileSync, statSync, chmodSync } = require('fs') +const { resolve } = require('path') +const mkdirp = require('mkdirp') + +t.test('setup', t => { + mkdirp.sync(resolve(pkg, 'package')) + const pj = resolve(pkg, 'package', 'package.json') + writeFileSync(pj, JSON.stringify({ + name: 'foo', + version: '1.2.3' + })) + chmodSync(pj, 0o640) + tar.c({ + sync: true, + file: resolve(pkg, 'foo.tgz'), + gzip: true, + cwd: pkg + }, ['package']) + writeFileSync(resolve(pkg, 'package.json'), JSON.stringify({ + name: 'root', + version: '1.2.3', + dependencies: { + foo: 'file:foo.tgz' + } + })) + t.end() +}) + +t.test('run install to generate package-lock', t => + common.npm(['install'], { cwd: pkg }).then(([code]) => t.equal(code, 0))) + +t.test('remove node_modules', t => rimraf(resolve(pkg, 'node_modules'), t.end)) + +t.test('run ci and check modes', t => + common.npm(['ci'], { cwd: pkg, stdio: 'inherit' }).then(([code]) => { + t.equal(code, 0) + const file = resolve(pkg, 'node_modules', 'foo', 'package.json') + // bitwise AND against 0o705 so that we can detect whether + // the file is world-readable. + // Typical unix systems would leave the file 0o644 + // Travis-ci and some other Linux systems will be 0o664 + // Windows is 0o666 + // The regression this is detecting (ie, the default in the tarball) + // leaves the file as 0o640. 
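+ // Working the mask through those modes: 0o644 & 0o705 === 0o604,
+ // 0o664 & 0o705 === 0o604, and 0o666 & 0o705 === 0o604, while the
+ // regressed 0o640 & 0o705 === 0o600.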
+ // Bitwise-AND 0o705 should always result in 0o604, and never 0o600 + const mode = statSync(file).mode & 0o705 + t.equal(mode, 0o604) + })) diff --git a/test/tap/ci-with-local-dependency.js b/test/tap/ci-with-local-dependency.js new file mode 100644 index 0000000000000..376dc97818153 --- /dev/null +++ b/test/tap/ci-with-local-dependency.js @@ -0,0 +1,82 @@ +const fs = require('graceful-fs') +const path = require('path') + +const mkdirp = require('mkdirp') +const t = require('tap') + +const common = require('../common-tap.js') + +const pkg = common.pkg + '/package' + +const EXEC_OPTS = { + cwd: pkg, + stdio: [0, 1, 2], + env: common.newEnv().extend({ + npm_config_registry: common.registry + }) +} + +const localDependencyJson = { + name: 'local-dependency', + version: '0.0.0', + dependencies: { + 'test-package': '0.0.0' + } +} + +const dependentJson = { + name: 'dependent', + version: '0.0.0', + dependencies: { + 'local-dependency': '../local-dependency' + } +} + +const target = path.resolve(pkg, '../local-dependency') +const mr = require('npm-registry-mock') +let server +t.teardown(() => { + if (server) { + server.close() + } +}) + +t.test('setup', function (t) { + mkdirp.sync(target) + fs.writeFileSync( + path.join(target, 'package.json'), + JSON.stringify(localDependencyJson, null, 2) + ) + mkdirp.sync(pkg) + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(dependentJson, null, 2) + ) + mr({ port: common.port }, (er, s) => { + if (er) { + throw er + } + server = s + t.end() + }) +}) + +t.test('\'npm install\' should install local pkg from sub path', function (t) { + common.npm(['install', '--loglevel=silent'], EXEC_OPTS, function (err, code) { + if (err) throw err + t.equal(code, 0, 'npm install exited with code') + t.ok(fs.statSync(path.resolve(pkg, 'node_modules/local-dependency/package.json')).isFile(), 'local dependency package.json exists') + t.ok(fs.statSync(path.resolve(pkg, 'node_modules/local-dependency/node_modules/test-package')).isDirectory(), 'transitive dependency installed') + t.end() + }) +}) + +t.test('\'npm ci\' should work', function (t) { + common.npm(['ci', '--loglevel=silent'], EXEC_OPTS, function (err, code) { + if (err) throw err + t.equal(code, 0, 'npm install exited with code') + t.ok(fs.statSync(path.resolve(pkg, 'node_modules/local-dependency/package.json')).isFile(), 'local dependency package.json exists') + t.ok(fs.statSync(path.resolve(pkg, 'node_modules/local-dependency/node_modules/test-package')).isDirectory(), 'transitive dependency installed') + t.end() + }) +}) diff --git a/test/tap/ci.js b/test/tap/ci.js index 9d69e3e5eb524..e1c2d73b7a43d 100644 --- a/test/tap/ci.js +++ b/test/tap/ci.js @@ -12,13 +12,20 @@ const test = require('tap').test const Dir = Tacks.Dir const File = Tacks.File -const testDir = path.join(__dirname, 'ci') +const cacheDir = common.cache +const testDir = common.pkg -const EXEC_OPTS = { cwd: testDir } +const EXEC_OPTS = { + cwd: testDir, + nodeExecPath: process.execPath +} const PKG = { name: 'top', version: '1.2.3', + scripts: { + install: 'node -p process.env.npm_config_foo' + }, dependencies: { optimist: '0.6.0', clean: '2.1.6' @@ -44,25 +51,27 @@ test('setup', () => { const fixture = new Tacks(Dir({ 'package.json': File(PKG) })) - fixture.create(testDir) - return mr({port: common.port}) + return Promise.all([rimraf(cacheDir), rimraf(testDir)]).then(() => { + fixture.create(testDir) + return mr({port: common.port}) + }) .then((server) => { SERVER = server return common.npm([ 'install', '--registry', 
common.registry ], EXEC_OPTS) - .then(() => fs.readFileAsync( - path.join(testDir, 'package-lock.json'), - 'utf8') - ) - .then((lock) => { - RAW_LOCKFILE = lock - }) - .then(() => common.npm(['ls', '--json'], EXEC_OPTS)) - .then((ret) => { - TREE = scrubFrom(JSON.parse(ret[1])) - }) + }) + .then(() => fs.readFileAsync( + path.join(testDir, 'package-lock.json'), + 'utf8') + ) + .then((lock) => { + RAW_LOCKFILE = lock + }) + .then(() => common.npm(['ls', '--json'], EXEC_OPTS)) + .then((ret) => { + TREE = scrubFrom(JSON.parse(ret[1])) }) }) @@ -75,6 +84,7 @@ test('basic installation', (t) => { .then(() => fixture.create(testDir)) .then(() => common.npm([ 'ci', + '--foo=asdf', '--registry', common.registry, '--loglevel', 'warn' ], EXEC_OPTS)) @@ -86,7 +96,7 @@ test('basic installation', (t) => { t.equal(stderr.trim(), '', 'no output on stderr') t.match( stdout.trim(), - /^added 6 packages in \d+(?:\.\d+)?s$/, + /\nasdf\nadded 6 packages in \d+(?:\.\d+)?s$/, 'no warnings on stderr, and final output has right number of packages' ) return fs.readdirAsync(path.join(testDir, 'node_modules')) @@ -142,6 +152,7 @@ test('supports npm-shrinkwrap.json as well', (t) => { .then(() => fixture.create(testDir)) .then(() => common.npm([ 'ci', + '--foo=asdf', '--registry', common.registry, '--loglevel', 'warn' ], EXEC_OPTS)) @@ -153,7 +164,7 @@ test('supports npm-shrinkwrap.json as well', (t) => { t.equal(stderr.trim(), '', 'no output on stderr') t.match( stdout.trim(), - /^added 6 packages in \d+(?:\.\d+)?s$/, + /\nasdf\nadded 6 packages in \d+(?:\.\d+)?s$/, 'no warnings on stderr, and final output has right number of packages' ) }) @@ -192,6 +203,7 @@ test('removes existing node_modules/ before installing', (t) => { .then(() => fixture.create(testDir)) .then(() => common.npm([ 'ci', + '--foo=asdf', '--registry', common.registry, '--loglevel', 'warn' ], EXEC_OPTS)) @@ -230,6 +242,7 @@ test('errors if package-lock.json missing', (t) => { .then(() => fixture.create(testDir)) .then(() => common.npm([ 'ci', + '--foo=asdf', '--registry', common.registry, '--loglevel', 'warn' ], EXEC_OPTS)) @@ -266,6 +279,7 @@ test('errors if package-lock.json invalid', (t) => { .then(() => fixture.create(testDir)) .then(() => common.npm([ 'ci', + '--foo=asdf', '--registry', common.registry, '--loglevel', 'warn' ], EXEC_OPTS)) @@ -296,7 +310,35 @@ test('errors if package-lock.json invalid', (t) => { ) }) +test('correct cache location when using cache config', (t) => { + const fixture = new Tacks(Dir({ + 'package.json': File(PKG), + 'package-lock.json': File(RAW_LOCKFILE) + })) + return Promise.all([rimraf(cacheDir), rimraf(testDir)]) + .then(() => fixture.create(cacheDir)) + .then(() => fixture.create(testDir)) + .then(() => common.npm([ + 'ci', + `--cache=${cacheDir}`, + '--foo=asdf', + '--registry', common.registry, + '--loglevel', 'warn' + ], EXEC_OPTS)) + .then((ret) => { + const code = ret[0] + const stderr = ret[2] + t.equal(code, 0, 'command completed without error') + t.equal(stderr.trim(), '', 'no output on stderr') + return fs.readdirAsync(path.join(cacheDir, '_cacache')) + }) + .then((modules) => { + t.ok(modules, 'should create _cacache folder') + t.end() + }) +}) + test('cleanup', () => { SERVER.close() - return rimraf(testDir) + return Promise.all([rimraf(cacheDir), rimraf(testDir)]) }) diff --git a/test/tap/circular-dep.js b/test/tap/circular-dep.js index d7f66eacef1ac..f7e018d02346c 100644 --- a/test/tap/circular-dep.js +++ b/test/tap/circular-dep.js @@ -4,19 +4,17 @@ var existsSync = fs.existsSync || path.existsSync 
var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') var server -var pkg = path.resolve(__dirname, 'circular-dep') +var pkg = common.pkg var minimist = path.join(pkg, 'minimist') var EXEC_OPTS = { cwd: path.join(pkg, 'minimist/node_modules'), - npm_config_cache: path.join(pkg, 'cache') + npm_config_cache: common.cache } var json = { @@ -75,13 +73,11 @@ test('installing a package that depends on the current package', function (t) { }) test('cleanup', function (t) { - cleanup() server.close() t.end() }) function setup (cb) { - cleanup() mkdirp.sync(minimist) fs.writeFileSync( path.join(minimist, 'package.json'), @@ -95,8 +91,3 @@ function setup (cb) { cb() }) } - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/config-basic.js b/test/tap/config-basic.js index 0483695cf5f25..a95e94db29fd6 100644 --- a/test/tap/config-basic.js +++ b/test/tap/config-basic.js @@ -1,6 +1,6 @@ var test = require('tap').test +var common = require('../common-config.js') var npmconf = require('../../lib/config/core.js') -var common = require('./00-config-setup.js') var path = require('path') var projectData = { @@ -58,15 +58,7 @@ var expectSources = { } function isDeeplyDetails (t, aa, bb, msg, seen) { - if (aa == null && bb == null) return t.pass(msg) - if (typeof bb !== 'object') return t.is(aa, bb, msg) - if (!seen) seen = [] - for (var kk in seen) if (seen[kk] === aa || seen[kk] === bb) return - seen.push(aa, bb) - t.is(Object.keys(aa).length, Object.keys(bb).length, msg + ': # of elements') - Object.keys(bb).forEach(function (key) { - isDeeplyDetails(t, aa[key], bb[key], msg + ' -> ' + key, seen) - }) + return t.same(aa, bb, msg) } test('no builtin', function (t) { diff --git a/test/tap/config-builtin.js b/test/tap/config-builtin.js index 885d099a1fc30..713522d6ccfa2 100644 --- a/test/tap/config-builtin.js +++ b/test/tap/config-builtin.js @@ -1,6 +1,6 @@ var test = require('tap').test var npmconf = require('../../lib/config/core.js') -var common = require('./00-config-setup.js') +var common = require('../common-config.js') var path = require('path') var ucData = common.ucData diff --git a/test/tap/config-certfile.js b/test/tap/config-certfile.js index 4b960672ad79e..904a8c0147718 100644 --- a/test/tap/config-certfile.js +++ b/test/tap/config-certfile.js @@ -1,4 +1,4 @@ -require('./00-config-setup.js') +require('../common-config.js') var path = require('path') var fs = require('fs') diff --git a/test/tap/config-credentials.js b/test/tap/config-credentials.js index f6f00ee0e6518..d8f6770666e2d 100644 --- a/test/tap/config-credentials.js +++ b/test/tap/config-credentials.js @@ -1,7 +1,7 @@ var test = require('tap').test var npmconf = require('../../lib/config/core.js') -var common = require('./00-config-setup.js') +var common = require('../common-config.js') var URI = 'https://registry.lvh.me:8661/' diff --git a/test/tap/config-edit.js b/test/tap/config-edit.js index 7d6eb2588d48b..a4f78a262c8b9 100644 --- a/test/tap/config-edit.js +++ b/test/tap/config-edit.js @@ -6,7 +6,7 @@ var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'npm-global-edit') +var pkg = common.pkg var editorSrc = function () { /* #!/usr/bin/env node diff --git a/test/tap/config-envReplace.js b/test/tap/config-envReplace.js index 0b4f628d67339..f65609f050f75 100644 
--- a/test/tap/config-envReplace.js +++ b/test/tap/config-envReplace.js @@ -8,7 +8,8 @@ const ini = require('ini') const test = require('tap').test const npmconf = require('../../lib/config/core.js') -const packagePath = path.resolve(__dirname, 'config-envReplace') +const common = require('../common-tap.js') +const packagePath = common.pkg const packageJsonFile = JSON.stringify({ name: 'config-envReplace' diff --git a/test/tap/config-list.js b/test/tap/config-list.js index 05402d4bbe6ee..7559214a727a5 100644 --- a/test/tap/config-list.js +++ b/test/tap/config-list.js @@ -5,7 +5,7 @@ var rimraf = require('rimraf') var mkdirp = require('mkdirp') var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'config-list') +var pkg = common.pkg var opts = { cwd: pkg, env: common.emptyEnv() } var npmrc = path.resolve(pkg, '.npmrc') var npmrcContents = ` diff --git a/test/tap/config-malformed.js b/test/tap/config-malformed.js index 4f74ea5387e59..0d859e00340f3 100644 --- a/test/tap/config-malformed.js +++ b/test/tap/config-malformed.js @@ -1,7 +1,7 @@ var test = require('tap').test var npmconf = require('../../lib/config/core.js') -var common = require('./00-config-setup.js') +var common = require('../common-config.js') test('with malformed', function (t) { npmconf.load({}, common.malformed, function (er, conf) { diff --git a/test/tap/config-meta.js b/test/tap/config-meta.js index 735c161fb87e2..a98d5e6c4dabd 100644 --- a/test/tap/config-meta.js +++ b/test/tap/config-meta.js @@ -11,7 +11,7 @@ var root = path.resolve(__dirname, '..', '..') var lib = path.resolve(root, 'lib') var bin = path.resolve(root, 'bin') var nm = path.resolve(root, 'node_modules') -var doc = path.resolve(root, 'doc/misc/npm-config.md') +var doc = path.resolve(root, 'docs/content/using-npm/config.md') var FILES = [] var CONFS = {} var DOC = {} @@ -85,12 +85,12 @@ test('get lines', function (t) { test('get docs', function (t) { var d = fs.readFileSync(doc, 'utf8').split(/\r|\n/) // walk down until the '## Config Settings' section - for (var i = 0; i < d.length && d[i] !== '## Config Settings'; i++); + for (var i = 0; i < d.length && d[i] !== '### Config Settings'; i++); i++ // now gather up all the ^###\s lines until the next ^##\s - for (; i < d.length && !d[i].match(/^## /); i++) { - if (d[i].match(/^### /)) { - DOC[ d[i].replace(/^### /, '').trim() ] = true + for (; i < d.length && !d[i].match(/^### /); i++) { + if (d[i].match(/^#### /)) { + DOC[ d[i].replace(/^#### /, '').trim() ] = true } } t.pass('read the docs') @@ -110,25 +110,26 @@ test('check configs', function (t) { } } - for (var c2 in DOC) { - if (c2 !== 'versions' && c2 !== 'version' && c2 !== 'init.version' && c2 !== 'ham-it-up') { - t.ok(CONFS[c2], 'config in doc should be used somewhere ' + c2) - t.ok(types.indexOf(c2) !== -1, 'should be defined in npmconf ' + c2) - t.ok(defaults.indexOf(c2) !== -1, 'should have default in npmconf ' + c2) - } - } + // TODO - needs better figgy-pudding introspection + // for (var c2 in DOC) { + // if (c2 !== 'versions' && c2 !== 'version' && c2 !== 'init.version' && c2 !== 'ham-it-up') { + // t.ok(CONFS[c2], 'config in doc should be used somewhere ' + c2) + // t.ok(types.indexOf(c2) !== -1, 'should be defined in npmconf ' + c2) + // t.ok(defaults.indexOf(c2) !== -1, 'should have default in npmconf ' + c2) + // } + // } types.forEach(function (c) { if (!c.match(/^_/) && c !== 'argv' && !c.match(/^versions?$/) && c !== 'ham-it-up') { t.ok(DOC[c], 'defined type should be documented ' + c) - t.ok(CONFS[c], 'defined 
type should be used ' + c) + // t.ok(CONFS[c], 'defined type should be used ' + c) } }) defaults.forEach(function (c) { if (!c.match(/^_/) && c !== 'argv' && !c.match(/^versions?$/) && c !== 'ham-it-up') { t.ok(DOC[c], 'defaulted type should be documented ' + c) - t.ok(CONFS[c], 'defaulted type should be used ' + c) + // t.ok(CONFS[c], 'defaulted type should be used ' + c) } }) diff --git a/test/tap/config-new-cafile.js b/test/tap/config-new-cafile.js index 9cffb19008ed4..e4cc65ec747a6 100644 --- a/test/tap/config-new-cafile.js +++ b/test/tap/config-new-cafile.js @@ -1,24 +1,20 @@ -require('./00-config-setup.js') +const common = require('../common-tap.js') var path = require('path') var fs = require('graceful-fs') var test = require('tap').test -var mkdirp = require('mkdirp') var rimraf = require('rimraf') -var osenv = require('osenv') var npmconf = require('../../lib/config/core.js') -var dir = path.resolve(__dirname, 'config-new-cafile') +var dir = common.pkg var beep = path.resolve(dir, 'beep.pem') - -test('setup', function (t) { - bootstrap() - t.end() -}) +var npmrc = path.resolve(dir, 'npmrc') test('can set new cafile when old is gone', function (t) { t.plan(5) - npmconf.load(function (error, conf) { + fs.writeFileSync(npmrc, '') + fs.writeFileSync(beep, '') + npmconf.load({ userconfig: npmrc }, function (error, conf) { npmconf.loaded = false t.ifError(error) conf.set('cafile', beep, 'user') @@ -26,7 +22,7 @@ test('can set new cafile when old is gone', function (t) { t.ifError(error) t.equal(conf.get('cafile'), beep) rimraf.sync(beep) - npmconf.load(function (error, conf) { + npmconf.load({ userconfig: npmrc }, function (error, conf) { if (error) { throw error } @@ -39,18 +35,3 @@ test('can set new cafile when old is gone', function (t) { }) }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function bootstrap () { - mkdirp.sync(dir) - fs.writeFileSync(beep, '') -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(dir) -} diff --git a/test/tap/config-private.js b/test/tap/config-private.js index 91582921e6dff..e6bf9d2e83cee 100644 --- a/test/tap/config-private.js +++ b/test/tap/config-private.js @@ -5,7 +5,7 @@ var rimraf = require('rimraf') var mkdirp = require('mkdirp') var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'config-private') +var pkg = common.pkg var opts = { cwd: pkg } test('setup', function (t) { diff --git a/test/tap/config-project.js b/test/tap/config-project.js index b9e8b7ac04a2e..0f7be913c097f 100644 --- a/test/tap/config-project.js +++ b/test/tap/config-project.js @@ -3,7 +3,7 @@ var path = require('path') var fix = path.resolve(__dirname, '..', 'fixtures', 'config') var projectRc = path.resolve(fix, '.npmrc') var npmconf = require('../../lib/config/core.js') -var common = require('./00-config-setup.js') +var common = require('../common-config.js') var projectData = { just: 'testing' } diff --git a/test/tap/config-save.js b/test/tap/config-save.js index a7f85d6015e4b..23dc7b5960785 100644 --- a/test/tap/config-save.js +++ b/test/tap/config-save.js @@ -1,13 +1,15 @@ var fs = require('fs') var ini = require('ini') -var test = require('tap').test +var t = require('tap') +const test = t.test +var common = require('../common-config.js') +var commonTap = require('../common-tap.js') var npmconf = require('../../lib/config/core.js') -var common = require('./00-config-setup.js') var expectConf = [ 'globalconfig = ' + common.globalconfig, 'email = i@izs.me', - 'env-thing = asdf', + 'env-thing = foo', 
'init.author.name = Isaac Z. Schlueter', 'init.author.email = i@izs.me', 'init.author.url = http://blog.izs.me/', @@ -32,7 +34,7 @@ var expectConf = [ var expectFile = [ 'globalconfig = ' + common.globalconfig, 'email = i@izs.me', - 'env-thing = asdf', + 'env-thing = foo', 'init.author.name = Isaac Z. Schlueter', 'init.author.email = i@izs.me', 'init.author.url = http://blog.izs.me/', @@ -54,6 +56,10 @@ var expectFile = [ '' ].join('\n') +const userconfig = commonTap.pkg + '/userconfig' +fs.writeFileSync(userconfig, fs.readFileSync(common.userconfig)) +process.env.npm_config_userconfig = userconfig + test('saving configs', function (t) { npmconf.load(function (er, conf) { if (er) throw er diff --git a/test/tap/correct-mkdir.js b/test/tap/correct-mkdir.js index 2c93f943ad5c4..175fb34a003bc 100644 --- a/test/tap/correct-mkdir.js +++ b/test/tap/correct-mkdir.js @@ -1,18 +1,20 @@ /* eslint-disable camelcase */ -var test = require('tap').test +var t = require('tap') +var test = t.test var assert = require('assert') -var path = require('path') var requireInject = require('require-inject') -var cache_dir = path.resolve(__dirname, 'correct-mkdir') +const common = require('../common-tap.js') +var cache_dir = common.pkg +common.skipIfWindows('windows does not use correct-mkdir behavior') test('correct-mkdir: no race conditions', function (t) { var mock_fs = {} var did_hook = false - mock_fs.stat = function (path, cb) { + mock_fs.lstat = function (path, cb) { if (path === cache_dir) { // Return a non-matching owner cb(null, { - uid: +process.uid + 1, + uid: +process.getuid() + 1, isDirectory: function () { return true } @@ -35,7 +37,8 @@ test('correct-mkdir: no race conditions', function (t) { } var mocks = { 'graceful-fs': mock_fs, - 'chownr': mock_chownr + 'chownr': mock_chownr, + 'infer-owner': requireInject('infer-owner', { fs: mock_fs }) } var correctMkdir = requireInject('../../lib/utils/correct-mkdir.js', mocks) @@ -60,7 +63,7 @@ test('correct-mkdir: no race conditions', function (t) { test('correct-mkdir: ignore ENOENTs from chownr', function (t) { var mock_fs = {} - mock_fs.stat = function (path, cb) { + mock_fs.lstat = function (path, cb) { if (path === cache_dir) { cb(null, { isDirectory: function () { @@ -99,7 +102,7 @@ test('correct-mkdir: SUDO_UID and SUDO_GID non-Windows', function (t) { process.getuid = function () { return 0 } process.getgid = function () { return 0 } var mock_fs = {} - mock_fs.stat = function (path, cb) { + mock_fs.lstat = function (path, cb) { if (path === cache_dir) { cb(null, { uid: 0, @@ -134,7 +137,7 @@ test('correct-mkdir: SUDO_UID and SUDO_GID Windows', function (t) { delete process.getuid delete process.getgid var mock_fs = {} - mock_fs.stat = function (path, cb) { + mock_fs.lstat = function (path, cb) { if (path === cache_dir) { cb(null, { uid: 0, diff --git a/test/tap/cruft-test.js b/test/tap/cruft-test.js index 0cbcc6f56c61d..4f2ed6c0d0a71 100644 --- a/test/tap/cruft-test.js +++ b/test/tap/cruft-test.js @@ -6,7 +6,7 @@ var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var base = path.join(__dirname, path.basename(__filename, '.js')) +var base = common.pkg var cruft = path.join(base, 'node_modules', 'cruuuft') var pkg = { name: 'example', diff --git a/test/tap/debug-logs.js b/test/tap/debug-logs.js index a6997352636fe..4da52bfcee8ae 100644 --- a/test/tap/debug-logs.js +++ b/test/tap/debug-logs.js @@ -8,16 +8,15 @@ var File = Tacks.File var Dir = Tacks.Dir var common = require('../common-tap.js') -var 
basedir = path.join(__dirname, path.basename(__filename, '.js')) +var basedir = common.pkg var testdir = path.join(basedir, 'testdir') -var cachedir = path.join(basedir, 'cache') +var cachedir = common.cache var globaldir = path.join(basedir, 'global') var tmpdir = path.join(basedir, 'tmp') var conf = { cwd: testdir, env: Object.assign({}, process.env, { - npm_config_cache: cachedir, npm_config_tmp: tmpdir, npm_config_prefix: globaldir, npm_config_registry: common.registry, @@ -26,7 +25,6 @@ var conf = { } var fixture = new Tacks(Dir({ - cache: Dir(), global: Dir(), tmp: Dir(), testdir: Dir({ @@ -41,17 +39,8 @@ var fixture = new Tacks(Dir({ }) })) -function setup () { - cleanup() - fixture.create(basedir) -} - -function cleanup () { - fixture.remove(basedir) -} - test('setup', function (t) { - setup() + fixture.create(basedir) t.done() }) @@ -59,23 +48,36 @@ test('example', function (t) { common.npm(['run', 'false'], conf, function (err, code, stdout, stderr) { if (err) throw err t.is(code, 1, 'command errored') - var matches = stderr.match(/A complete log of this run can be found in:.*\nnpm ERR! {5,5}(.*)/) - t.ok(matches, 'debug log mentioned in error message') + const re = /A complete log of this run can be found in:.*\nnpm ERR! {5,5}(.*)/ + const matches = stderr.match(re) + t.match(stderr, re, 'debug log mentioned in error message') if (matches) { var logfile = matches[1] t.matches(path.relative(cachedir, logfile), /^_logs/, 'debug log is inside the cache in _logs') } - // we run a bunch concurrently, this will actually create > than our limit as the check is done - // when the command starts - var todo = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] + // we run a bunch concurrently, this will actually create > than our limit + // as the check is done when the command starts + // + // It has to be > the log count (10) but also significantly higher than + // the number of cores on the machine, or else some might be able to get + // to the log folder pruning logic in parallel, resulting in FEWER files + // than we expect being present at the end. + var procCount = Math.max(require('os').cpus().length * 2, 12) + var todo = new Array(procCount).join(',').split(',').map((v, k) => k) asyncMap(todo, function (num, next) { + // another way would be to just simulate this? 
+ // var f = path.join(cachedir, '_logs', num + '-debug.log') + // require('fs').writeFile(f, 'log ' + num, next) common.npm(['run', '--logs-max=10', 'false'], conf, function (err, code) { if (err) throw err t.is(code, 1, 'run #' + num + ' errored as expected') next() }) }, function () { + var files = glob.sync(path.join(cachedir, '_logs', '*')) + t.ok(files.length > 10, 'there should be more than 10 log files') + // now we do one more and that should clean up the list common.npm(['run', '--logs-max=10', 'false'], conf, function (err, code) { if (err) throw err @@ -93,8 +95,3 @@ test('example', function (t) { }) }) }) - -test('cleanup', function (t) { - cleanup() - t.done() -}) diff --git a/test/tap/dedupe-git-semver.js b/test/tap/dedupe-git-semver.js new file mode 100644 index 0000000000000..b7e31b08614f0 --- /dev/null +++ b/test/tap/dedupe-git-semver.js @@ -0,0 +1,138 @@ +'use strict' +const fs = require('fs') +const path = require('path') +const test = require('tap').test +const requireInject = require('require-inject') +const Tacks = require('tacks') +const File = Tacks.File +const Dir = Tacks.Dir + +const manifests = { + 'git-wrap': { + name: 'git-wrap', + version: '1.0.0', + dependencies: { + git: 'git+https://example.com/git#semver:1.0' + } + }, + git: { + name: 'git', + version: '1.0.0' + } +} + +const npm = requireInject.installGlobally('../../lib/npm.js', { + pacote: { + manifest: function (spec) { + const manifest = manifests[spec.name] + manifest._requested = spec + manifest._resolved = spec.saveSpec.replace(/^file:/, '').replace(/(:?#.*)$/, '#0000000000000000000000000000000000000000') + manifest._from = spec.rawSpec + return Promise.resolve(manifest) + } + }, + '../../lib/utils/output.js': function () { + // do not output to stdout + } +}) + +const common = require('../common-tap.js') +const basedir = common.pkg +const testdir = path.join(basedir, 'testdir') +const cachedir = common.cache +const tmpdir = path.join(basedir, 'tmp') + +const cwd = process.cwd() + +const conf = { + cache: cachedir, + tmp: tmpdir, + loglevel: 'silent', + 'package-lock-only': true +} + +const fixture = new Tacks(Dir({ + cache: Dir(), + global: Dir(), + tmp: Dir(), + testdir: Dir({ + 'package.json': File({ + name: 'fixture', + version: '1.0.0', + dependencies: { + git: 'git+https://example.com/git#semver:1', + 'git-wrap': 'file:git-wrap-1.0.0.tgz' + } + }), + // Tarball source: + // 'git-wrap': Dir({ + // 'package.json': File({ + // name: 'git-wrap', + // version: '1.0.0', + // dependencies: { + // git: 'git+https://example.com/git#semver:1.0' + // } + // }) + // }), + 'git-wrap-1.0.0.tgz': File(Buffer.from( + '1f8b0800000000000003ed8fcd0ac23010843df729423caaf9c13642df26' + + 'b44bad9a3434f107a4efeec68aa7de2c8898ef32cb0c3bec3a5d1d7503dc' + + '8dca0ebeb38b991142a83c27537e44ee30db164a48a994c01987a210a873' + + '1f32c5d907dde3299ff68cbf90b7fe08f78c106ab5015a12dab46173edb5' + + 'a3ebe85ea0f76d676320996062746b70606bb0550b1ea3b84f9e9baf82d5' + + '3e04e74bcee1a68d3b01ab3ac3d15f7a30d8586215c59d211bb26fff9e48' + + '2412ffcc034458283d00080000', + 'hex' + )) + }) +})) + +function setup () { + cleanup() + fixture.create(basedir) +} + +function cleanup () { + fixture.remove(basedir) +} + +test('setup', function (t) { + setup() + process.chdir(testdir) + npm.load(conf, function (err) { + if (err) throw err + t.done() + }) +}) + +test('dedupe matching git semver ranges', function (t) { + npm.commands.install(function (err) { + if (err) throw err + const packageLock = JSON.parse(fs.readFileSync('package-lock.json')) 
+ t.same(packageLock, { + name: 'fixture', + version: '1.0.0', + lockfileVersion: 1, + requires: true, + dependencies: { + git: { + from: 'git+https://example.com/git#semver:1', + version: 'git+https://example.com/git#0000000000000000000000000000000000000000' + }, + 'git-wrap': { + version: 'file:git-wrap-1.0.0.tgz', + requires: { + git: 'git+https://example.com/git#semver:1' + } + } + } + }) + t.done() + }) +}) + +test('cleanup', function (t) { + process.chdir(cwd) + cleanup() + t.done() +}) diff --git a/test/tap/dedupe-scoped.js b/test/tap/dedupe-scoped.js index 9d5ff9ac4c003..957f67e141c40 100644 --- a/test/tap/dedupe-scoped.js +++ b/test/tap/dedupe-scoped.js @@ -6,7 +6,7 @@ var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = join(__dirname, 'dedupe-scoped') +var pkg = common.pkg var modules = join(pkg, 'node_modules') var EXEC_OPTS = { cwd: pkg } diff --git a/test/tap/dedupe.js b/test/tap/dedupe.js index b6c601cbad37e..109f81656237b 100644 --- a/test/tap/dedupe.js +++ b/test/tap/dedupe.js @@ -10,7 +10,7 @@ var test = require('tap').test var common = require('../common-tap.js') var server -var pkg = path.join(__dirname, 'dedupe') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg } diff --git a/test/tap/dist-tag.js b/test/tap/dist-tag.js index 651639f32a5fe..a5cce5d2a0f93 100644 --- a/test/tap/dist-tag.js +++ b/test/tap/dist-tag.js @@ -7,7 +7,7 @@ var mr = require('npm-registry-mock') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'dist-tag') +var pkg = common.pkg var server var scoped = { @@ -20,10 +20,16 @@ function mocks (server) { server.get('/-/package/@scoped%2fpkg/dist-tags') .reply(200, { latest: '1.0.0', a: '0.0.1', b: '0.5.0' }) + server.get('/-/package/@scoped%2fpkg/dist-tags') + .reply(200, { latest: '1.0.0', a: '0.0.1', b: '0.5.0' }) + // ls named package server.get('/-/package/@scoped%2fanother/dist-tags') .reply(200, { latest: '2.0.0', a: '0.0.2', b: '0.6.0' }) + server.get('/-/package/@scoped%2fanother/dist-tags') + .reply(200, { latest: '2.0.0', a: '0.0.2', b: '0.6.0' }) + // add c server.get('/-/package/@scoped%2fanother/dist-tags') .reply(200, { latest: '2.0.0', a: '0.0.2', b: '0.6.0' }) @@ -40,6 +46,12 @@ function mocks (server) { server.delete('/-/package/@scoped%2fanother/dist-tags/c') .reply(200, { c: '7.7.7' }) + // using a scoped registry + server.get('/-/package/@scoped%2ffoo/dist-tags') + .reply(200, { latest: '2.0.0', a: '0.0.2', b: '0.6.0' }) + server.delete('/-/package/@scoped%2ffoo/dist-tags/b') + .reply(200, { b: '0.6.0' }) + // rm server.get('/-/package/@scoped%2fanother/dist-tags') .reply(200, { latest: '4.0.0' }) @@ -83,6 +95,25 @@ test('npm dist-tags ls in current package', function (t) { ) }) +test('npm dist-tags ls default in current package', function (t) { + common.npm( + [ + 'dist-tags', + '--registry', common.registry, + '--loglevel', 'silent' + ], + { cwd: pkg }, + function (er, code, stdout, stderr) { + t.ifError(er, 'npm access') + t.notOk(code, 'exited OK') + t.notOk(stderr, 'no error output') + t.equal(stdout, 'a: 0.0.1\nb: 0.5.0\nlatest: 1.0.0\n') + + t.end() + } + ) +}) + test('npm dist-tags ls on named package', function (t) { common.npm( [ @@ -103,6 +134,26 @@ test('npm dist-tags ls on named package', function (t) { ) }) +test('npm dist-tags ls default, named package', function (t) { + common.npm( + [ + 'dist-tags', + '@scoped/another', + '--registry', common.registry, + '--loglevel', 'silent' + ], + { cwd: pkg }, + 
function (er, code, stdout, stderr) { + t.ifError(er, 'npm access') + t.notOk(code, 'exited OK') + t.notOk(stderr, 'no error output') + t.equal(stdout, 'a: 0.0.2\nb: 0.6.0\nlatest: 2.0.0\n') + + t.end() + } + ) +}) + test('npm dist-tags add @scoped/another@7.7.7 c', function (t) { common.npm( [ @@ -187,6 +238,31 @@ test('npm dist-tags rm @scoped/another nonexistent', function (t) { ) }) +test('npm dist-tags rm with registry assigned to scope', function (t) { + fs.writeFileSync(path.resolve(pkg, '.npmrc'), ` +@scoped:registry=${common.registry} +${common.registry.replace(/^https?:/, '')}:_authToken=taken +`) + + common.npm( + [ + 'dist-tags', + 'rm', '@scoped/foo', 'b', + '--loglevel', 'silent', + '--userconfig', path.resolve(pkg, '.npmrc') + ], + { cwd: pkg }, + function (er, code, stdout, stderr) { + t.ifError(er, 'npm access') + t.notOk(code, 'exited OK') + t.notOk(stderr, 'no error output') + t.equal(stdout, '-b: @scoped/foo@0.6.0\n') + + t.end() + } + ) +}) + test('cleanup', function (t) { t.pass('cleaned up') rimraf.sync(pkg) diff --git a/test/tap/do-not-remove-other-bins.js b/test/tap/do-not-remove-other-bins.js index 6fec728d43bcc..a614043221f0d 100644 --- a/test/tap/do-not-remove-other-bins.js +++ b/test/tap/do-not-remove-other-bins.js @@ -3,13 +3,11 @@ var fs = require('fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap') -var base = path.resolve(__dirname, path.basename(__filename, '.js')) +var base = common.pkg var installPath = path.resolve(base, 'install') var installBin = path.resolve(installPath, 'node_modules', '.bin') var packageApath = path.resolve(base, 'packageA') @@ -41,7 +39,6 @@ var EXEC_OPTS = { } test('setup', function (t) { - cleanup() mkdirp.sync(path.join(installPath, 'node_modules')) mkdirp.sync(packageApath) fs.writeFileSync( @@ -118,14 +115,3 @@ test('verify postremoval bins', function (t) { t.is(bin, path.join(installPath, 'node_modules', 'b')) t.end() }) - -test('cleanup', function (t) { - cleanup() - t.pass('cleaned up') - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(base) -} diff --git a/test/tap/doctor.js b/test/tap/doctor.js index 26c74833e2bfc..9a1b42cdd5361 100644 --- a/test/tap/doctor.js +++ b/test/tap/doctor.js @@ -5,16 +5,15 @@ const http = require('http') const mr = require('npm-registry-mock') const npm = require('../../lib/npm.js') const path = require('path') -const rimraf = require('rimraf') const Tacks = require('tacks') -const test = require('tap').test +const t = require('tap') const which = require('which') const Dir = Tacks.Dir const File = Tacks.File -const ROOT = path.join(__dirname, path.basename(__filename, '.js')) -const CACHE = path.join(ROOT, 'cache') +const ROOT = common.pkg +const CACHE = common.cache const TMP = path.join(ROOT, 'tmp') const PREFIX = path.join(ROOT, 'global-prefix') const PKG = path.join(ROOT, 'pkg') @@ -44,12 +43,23 @@ const npmResponse = { } } -test('setup', (t) => { +let nodeServer + +t.teardown(() => { + if (server) { + server.close() + } + if (nodeServer) { + nodeServer.close() + } +}) + +t.test('setup', (t) => { const port = common.port + 1 - http.createServer(function (q, s) { + nodeServer = http.createServer(function (q, s) { s.end(JSON.stringify([{lts: true, version: '0.0.0'}])) - this.close() - }).listen(port, () => { + }) + nodeServer.listen(port, () => { node_url = 'http://localhost:' + port mr({port: common.port}, (err, s) 
=> { t.ifError(err, 'registry mocked successfully') @@ -78,11 +88,11 @@ test('setup', (t) => { }) }) -test('npm doctor', function (t) { +t.test('npm doctor', function (t) { npm.commands.doctor({'node-url': node_url}, true, function (e, list) { t.ifError(e, 'npm loaded successfully') t.same(list.length, 9, 'list should have 9 prop') - t.same(list[0][1], 'OK', 'npm ping') + t.same(list[0][1], 'ok', 'npm ping') t.same(list[1][1], 'v' + npm.version, 'npm -v') t.same(list[2][1], process.version, 'node -v') t.same(list[3][1], common.registry + '/', 'npm config get registry') @@ -93,13 +103,29 @@ test('npm doctor', function (t) { which('git', function (e, resolvedPath) { t.ifError(e, 'git command is installed') t.same(list[4][1], resolvedPath, 'which git') - server.close() t.done() }) }) }) -test('cleanup', (t) => { - rimraf.sync(ROOT) - t.done() +t.test('npm doctor works without registry', function (t) { + npm.config.set('registry', false) + npm.commands.doctor({'node-url': node_url}, true, function (e, list) { + t.ifError(e, 'npm loaded successfully') + t.same(list.length, 9, 'list should have 9 prop') + t.same(list[0][1], 'ok', 'npm ping') + t.same(list[1][1], 'v' + npm.version, 'npm -v') + t.same(list[2][1], process.version, 'node -v') + t.same(list[3][1], '', 'no registry, but no crash') + t.same(list[5][1], 'ok', 'Perms check on cached files') + t.same(list[6][1], 'ok', 'Perms check on global node_modules') + t.same(list[7][1], 'ok', 'Perms check on local node_modules') + t.match(list[8][1], /^verified \d+ tarballs?$/, 'Cache verified') + which('git', function (e, resolvedPath) { + t.ifError(e, 'git command is installed') + t.same(list[4][1], resolvedPath, 'which git') + server.close() + t.done() + }) + }) }) diff --git a/test/tap/extraneous-dep-cycle-ls-ok.js b/test/tap/extraneous-dep-cycle-ls-ok.js index d483b3e22ac33..11f52209ed835 100644 --- a/test/tap/extraneous-dep-cycle-ls-ok.js +++ b/test/tap/extraneous-dep-cycle-ls-ok.js @@ -8,7 +8,7 @@ var rimraf = require('rimraf') var common = require('../common-tap') -var pkg = path.resolve(__dirname, path.basename(__filename, '.js')) +var pkg = common.pkg var pathModA = path.join(pkg, 'node_modules', 'moduleA') var pathModB = path.join(pkg, 'node_modules', 'moduleB') diff --git a/test/tap/false-name.js b/test/tap/false-name.js index 1e2a4d43ddbb9..57d2a2ad2f8dd 100644 --- a/test/tap/false-name.js +++ b/test/tap/false-name.js @@ -11,15 +11,13 @@ var fs = require('graceful-fs') var path = require('path') var existsSync = fs.existsSync || path.existsSync -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.join(__dirname, 'false-name') -var cache = path.join(pkg, 'cache') +var pkg = common.pkg +var cache = common.cache var server var EXEC_OPTS = { cwd: pkg } @@ -60,7 +58,7 @@ test('not every pkg.name can be required', function (t) { ) t.ok( existsSync(path.join(pkg, 'node_modules', 'test-package')), - 'test-pacakge subdep installed OK' + 'test-package subdep installed OK' ) t.end() } @@ -69,17 +67,10 @@ test('not every pkg.name can be required', function (t) { test('cleanup', function (t) { server.close() - cleanup() t.end() }) -function cleanup () { - rimraf.sync(pkg) -} - function setup () { - cleanup() - mkdirp.sync(cache) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) diff --git a/test/tap/fetch-package-metadata.js b/test/tap/fetch-package-metadata.js index 
2fc501369e52f..2e666772ed204 100644 --- a/test/tap/fetch-package-metadata.js +++ b/test/tap/fetch-package-metadata.js @@ -1,34 +1,18 @@ 'use strict' -var path = require('path') -var mkdirp = require('mkdirp') - var mr = require('npm-registry-mock') var npa = require('npm-package-arg') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') var npm = require('../../lib/npm.js') -var pkg = path.resolve(__dirname, path.basename(__filename, '.js')) - -function setup (cb) { - cleanup() - mkdirp.sync(pkg) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} +var pkg = common.pkg test('setup', function (t) { - setup() process.chdir(pkg) var opts = { - cache: path.resolve(pkg, 'cache'), + cache: common.cache, registry: common.registry, // important to make sure devDependencies don't get stripped dev: true @@ -56,14 +40,9 @@ test('fetch-package-metadata provides resolved metadata', function (t) { function thenVerifyMetadata (err, pkg) { t.ifError(err, 'fetched metadata') - t.equals(pkg._resolved, 'http://localhost:1337/test-package/-/test-package-0.0.0.tgz', '_resolved') + t.equals(pkg._resolved, 'http://localhost:' + common.port + '/test-package/-/test-package-0.0.0.tgz', '_resolved') t.equals(pkg._integrity, 'sha1-sNMrbEXCWcV4uiADdisgUTG9+9E=', '_integrity') server.close() t.end() } }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) diff --git a/test/tap/format-package-lock.js b/test/tap/format-package-lock.js new file mode 100644 index 0000000000000..ddf40915d9bd3 --- /dev/null +++ b/test/tap/format-package-lock.js @@ -0,0 +1,116 @@ +'use strict' +const fs = require('fs') +const path = require('path') +const test = require('tap').test +const mr = require('npm-registry-mock') +const Tacks = require('tacks') +const File = Tacks.File +const Dir = Tacks.Dir +const common = require('../common-tap.js') + +const basedir = common.pkg +const testdir = path.join(basedir, 'testdir') +const cachedir = common.cache +const globaldir = path.join(basedir, 'global') +const tmpdir = path.join(basedir, 'tmp') + +const pkgPath = path.join(testdir, 'package.json') +const pkgLockPath = path.join(testdir, 'package-lock.json') +const shrinkwrapPath = path.join(testdir, 'npm-shrinkwrap.json') +const CRLFreg = /\r\n|\r|\n/ + +const env = common.newEnv().extend({ + npm_config_cache: cachedir, + npm_config_tmp: tmpdir, + npm_config_prefix: globaldir, + npm_config_registry: common.registry, + npm_config_loglevel: 'warn', + npm_config_format_package_lock: false +}) + +var server +var fixture = new Tacks(Dir({ + cache: Dir(), + global: Dir(), + tmp: Dir(), + testdir: Dir({ + 'package.json': File({ + name: 'install-package-lock-only', + version: '1.0.0', + dependencies: { + mkdirp: '^0.3.4' + } + }) + }) +})) + +function setup () { + cleanup() + fixture.create(basedir) +} + +function cleanup () { + fixture.remove(basedir) +} + +test('setup', function (t) { + mr({port: common.port, throwOnUnmatched: true}, function (err, s) { + if (err) throw err + server = s + t.done() + }) +}) + +test('package-lock.json unformatted, package.json formatted when config has `format-package-lock: false`', function (t) { + setup() + common.npm(['install'], {cwd: testdir, env}).spread((code, stdout, stderr) => { + t.is(code, 0, 'ok') + t.ok(fs.existsSync(pkgLockPath), 'ensure that package-lock.json was created') + const pkgLockUtf8 = fs.readFileSync(pkgLockPath, 'utf-8') + t.equal(pkgLockUtf8.split(CRLFreg).length, 2, 
'package-lock.json is unformatted') + const pkgUtf8 = fs.readFileSync(pkgPath, 'utf-8') + t.notEqual(pkgUtf8.split(CRLFreg).length, 2, 'package.json is formatted') + t.done() + }) +}) + +test('npm-shrinkwrap.json unformatted when config has `format-package-lock: false`', function (t) { + setup() + common.npm(['shrinkwrap'], {cwd: testdir, env}).spread((code, stdout, stderr) => { + t.is(code, 0, 'ok') + t.ok(fs.existsSync(shrinkwrapPath), 'ensure that npm-shrinkwrap.json was created') + const shrinkwrapUtf8 = fs.readFileSync(shrinkwrapPath, 'utf-8') + t.equal(shrinkwrapUtf8.split(CRLFreg).length, 2, 'npm-shrinkwrap.json is unformatted') + t.done() + }) +}) + +test('package-lock.json and package.json formatted when config has `format-package-lock: true`', function (t) { + setup() + common.npm(['install'], {cwd: testdir}).spread((code, stdout, stderr) => { + t.is(code, 0, 'ok') + t.ok(fs.existsSync(pkgLockPath), 'ensure that package-lock.json was created') + const pkgLockUtf8 = fs.readFileSync(pkgLockPath, 'utf-8') + t.notEqual(pkgLockUtf8.split(CRLFreg).length, 2, 'package-lock.json is formatted') + const pkgUtf8 = fs.readFileSync(pkgPath, 'utf-8') + t.notEqual(pkgUtf8.split(CRLFreg).length, 2, 'package.json is formatted') + t.done() + }) +}) + +test('npm-shrinkwrap.json formatted when config has `format-package-lock: true`', function (t) { + setup() + common.npm(['shrinkwrap'], {cwd: testdir}).spread((code, stdout, stderr) => { + t.is(code, 0, 'ok') + t.ok(fs.existsSync(shrinkwrapPath), 'ensure that npm-shrinkwrap.json was created') + const shrinkwrapUtf8 = fs.readFileSync(shrinkwrapPath, 'utf-8') + t.notEqual(shrinkwrapUtf8.split(CRLFreg).length, 2, 'npm-shrinkwrap.json is unformatted') + t.done() + }) +}) + +test('cleanup', function (t) { + server.close() + cleanup() + t.done() +}) diff --git a/test/tap/full-warning-messages.js b/test/tap/full-warning-messages.js index c986695d67e99..945cf66a629dd 100644 --- a/test/tap/full-warning-messages.js +++ b/test/tap/full-warning-messages.js @@ -6,7 +6,7 @@ var rimraf = require('rimraf') var fs = require('graceful-fs') var common = require('../common-tap') -var base = path.resolve(__dirname, path.basename(__filename, '.js')) +var base = common.pkg var modA = path.resolve(base, 'modA') var modB = path.resolve(base, 'modB') diff --git a/test/tap/fund.js b/test/tap/fund.js new file mode 100644 index 0000000000000..48d903f9897f6 --- /dev/null +++ b/test/tap/fund.js @@ -0,0 +1,405 @@ +'use strict' + +const fs = require('fs') +const path = require('path') + +const test = require('tap').test +const Tacks = require('tacks') +const Dir = Tacks.Dir +const File = Tacks.File +const common = require('../common-tap.js') +const isWindows = require('../../lib/utils/is-windows.js') + +const base = common.pkg +const noFunding = path.join(base, 'no-funding-package') +const maintainerOwnsAllDeps = path.join(base, 'maintainer-owns-all-deps') +const nestedNoFundingPackages = path.join(base, 'nested-no-funding-packages') +const nestedMultipleFundingPackages = path.join(base, 'nested-multiple-funding-packages') +const fundingStringShorthand = path.join(base, 'funding-string-shorthand') + +function getFixturePackage ({ name, version, dependencies, funding }, extras) { + const getDeps = () => Object + .keys(dependencies) + .reduce((res, dep) => (Object.assign({}, res, { + [dep]: '*' + })), {}) + + return Dir(Object.assign({ + 'package.json': File({ + name, + version: version || '1.0.0', + funding: (funding === undefined) ? 
{ + type: 'individual', + url: 'http://example.com/donate' + } : funding, + dependencies: dependencies && getDeps(dependencies) + }) + }, extras)) +} + +const fixture = new Tacks(Dir({ + 'funding-string-shorthand': Dir({ + 'package.json': File({ + name: 'funding-string-shorthand', + version: '0.0.0', + funding: 'https://example.com/sponsor' + }) + }), + 'no-funding-package': Dir({ + 'package.json': File({ + name: 'no-funding-package', + version: '0.0.0' + }) + }), + 'maintainer-owns-all-deps': getFixturePackage({ + name: 'maintainer-owns-all-deps', + dependencies: { + 'dep-foo': '*', + 'dep-bar': '*' + } + }, { + node_modules: Dir({ + 'dep-foo': getFixturePackage({ + name: 'dep-foo', + dependencies: { + 'dep-sub-foo': '*' + } + }, { + node_modules: Dir({ + 'dep-sub-foo': getFixturePackage({ + name: 'dep-sub-foo' + }) + }) + }), + 'dep-bar': getFixturePackage({ + name: 'dep-bar' + }) + }) + }), + 'nested-no-funding-packages': getFixturePackage({ + name: 'nested-no-funding-packages', + funding: null, + dependencies: { + foo: '*' + }, + devDependencies: { + lorem: '*' + } + }, { + node_modules: Dir({ + foo: getFixturePackage({ + name: 'foo', + dependencies: { + bar: '*' + }, + funding: null + }, { + node_modules: Dir({ + bar: getFixturePackage({ + name: 'bar' + }, { + node_modules: Dir({ + 'sub-bar': getFixturePackage({ + name: 'sub-bar', + funding: 'https://example.com/sponsor' + }) + }) + }) + }) + }), + lorem: getFixturePackage({ + name: 'lorem', + funding: { + url: 'https://example.com/lorem' + } + }) + }) + }), + 'nested-multiple-funding-packages': getFixturePackage({ + name: 'nested-multiple-funding-packages', + funding: [ + 'https://one.example.com', + 'https://two.example.com' + ], + dependencies: { + foo: '*' + }, + devDependencies: { + bar: '*' + } + }, { + node_modules: Dir({ + foo: getFixturePackage({ + name: 'foo', + funding: [ + 'http://example.com', + { url: 'http://sponsors.example.com/me' }, + 'http://collective.example.com' + ] + }), + bar: getFixturePackage({ + name: 'bar', + funding: [ + 'http://collective.example.com', + { url: 'http://sponsors.example.com/you' } + ] + }) + }) + }) +})) + +function checkOutput (t, { code, stdout, stderr }) { + t.is(code, 0, `exited code 0`) + t.is(stderr, '', 'no warnings') +} + +function jsonTest (t, { assertionMsg, expected, stdout }) { + let parsed = JSON.parse(stdout) + t.deepEqual(parsed, expected, assertionMsg) +} + +function snapshotTest (t, { stdout, assertionMsg }) { + t.matchSnapshot(stdout, assertionMsg) +} + +function testFundCmd ({ title, assertionMsg, args = [], opts = {}, output = checkOutput, assertion = snapshotTest, expected }) { + const validate = (t) => (err, code, stdout, stderr) => { + if (err) throw err + + output(t, { code, stdout, stderr }) + assertion(t, { assertionMsg, expected, stdout }) + } + + return test(title, (t) => { + t.plan(3) + common.npm(['fund', '--unicode=false'].concat(args), opts, validate(t)) + }) +} + +test('setup', function (t) { + fixture.remove(base) + fixture.create(base) + t.end() +}) + +testFundCmd({ + title: 'fund with no package containing funding', + assertionMsg: 'should print empty funding info', + opts: { cwd: noFunding } +}) + +testFundCmd({ + title: 'fund in which same maintainer owns all its deps', + assertionMsg: 'should print stack packages together', + opts: { cwd: maintainerOwnsAllDeps } +}) + +testFundCmd({ + title: 'fund in which same maintainer owns all its deps, using --json option', + assertionMsg: 'should print stack packages together', + args: ['--json'], + opts: { cwd: 
maintainerOwnsAllDeps }, + assertion: jsonTest, + expected: { + length: 3, + name: 'maintainer-owns-all-deps', + version: '1.0.0', + funding: { type: 'individual', url: 'http://example.com/donate' }, + dependencies: { + 'dep-bar': { + version: '1.0.0', + funding: { type: 'individual', url: 'http://example.com/donate' } + }, + 'dep-foo': { + version: '1.0.0', + funding: { type: 'individual', url: 'http://example.com/donate' }, + dependencies: { + 'dep-sub-foo': { + version: '1.0.0', + funding: { type: 'individual', url: 'http://example.com/donate' } + } + } + } + } + } +}) + +testFundCmd({ + title: 'fund containing multi-level nested deps with no funding', + assertionMsg: 'should omit dependencies with no funding declared', + opts: { cwd: nestedNoFundingPackages } +}) + +testFundCmd({ + title: 'fund containing multi-level nested deps with no funding, using --json option', + assertionMsg: 'should omit dependencies with no funding declared', + args: ['--json'], + opts: { cwd: nestedNoFundingPackages }, + assertion: jsonTest, + expected: { + length: 3, + name: 'nested-no-funding-packages', + version: '1.0.0', + dependencies: { + lorem: { version: '1.0.0', funding: { url: 'https://example.com/lorem' } }, + bar: { + version: '1.0.0', + funding: { type: 'individual', url: 'http://example.com/donate' }, + dependencies: { + 'sub-bar': { + version: '1.0.0', + funding: { url: 'https://example.com/sponsor' } + } + } + } + } + } +}) + +testFundCmd({ + title: 'fund containing multi-level nested deps with multiple funding sources, using --json option', + assertionMsg: 'should omit dependencies with no funding declared', + args: ['--json'], + opts: { cwd: nestedMultipleFundingPackages }, + assertion: jsonTest, + expected: { + length: 2, + name: 'nested-multiple-funding-packages', + version: '1.0.0', + funding: [ + { + url: 'https://one.example.com' + }, + { + url: 'https://two.example.com' + } + ], + dependencies: { + bar: { + version: '1.0.0', + funding: [ + { + url: 'http://collective.example.com' + }, + { + url: 'http://sponsors.example.com/you' + } + ] + }, + foo: { + version: '1.0.0', + funding: [ + { + url: 'http://example.com' + }, + { + url: 'http://sponsors.example.com/me' + }, + { + url: 'http://collective.example.com' + } + ] + } + } + } +}) + +testFundCmd({ + title: 'fund does not support global', + assertionMsg: 'should throw EFUNDGLOBAL error', + args: ['--global'], + output: (t, { code, stdout, stderr }) => { + t.is(code, 1, `exited code 0`) + const [ errCode, errCmd ] = stderr.split('\n') + t.matchSnapshot(`${errCode}\n${errCmd}`, 'should write error msgs to stderr') + } +}) + +testFundCmd({ + title: 'fund does not support global, using --json option', + assertionMsg: 'should throw EFUNDGLOBAL error', + args: ['--global', '--json'], + output: (t, { code, stdout, stderr }) => { + t.is(code, 1, `exited code 0`) + const [ errCode, errCmd ] = stderr.split('\n') + t.matchSnapshot(`${errCode}\n${errCmd}`, 'should write error msgs to stderr') + }, + assertion: jsonTest, + expected: { + error: { + code: 'EFUNDGLOBAL', + summary: '`npm fund` does not support global packages', + detail: '' + } + } +}) + +testFundCmd({ + title: 'fund using package argument with no browser', + assertionMsg: 'should open funding url', + args: ['.', '--no-browser'], + opts: { cwd: maintainerOwnsAllDeps } +}) + +testFundCmd({ + title: 'fund using string shorthand', + assertionMsg: 'should open string-only url', + args: ['.', '--no-browser'], + opts: { cwd: fundingStringShorthand } +}) + +testFundCmd({ + title: 'fund 
using nested packages with multiple sources', + assertionMsg: 'should prompt with all available URLs', + args: ['.'], + opts: { cwd: nestedMultipleFundingPackages } +}) + +testFundCmd({ + title: 'fund using nested packages with multiple sources, with a source number', + assertionMsg: 'should open the numbered URL', + args: ['.', '--which=1', '--no-browser'], + opts: { cwd: nestedMultipleFundingPackages } +}) + +testFundCmd({ + title: 'fund using package argument with no browser, using --json option', + assertionMsg: 'should open funding url', + args: ['.', '--json', '--no-browser'], + opts: { cwd: maintainerOwnsAllDeps }, + assertion: jsonTest, + expected: { + title: 'individual funding available at the following URL', + url: 'http://example.com/donate' + } +}) + +if (!isWindows) { + test('fund using package argument', function (t) { + const fakeBrowser = path.join(common.pkg, '_script.sh') + const outFile = path.join(common.pkg, '_output') + + const s = '#!/usr/bin/env bash\n' + + 'echo "$@" > ' + JSON.stringify(common.pkg) + '/_output\n' + fs.writeFileSync(fakeBrowser, s) + fs.chmodSync(fakeBrowser, '0755') + + common.npm([ + 'fund', '.', + '--loglevel=silent', + '--browser=' + fakeBrowser + ], { cwd: maintainerOwnsAllDeps }, function (err, code, stdout, stderr) { + t.ifError(err, 'repo command ran without error') + t.equal(code, 0, 'exit ok') + var res = fs.readFileSync(outFile, 'utf8') + t.equal(res, 'http://example.com/donate\n') + t.end() + }) + }) +} + +test('cleanup', function (t) { + t.pass(base) + fixture.remove(base) + t.end() +}) diff --git a/test/tap/gently-rm-cmdshims.js b/test/tap/gently-rm-cmdshims.js index 304c6956bdfd6..6726fe76d8dba 100644 --- a/test/tap/gently-rm-cmdshims.js +++ b/test/tap/gently-rm-cmdshims.js @@ -7,7 +7,8 @@ var mkdirp = require('mkdirp') var rimraf = require('rimraf') var npm = require('../../lib/npm.js') -var work = path.join(__dirname, path.basename(__filename, '.js')) +const common = require('../common-tap.js') +var work = common.pkg var doremove = path.join(work, 'doremove') var dontremove = path.join(work, 'dontremove') var example_json = { diff --git a/test/tap/gently-rm-linked-module.js b/test/tap/gently-rm-linked-module.js index 877c683c9f04e..ab595e10c740f 100644 --- a/test/tap/gently-rm-linked-module.js +++ b/test/tap/gently-rm-linked-module.js @@ -1,6 +1,5 @@ var common = require('../common-tap.js') -var basename = require('path').basename var resolve = require('path').resolve var fs = require('graceful-fs') var test = require('tap').test @@ -11,7 +10,7 @@ var Dir = Tacks.Dir var Symlink = Tacks.Symlink var isWindows = require('../../lib/utils/is-windows.js') -var base = resolve(__dirname, basename(__filename, '.js')) +var base = common.pkg var fixture = new Tacks(Dir({ 'working-dir': Dir({ 'node_modules': Dir({}) // so it doesn't try to install into npm's own node_modules diff --git a/test/tap/gently-rm-overeager.js b/test/tap/gently-rm-overeager.js index 7299b2c3f4ce1..91a3768aff705 100644 --- a/test/tap/gently-rm-overeager.js +++ b/test/tap/gently-rm-overeager.js @@ -6,7 +6,7 @@ var rimraf = require('rimraf') var common = require('../common-tap.js') -var testdir = path.join(__dirname, path.basename(__filename, '.js')) +var testdir = common.pkg var pkg = path.join(testdir, 'gently-rm-overeager') var dep = path.join(testdir, 'test-whoops') diff --git a/test/tap/gently-rm-symlinked-global-dir.js b/test/tap/gently-rm-symlinked-global-dir.js index 8ac290091ca5c..0a27dae5c5e95 100644 --- a/test/tap/gently-rm-symlinked-global-dir.js +++ 
b/test/tap/gently-rm-symlinked-global-dir.js @@ -7,10 +7,10 @@ var mkdirp = require('mkdirp') var rimraf = require('rimraf') var isWindows = require('../../lib/utils/is-windows.js') -var pkg = resolve(__dirname, 'gently-rm-linked') -var dep = resolve(__dirname, 'test-linked') -var glb = resolve(__dirname, 'test-global') -var lnk = resolve(__dirname, 'test-global-link') +var pkg = resolve(common.pkg, 'package') +var dep = resolve(common.pkg, 'test-linked') +var glb = resolve(common.pkg, 'test-global') +var lnk = resolve(common.pkg, 'test-global-link') var EXEC_OPTS = { cwd: pkg } diff --git a/test/tap/get.js b/test/tap/get.js deleted file mode 100644 index c939ed071e800..0000000000000 --- a/test/tap/get.js +++ /dev/null @@ -1,103 +0,0 @@ -var common = require('../common-tap.js') -var test = require('tap').test -var npm = require('../../') -var rimraf = require('rimraf') -var path = require('path') -var mr = require('npm-registry-mock') - -function nop () {} - -var URI = 'https://npm.registry:8043/rewrite' -var TIMEOUT = 3600 -var FOLLOW = false -var STALE_OK = true -var TOKEN = 'lolbutts' -var AUTH = { token: TOKEN } -var PARAMS = { - timeout: TIMEOUT, - follow: FOLLOW, - staleOk: STALE_OK, - auth: AUTH -} -var PKG_DIR = path.resolve(__dirname, 'get-basic') -var BIGCO_SAMPLE = { - name: '@bigco/sample', - version: '1.2.3' -} - -// mock server reference -var server - -var mocks = { - 'get': { - '/@bigco%2fsample/1.2.3': [200, BIGCO_SAMPLE] - } -} - -test('setup', function (t) { - mr({port: common.port, mocks: mocks}, function (er, s) { - t.ifError(er) - npm.load({registry: common.registry}, function (er) { - t.ifError(er) - server = s - t.end() - }) - }) -}) - -test('get call contract', function (t) { - t.throws(function () { - npm.registry.get(undefined, PARAMS, nop) - }, 'requires a URI') - - t.throws(function () { - npm.registry.get([], PARAMS, nop) - }, 'requires URI to be a string') - - t.throws(function () { - npm.registry.get(URI, undefined, nop) - }, 'requires params object') - - t.throws(function () { - npm.registry.get(URI, '', nop) - }, 'params must be object') - - t.throws(function () { - npm.registry.get(URI, PARAMS, undefined) - }, 'requires callback') - - t.throws(function () { - npm.registry.get(URI, PARAMS, 'callback') - }, 'callback must be function') - - t.end() -}) - -test('basic request', function (t) { - t.plan(6) - - var versioned = common.registry + '/underscore/1.3.3' - npm.registry.get(versioned, PARAMS, function (er, data) { - t.ifError(er, 'loaded specified version underscore data') - t.equal(data.version, '1.3.3') - }) - - var rollup = common.registry + '/underscore' - npm.registry.get(rollup, PARAMS, function (er, data) { - t.ifError(er, 'loaded all metadata') - t.deepEqual(data.name, 'underscore') - }) - - var scoped = common.registry + '/@bigco%2fsample/1.2.3' - npm.registry.get(scoped, PARAMS, function (er, data) { - t.ifError(er, 'loaded all metadata') - t.equal(data.name, '@bigco/sample') - }) -}) - -test('cleanup', function (t) { - server.close() - rimraf.sync(PKG_DIR) - - t.end() -}) diff --git a/test/tap/gist-short-shortcut-package.js b/test/tap/gist-short-shortcut-package.js index 4074995ee5cc4..601d53a827666 100644 --- a/test/tap/gist-short-shortcut-package.js +++ b/test/tap/gist-short-shortcut-package.js @@ -3,14 +3,12 @@ var fs = require('graceful-fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') var requireInject = require('require-inject') -var rimraf = require('rimraf') var test = require('tap').test var 
common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'gist-short-shortcut-package') +var pkg = common.pkg var json = { name: 'gist-short-shortcut-package', @@ -49,7 +47,7 @@ test('gist-short-shortcut-package', function (t) { }) var opts = { - cache: path.resolve(pkg, 'cache'), + cache: common.cache, prefix: pkg, registry: common.registry, loglevel: 'silent' @@ -63,13 +61,7 @@ test('gist-short-shortcut-package', function (t) { }) }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - function setup () { - cleanup() mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), @@ -77,8 +69,3 @@ function setup () { ) process.chdir(pkg) } - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/gist-short-shortcut.js b/test/tap/gist-short-shortcut.js index 875c30e036512..82c9ae17502dd 100644 --- a/test/tap/gist-short-shortcut.js +++ b/test/tap/gist-short-shortcut.js @@ -3,14 +3,12 @@ var fs = require('graceful-fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') var requireInject = require('require-inject') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'gist-short-shortcut') +var pkg = common.pkg var json = { name: 'gist-short-shortcut', @@ -46,7 +44,7 @@ test('gist-shortcut', function (t) { }) var opts = { - cache: path.resolve(pkg, 'cache'), + cache: common.cache, prefix: pkg, registry: common.registry, loglevel: 'silent' @@ -60,13 +58,7 @@ test('gist-shortcut', function (t) { }) }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - function setup () { - cleanup() mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), @@ -74,8 +66,3 @@ function setup () { ) process.chdir(pkg) } - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/gist-shortcut-package.js b/test/tap/gist-shortcut-package.js index dd6f41359e9ed..28e57357cc393 100644 --- a/test/tap/gist-shortcut-package.js +++ b/test/tap/gist-shortcut-package.js @@ -2,15 +2,12 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') -var osenv = require('osenv') var requireInject = require('require-inject') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'gist-shortcut-package') +var pkg = common.pkg var json = { name: 'gist-shortcut-package', @@ -20,12 +17,8 @@ var json = { } } -test('setup', function (t) { - setup() - t.end() -}) - test('gist-shortcut-package', function (t) { + setup() var cloneUrls = [ ['git://gist.github.com/deadbeef.git', 'GitHub gist shortcuts try git URLs first'], ['https://gist.github.com/deadbeef.git', 'GitHub gist shortcuts try HTTPS URLs second'], @@ -49,7 +42,7 @@ test('gist-shortcut-package', function (t) { }) var opts = { - cache: path.resolve(pkg, 'cache'), + cache: common.cache, prefix: pkg, registry: common.registry, loglevel: 'silent' @@ -63,22 +56,10 @@ test('gist-shortcut-package', function (t) { }) }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - function setup () { - cleanup() - mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) ) process.chdir(pkg) } - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/gist-shortcut.js b/test/tap/gist-shortcut.js index 8ad5ef001ce23..ca86d6bc55058 100644 --- 
a/test/tap/gist-shortcut.js +++ b/test/tap/gist-shortcut.js @@ -2,27 +2,24 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') -var osenv = require('osenv') var requireInject = require('require-inject') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'gist-shortcut') +var pkg = common.pkg var json = { name: 'gist-shortcut', version: '0.0.0' } -test('setup', function (t) { - setup() - t.end() -}) - test('gist-shortcut', function (t) { + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(json, null, 2) + ) + process.chdir(pkg) var cloneUrls = [ ['git://gist.github.com/deadbeef.git', 'GitHub gist shortcuts try git URLs first'], ['https://gist.github.com/deadbeef.git', 'GitHub gist shortcuts try HTTPS URLs second'], @@ -46,7 +43,7 @@ test('gist-shortcut', function (t) { }) var opts = { - cache: path.resolve(pkg, 'cache'), + cache: common.cache, prefix: pkg, registry: common.registry, loglevel: 'silent' @@ -59,23 +56,3 @@ test('gist-shortcut', function (t) { }) }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function setup () { - cleanup() - mkdirp.sync(pkg) - fs.writeFileSync( - path.join(pkg, 'package.json'), - JSON.stringify(json, null, 2) - ) - process.chdir(pkg) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/git-dependency-install-link.js b/test/tap/git-dependency-install-link.js index 1bf839f302891..d80beab057f4d 100644 --- a/test/tap/git-dependency-install-link.js +++ b/test/tap/git-dependency-install-link.js @@ -1,7 +1,6 @@ var fs = require('fs') var resolve = require('path').resolve -var osenv = require('osenv') var mkdirp = require('mkdirp') var rimraf = require('rimraf') var test = require('tap').test @@ -11,10 +10,10 @@ var mr = require('npm-registry-mock') var npm = require('../../lib/npm.js') var common = require('../common-tap.js') -var pkg = resolve(__dirname, 'git-dependency-install-link') -var repo = resolve(__dirname, 'git-dependency-install-link-repo') -var prefix = resolve(__dirname, 'git-dependency-install-link-prefix') -var cache = resolve(pkg, 'cache') +var pkg = resolve(common.pkg, 'package') +var repo = resolve(common.pkg, 'repo') +var prefix = resolve(common.pkg, 'prefix') +var cache = common.cache var daemon var daemonPID @@ -32,7 +31,7 @@ var pjParent = JSON.stringify({ name: 'parent', version: '1.2.3', dependencies: { - 'child': 'git://localhost:1234/child.git' + 'child': 'git://localhost:' + common.gitPort + '/child.git' } }, null, 2) + '\n' @@ -42,8 +41,8 @@ var pjChild = JSON.stringify({ }, null, 2) + '\n' test('setup', function (t) { - bootstrap() - setup(function (er, r) { + t.test('bootstrap', t => bootstrap(t.end)) + t.test('setup', t => setup(function (er, r) { t.ifError(er, 'git started up successfully') if (!er) { @@ -59,7 +58,8 @@ test('setup', function (t) { t.end() }) - }) + })) + t.end() }) test('install from git repo [no --link]', function (t) { @@ -103,20 +103,20 @@ test('install from git repo [with --link]', function (t) { test('clean', function (t) { mockRegistry.close() - daemon.on('close', function () { - cleanup() - t.end() - }) + daemon.on('close', t.end) process.kill(daemonPID) }) -function bootstrap () { - rimraf.sync(repo) - rimraf.sync(pkg) - mkdirp.sync(pkg) - mkdirp.sync(cache) +function bootstrap (cb) { + rimraf(repo, () => { + rimraf(pkg, () => { + mkdirp.sync(pkg) + mkdirp.sync(cache) - 
fs.writeFileSync(resolve(pkg, 'package.json'), pjParent) + fs.writeFileSync(resolve(pkg, 'package.json'), pjParent) + cb() + }) + }) } function setup (cb) { @@ -139,7 +139,7 @@ function setup (cb) { '--export-all', '--base-path=.', '--reuseaddr', - '--port=1234' + '--port=' + common.gitPort ], { cwd: pkg, @@ -170,10 +170,3 @@ function setup (cb) { }, cb) }) } - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(repo) - rimraf.sync(prefix) - rimraf.sync(pkg) -} diff --git a/test/tap/git-npmignore.js b/test/tap/git-npmignore.js index 19d014c3d9a92..2ab7db7304f0f 100644 --- a/test/tap/git-npmignore.js +++ b/test/tap/git-npmignore.js @@ -1,7 +1,6 @@ /* eslint-disable camelcase */ var child_process = require('child_process') var readdir = require('graceful-fs').readdirSync -var path = require('path') var resolve = require('path').resolve var rimraf = require('rimraf') @@ -15,7 +14,6 @@ var Dir = Tacks.Dir var File = Tacks.File var fixture = new Tacks(Dir({ - cache: Dir({}), deps: Dir({ gitch: Dir({ '.npmignore': File( @@ -42,8 +40,8 @@ var fixture = new Tacks(Dir({ }) })) -var testdir = resolve(__dirname, path.basename(__filename, '.js')) -var cachedir = resolve(testdir, 'cache') +var testdir = common.pkg +var cachedir = common.cache var dep = resolve(testdir, 'deps', 'gitch') var packname = 'gitch-1.0.0.tgz' var packed = resolve(testdir, packname) diff --git a/test/tap/git-prepare.js b/test/tap/git-prepare.js index 1a61056b4beda..072f4dfc447fc 100644 --- a/test/tap/git-prepare.js +++ b/test/tap/git-prepare.js @@ -3,18 +3,16 @@ const fs = require('fs') const path = require('path') -const osenv = require('osenv') -const rimraf = require('rimraf') const test = require('tap').test const mr = require('npm-registry-mock') const npm = require('../../lib/npm.js') const common = require('../common-tap.js') -const testdir = path.resolve(__dirname, path.basename(__filename, '.js')) +const testdir = common.pkg const repo = path.join(testdir, 'repo') const prefix = path.join(testdir, 'prefix') -const cache = path.join(testdir, 'cache') +const cache = common.cache var Tacks = require('tacks') var Dir = Tacks.Dir @@ -30,14 +28,13 @@ process.env.npm_config_prefix = prefix const fixture = new Tacks(Dir({ repo: Dir({}), prefix: Dir({}), - cache: Dir({}), deps: Dir({ parent: Dir({ 'package.json': File({ name: 'parent', version: '1.2.3', dependencies: { - 'child': 'git://localhost:1234/child.git' + 'child': 'git://localhost:' + common.gitPort + '/child.git' } }) }), @@ -69,7 +66,7 @@ const fixture = new Tacks(Dir({ })) test('setup', function (t) { - bootstrap() + fixture.create(testdir) setup(function (er, r) { t.ifError(er, 'git started up successfully') @@ -116,17 +113,10 @@ test('install from git repo with prepare script', function (t) { test('clean', function (t) { mockRegistry.close() - daemon.on('close', function () { - cleanup() - t.end() - }) + daemon.on('close', t.end) process.kill(daemonPID) }) -function bootstrap () { - fixture.create(testdir) -} - function setup (cb) { npm.load({ prefix: testdir, @@ -144,7 +134,7 @@ function setup (cb) { '--export-all', '--base-path=.', '--reuseaddr', - '--port=1234' + '--port=' + common.gitPort ], { cwd: repo, @@ -174,8 +164,3 @@ function setup (cb) { }, cb) }) } - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(testdir) -} diff --git a/test/tap/github-shortcut-package.js b/test/tap/github-shortcut-package.js index 73385436b7893..444520308a245 100644 --- a/test/tap/github-shortcut-package.js +++ 
b/test/tap/github-shortcut-package.js @@ -2,15 +2,12 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') -var osenv = require('osenv') var requireInject = require('require-inject') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'github-shortcut-package') +var pkg = common.pkg var json = { name: 'github-shortcut-package', @@ -20,12 +17,12 @@ var json = { } } -test('setup', function (t) { - setup() - t.end() -}) - test('github-shortcut-package', function (t) { + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(json, null, 2) + ) + process.chdir(pkg) var cloneUrls = [ ['git://github.com/foo/private.git', 'GitHub shortcuts try git URLs first'], ['https://github.com/foo/private.git', 'GitHub shortcuts try HTTPS URLs second'], @@ -49,7 +46,7 @@ test('github-shortcut-package', function (t) { }) var opts = { - cache: path.resolve(pkg, 'cache'), + cache: common.cache, prefix: pkg, registry: common.registry, loglevel: 'silent' @@ -62,23 +59,3 @@ test('github-shortcut-package', function (t) { }) }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function setup () { - cleanup() - mkdirp.sync(pkg) - fs.writeFileSync( - path.join(pkg, 'package.json'), - JSON.stringify(json, null, 2) - ) - process.chdir(pkg) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/github-shortcut.js b/test/tap/github-shortcut.js index 641d64f3b92fe..59c7e39ea948f 100644 --- a/test/tap/github-shortcut.js +++ b/test/tap/github-shortcut.js @@ -5,27 +5,24 @@ const BB = require('bluebird') const fs = require('graceful-fs') const path = require('path') -const mkdirp = require('mkdirp') -const osenv = require('osenv') const requireInject = require('require-inject') -const rimraf = require('rimraf') const test = require('tap').test const common = require('../common-tap.js') -const pkg = path.resolve(__dirname, 'github-shortcut') +const pkg = common.pkg const json = { name: 'github-shortcut', version: '0.0.0' } -test('setup', function (t) { - setup() - t.end() -}) - test('github-shortcut', function (t) { + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(json, null, 2) + ) + process.chdir(pkg) const cloneUrls = [ ['git://github.com/foo/private.git', 'GitHub shortcuts try git URLs first'], ['https://github.com/foo/private.git', 'GitHub shortcuts try HTTPS URLs second'], @@ -48,7 +45,7 @@ test('github-shortcut', function (t) { }) const opts = { - cache: path.resolve(pkg, 'cache'), + cache: common.cache, prefix: pkg, registry: common.registry, loglevel: 'silent' @@ -62,23 +59,3 @@ test('github-shortcut', function (t) { }) }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function setup () { - cleanup() - mkdirp.sync(pkg) - fs.writeFileSync( - path.join(pkg, 'package.json'), - JSON.stringify(json, null, 2) - ) - process.chdir(pkg) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/gitlab-shortcut-package.js b/test/tap/gitlab-shortcut-package.js index 1dd1ba99fbd29..9b431ff7b66f0 100644 --- a/test/tap/gitlab-shortcut-package.js +++ b/test/tap/gitlab-shortcut-package.js @@ -2,15 +2,12 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') -var osenv = require('osenv') var requireInject = require('require-inject') -var rimraf = require('rimraf') var test = require('tap').test var common = 
require('../common-tap.js') -var pkg = path.resolve(__dirname, 'gitlab-shortcut-package') +var pkg = common.pkg var json = { name: 'gitlab-shortcut-package', @@ -20,12 +17,12 @@ var json = { } } -test('setup', function (t) { - setup() - t.end() -}) - test('gitlab-shortcut-package', function (t) { + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(json, null, 2) + ) + process.chdir(pkg) var cloneUrls = [ ['https://gitlab.com/foo/private.git', 'GitLab shortcuts try HTTPS URLs second'], ['ssh://git@gitlab.com/foo/private.git', 'GitLab shortcuts try SSH first'] @@ -48,7 +45,7 @@ test('gitlab-shortcut-package', function (t) { }) var opts = { - cache: path.resolve(pkg, 'cache'), + cache: common.cache, prefix: pkg, registry: common.registry, loglevel: 'silent' @@ -61,23 +58,3 @@ test('gitlab-shortcut-package', function (t) { }) }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function setup () { - cleanup() - mkdirp.sync(pkg) - fs.writeFileSync( - path.join(pkg, 'package.json'), - JSON.stringify(json, null, 2) - ) - process.chdir(pkg) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/gitlab-shortcut.js b/test/tap/gitlab-shortcut.js index 6b2bfbc3edd86..344311b45f26c 100644 --- a/test/tap/gitlab-shortcut.js +++ b/test/tap/gitlab-shortcut.js @@ -2,27 +2,24 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') -var osenv = require('osenv') var requireInject = require('require-inject') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'gitlab-shortcut') +var pkg = common.pkg var json = { name: 'gitlab-shortcut', version: '0.0.0' } -test('setup', function (t) { - setup() - t.end() -}) - test('gitlab-shortcut', function (t) { + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(json, null, 2) + ) + process.chdir(pkg) var cloneUrls = [ ['https://gitlab.com/foo/private.git', 'GitLab shortcuts try HTTPS URLs second'], ['ssh://git@gitlab.com/foo/private.git', 'GitLab shortcuts try SSH first'] @@ -45,7 +42,7 @@ test('gitlab-shortcut', function (t) { }) var opts = { - cache: path.resolve(pkg, 'cache'), + cache: common.cache, prefix: pkg, registry: common.registry, loglevel: 'silent' @@ -58,23 +55,3 @@ test('gitlab-shortcut', function (t) { }) }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function setup () { - cleanup() - mkdirp.sync(pkg) - fs.writeFileSync( - path.join(pkg, 'package.json'), - JSON.stringify(json, null, 2) - ) - process.chdir(pkg) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/graceful-restart.js b/test/tap/graceful-restart.js index 56513fbf7ebbb..787aa988949b6 100644 --- a/test/tap/graceful-restart.js +++ b/test/tap/graceful-restart.js @@ -1,14 +1,8 @@ var fs = require('fs') var resolve = require('path').resolve - -var osenv = require('osenv') -var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var test = require('tap').test - var common = require('../common-tap.js') - -var pkg = resolve(__dirname, 'graceful-restart') +var pkg = common.pkg var outGraceless = [ 'prerestart', @@ -60,11 +54,6 @@ var pjGraceful = JSON.stringify({ } }, null, 2) + '\n' -test('setup', function (t) { - bootstrap() - t.end() -}) - test('graceless restart', function (t) { fs.writeFileSync(resolve(pkg, 'package.json'), pjGraceless) createChild(['run-script', 'restart'], function (err, code, 
out) { @@ -85,20 +74,6 @@ test('graceful restart', function (t) { }) }) -test('clean', function (t) { - cleanup() - t.end() -}) - -function bootstrap () { - mkdirp.sync(pkg) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} - function createChild (args, cb) { var env = { HOME: process.env.HOME, diff --git a/test/tap/ignore-install-link.js b/test/tap/ignore-install-link.js index 684c6a05b24c0..038b9448d7817 100644 --- a/test/tap/ignore-install-link.js +++ b/test/tap/ignore-install-link.js @@ -1,19 +1,16 @@ -if (process.platform === 'win32') { - console.log('ok - symlinks are weird on windows, skip this test') - process.exit(0) -} var common = require('../common-tap.js') +common.skipIfWindows('symlinks are weird on windows') var test = require('tap').test var path = require('path') var fs = require('fs') var rimraf = require('rimraf') var mkdirp = require('mkdirp') -var root = path.resolve(__dirname, 'ignore-install-link') +var root = common.pkg var pkg = path.resolve(root, 'pkg') var dep = path.resolve(root, 'dep') var target = path.resolve(pkg, 'node_modules', 'dep') -var cache = path.resolve(root, 'cache') +var cache = common.cache var globalPath = path.resolve(root, 'global') var pkgj = { diff --git a/test/tap/ignore-scripts.js b/test/tap/ignore-scripts.js index 785921d7eb2d6..f5af4553c26c0 100644 --- a/test/tap/ignore-scripts.js +++ b/test/tap/ignore-scripts.js @@ -9,7 +9,7 @@ var common = require('../common-tap') // ignore-scripts/package.json has scripts that always exit with non-zero error // codes. -var pkg = path.resolve(__dirname, 'ignore-scripts') +var pkg = common.pkg var gypfile = 'bad_binding_file\n' var json = { diff --git a/test/tap/init-interrupt.js b/test/tap/init-interrupt.js index b0c7643ddf044..38c38053e590d 100644 --- a/test/tap/init-interrupt.js +++ b/test/tap/init-interrupt.js @@ -2,21 +2,12 @@ // if 'npm init' is interrupted with ^C, don't report // 'init written successfully' var test = require('tap').test -var path = require('path') -var osenv = require('osenv') -var rimraf = require('rimraf') var npmlog = require('npmlog') var requireInject = require('require-inject') var npm = require('../../lib/npm.js') -var PKG_DIR = path.resolve(__dirname, 'init-interrupt') - -test('setup', function (t) { - cleanup() - - t.end() -}) +require('../common-tap.js') test('issue #6684 remove confusing message', function (t) { var initJsonMock = function (dir, input, config, cb) { @@ -45,14 +36,3 @@ test('issue #6684 remove confusing message', function (t) { }) }) }) - -test('cleanup', function (t) { - cleanup() - - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(PKG_DIR) -} diff --git a/test/tap/install-actions.js b/test/tap/install-actions.js index 6ca6e33534381..071dc2cc371ed 100644 --- a/test/tap/install-actions.js +++ b/test/tap/install-actions.js @@ -10,7 +10,7 @@ var mockLog = { var actions test('setup', function (t) { - npm.load(function () { + npm.load({ 'unsafe-perm': true }, function () { log.disableProgress() actions = require('../../lib/install/actions.js').actions t.end() @@ -91,6 +91,7 @@ test('->dep:b,->optdep:a->dep:b', function (t) { moduleA.requires = [moduleB] var tree = { + name: 'tree', path: '/', package: { dependencies: { @@ -109,13 +110,8 @@ test('->dep:b,->optdep:a->dep:b', function (t) { moduleA.parent = tree moduleB.parent = tree - t.plan(3) return actions.postinstall('/', moduleA, mockLog).then(() => { - throw new Error('was not supposed to succeed') - }, (err) => { - t.ok(err && err.code 
=== 'ELIFECYCLE', 'Lifecycle failed') t.ok(moduleA.failed, 'moduleA (optional dep) is marked failed') t.ok(!moduleB.failed, 'moduleB (direct dep of moduleA) is marked as failed') - t.end() }) }) diff --git a/test/tap/install-at-locally.js b/test/tap/install-at-locally.js index fa0190ceb3ee2..e4920d22d14aa 100644 --- a/test/tap/install-at-locally.js +++ b/test/tap/install-at-locally.js @@ -2,13 +2,12 @@ var fs = require('graceful-fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.join(__dirname, 'install-at-locally') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg, stdio: [0, 1, 2] } @@ -17,11 +16,6 @@ var json = { version: '0.0.0' } -test('setup', function (t) { - cleanup() - t.end() -}) - test('\'npm install ./package@1.2.3\' should install local pkg', function (t) { var target = './package@1.2.3' setup(target) @@ -46,18 +40,8 @@ test('\'npm install install/at/locally@./package@1.2.3\' should install local pk }) }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} - function setup (target) { - cleanup() + rimraf.sync(pkg) var root = path.resolve(pkg, target) mkdirp.sync(root) fs.writeFileSync( @@ -65,5 +49,4 @@ function setup (target) { JSON.stringify(json, null, 2) ) mkdirp.sync(path.resolve(pkg, 'node_modules')) - process.chdir(pkg) } diff --git a/test/tap/install-at-sub-path-locally.js b/test/tap/install-at-sub-path-locally.js new file mode 100644 index 0000000000000..931d29bbd2642 --- /dev/null +++ b/test/tap/install-at-sub-path-locally.js @@ -0,0 +1,49 @@ +var fs = require('graceful-fs') +var path = require('path') + +var mkdirp = require('mkdirp') +var test = require('tap').test + +var common = require('../common-tap.js') + +var pkg = path.resolve(common.pkg, 'package') + +var EXEC_OPTS = { cwd: pkg, stdio: [0, 1, 2] } + +var json = { + name: 'install-at-sub-path-locally-mock', + version: '0.0.0' +} + +var target = '../package@1.2.3' + +test('setup', function (t) { + var root = path.resolve(pkg, target) + mkdirp.sync(root) + fs.writeFileSync( + path.join(root, 'package.json'), + JSON.stringify(json, null, 2) + ) + mkdirp.sync(path.resolve(pkg, 'node_modules')) + t.end() +}) + +test('\'npm install ../package@1.2.3\' should install local pkg from sub path', function (t) { + common.npm(['install', '--loglevel=silent', target], EXEC_OPTS, function (err, code) { + if (err) throw err + var p = path.resolve(pkg, 'node_modules/install-at-sub-path-locally-mock/package.json') + t.equal(code, 0, 'npm install exited with code') + t.ok(JSON.parse(fs.readFileSync(p, 'utf8'))) + t.end() + }) +}) + +test('\'running npm install ../package@1.2.3\' should not break on sub path re-install', function (t) { + common.npm(['install', '--loglevel=silent', target], EXEC_OPTS, function (err, code) { + if (err) throw err + var p = path.resolve(pkg, 'node_modules/install-at-sub-path-locally-mock/package.json') + t.equal(code, 0, 'npm install exited with code') + t.ok(JSON.parse(fs.readFileSync(p, 'utf8'))) + t.end() + }) +}) diff --git a/test/tap/install-bad-dep-format.js b/test/tap/install-bad-dep-format.js index 94c7d175fde45..9d9a41383598d 100644 --- a/test/tap/install-bad-dep-format.js +++ b/test/tap/install-bad-dep-format.js @@ -2,8 +2,6 @@ var fs = require('graceful-fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') 
-var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') @@ -18,41 +16,18 @@ var json = { } test('invalid url format returns appropriate error', function (t) { - setup(json) - common.npm(['install'], {}, function (err, code, stdout, stderr) { + var pkgPath = path.resolve(common.pkg, json.name) + mkdirp.sync(pkgPath) + fs.writeFileSync( + path.join(pkgPath, 'package.json'), + JSON.stringify(json, null, 2) + ) + common.npm(['install'], {cwd: pkgPath}, function (err, code, stdout, stderr) { t.ifError(err, 'install ran without error') - t.equals(code, 1, 'inall exited with code 1') + t.equals(code, 1, 'install exited with code 1') t.match(stderr, /ERR.*Unsupported URL Type/, 'Error should report that invalid url-style formats are used') t.end() }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function setup (json) { - cleanup() - process.chdir(mkPkg(json)) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - var pkgs = [json] - pkgs.forEach(function (json) { - rimraf.sync(path.resolve(__dirname, json.name)) - }) -} - -function mkPkg (json) { - var pkgPath = path.resolve(__dirname, json.name) - mkdirp.sync(pkgPath) - fs.writeFileSync( - path.join(pkgPath, 'package.json'), - JSON.stringify(json, null, 2) - ) - return pkgPath -} diff --git a/test/tap/install-bad-man.js b/test/tap/install-bad-man.js index 226d0b24fc06d..0aa83a21c541a 100644 --- a/test/tap/install-bad-man.js +++ b/test/tap/install-bad-man.js @@ -1,15 +1,13 @@ var fs = require('fs') var resolve = require('path').resolve -var osenv = require('osenv') var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = resolve(__dirname, 'install-bad-man') -var target = resolve(__dirname, 'install-bad-man-target') +var pkg = resolve(common.pkg, 'package') +var target = resolve(common.pkg, 'target') var EXEC_OPTS = { cwd: target @@ -21,11 +19,17 @@ var json = { man: [ './install-bad-man.1.lol' ] } -common.pendIfWindows('man pages do not get installed on Windows') +common.skipIfWindows('man pages do not get installed on Windows') test('setup', function (t) { - setup() - t.pass('setup ran') + mkdirp.sync(pkg) + // make sure it installs locally + mkdirp.sync(resolve(target, 'node_modules')) + fs.writeFileSync( + resolve(pkg, 'package.json'), + JSON.stringify(json, null, 2) + '\n' + ) + fs.writeFileSync(resolve(pkg, 'install-bad-man.1.lol'), 'lol\n') t.end() }) @@ -55,27 +59,3 @@ test("install from repo on 'OS X'", function (t) { } ) }) - -test('clean', function (t) { - cleanup() - t.pass('cleaned up') - t.end() -}) - -function setup () { - cleanup() - mkdirp.sync(pkg) - // make sure it installs locally - mkdirp.sync(resolve(target, 'node_modules')) - fs.writeFileSync( - resolve(pkg, 'package.json'), - JSON.stringify(json, null, 2) + '\n' - ) - fs.writeFileSync(resolve(pkg, 'install-bad-man.1.lol'), 'lol\n') -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) - rimraf.sync(target) -} diff --git a/test/tap/install-before.js b/test/tap/install-before.js new file mode 100644 index 0000000000000..05a254a1ef1ce --- /dev/null +++ b/test/tap/install-before.js @@ -0,0 +1,89 @@ +'use strict' + +const BB = require('bluebird') + +const common = require('../common-tap.js') +const mockTar = require('../util/mock-tarball.js') +const mr = common.fakeRegistry.compat +const path = require('path') +const rimraf = BB.promisify(require('rimraf')) +const Tacks = require('tacks') 
+const { test } = require('tap') + +const { Dir, File } = Tacks + +const testDir = common.pkg + +let server +test('setup', t => { + mr({}, (err, s) => { + t.ifError(err, 'registry mocked successfully') + server = s + t.end() + }) +}) + +test('installs an npm package before a certain date', t => { + const fixture = new Tacks(Dir({ + 'package.json': File({}) + })) + fixture.create(testDir) + const packument = { + name: 'foo', + 'dist-tags': { latest: '1.2.4' }, + versions: { + '1.2.3': { + name: 'foo', + version: '1.2.3', + dist: { + tarball: `${server.registry}/foo/-/foo-1.2.3.tgz` + } + }, + '1.2.4': { + name: 'foo', + version: '1.2.4', + dist: { + tarball: `${server.registry}/foo/-/foo-1.2.4.tgz` + } + } + }, + time: { + created: '2017-01-01T00:00:01.000Z', + modified: '2018-01-01T00:00:01.000Z', + '1.2.3': '2017-01-01T00:00:01.000Z', + '1.2.4': '2018-01-01T00:00:01.000Z' + } + } + server.get('/foo').reply(200, packument) + return mockTar({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.2.3' + }) + }).then(tarball => { + server.get('/foo/-/foo-1.2.3.tgz').reply(200, tarball) + server.get('/foo/-/foo-1.2.4.tgz').reply(500) + return common.npm([ + 'install', 'foo', + '--before', '2018', + '--json', + '--cache', path.join(testDir, 'npmcache'), + '--registry', server.registry + ], { cwd: testDir }) + }).then(([code, stdout, stderr]) => { + t.comment(stdout) + t.comment(stderr) + t.like(JSON.parse(stdout), { + added: [{ + action: 'add', + name: 'foo', + version: '1.2.3' + }] + }, 'installed the 2017 version of the package') + }) +}) + +test('cleanup', t => { + server.close() + return rimraf(testDir) +}) diff --git a/test/tap/install-bin-null.js b/test/tap/install-bin-null.js index f45528a75acf4..2ad75eb59940a 100644 --- a/test/tap/install-bin-null.js +++ b/test/tap/install-bin-null.js @@ -2,13 +2,11 @@ var fs = require('graceful-fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.join(__dirname, 'install-bin-null') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg } @@ -43,49 +41,33 @@ var grandchildPkg = { var pkgs = [childPkgA, childPkgB, grandchildPkg] -test('the grandchild has bin:null', function (t) { - setup() - common.npm(['install'], EXEC_OPTS, function (err, code, stdout, stderr) { - t.ifErr(err, 'npm link finished without error') - t.equal(code, 0, 'exited ok') - t.ok(stdout, 'output indicating success') - t.notOk(stderr, 'no output stderr') - t.end() - }) -}) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} - -function setup () { - cleanup() +test('setup', t => { mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(parentPkg, null, 2) ) pkgs.forEach(function (json) { - process.chdir(mkPkg(json)) + var pkgPath = path.resolve(pkg, json.name) + mkdirp.sync(pkgPath) + fs.writeFileSync( + path.join(pkgPath, 'package.json'), + JSON.stringify(json, null, 2) + ) }) fs.writeFileSync( path.join(pkg, childPkgA.name, 'index.js'), '' ) -} + t.end() +}) -function mkPkg (json) { - var pkgPath = path.resolve(pkg, json.name) - mkdirp.sync(pkgPath) - fs.writeFileSync( - path.join(pkgPath, 'package.json'), - JSON.stringify(json, null, 2) - ) - return pkgPath -} +test('the grandchild has bin:null', function (t) { + common.npm(['install'], EXEC_OPTS, function (err, code, stdout, stderr) { + t.ifErr(err, 'npm link 
finished without error') + t.equal(code, 0, 'exited ok') + t.ok(stdout, 'output indicating success') + t.notOk(stderr, 'no output stderr') + t.end() + }) +}) diff --git a/test/tap/install-cli-only-development.js b/test/tap/install-cli-only-development.js index ff9d05f547f26..6f03931d80e9c 100644 --- a/test/tap/install-cli-only-development.js +++ b/test/tap/install-cli-only-development.js @@ -3,13 +3,12 @@ var path = require('path') var existsSync = fs.existsSync || path.existsSync var mkdirp = require('mkdirp') -var osenv = require('osenv') var rimraf = require('rimraf') -var test = require('tap').test +const t = require('tap') var common = require('../common-tap.js') -var pkg = path.join(__dirname, 'install-cli-development') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg } @@ -37,13 +36,29 @@ var devDependency = { version: '0.0.0' } -test('setup', function (t) { - setup() - t.pass('setup ran') +t.test('setup', t => { + mkdirp.sync(path.join(pkg, 'dependency')) + fs.writeFileSync( + path.join(pkg, 'dependency', 'package.json'), + JSON.stringify(dependency, null, 2) + ) + + mkdirp.sync(path.join(pkg, 'dev-dependency')) + fs.writeFileSync( + path.join(pkg, 'dev-dependency', 'package.json'), + JSON.stringify(devDependency, null, 2) + ) + + mkdirp.sync(path.join(pkg, 'node_modules')) + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(json, null, 2) + ) + t.end() }) -test('\'npm install --only=development\' should only install devDependencies', function (t) { +t.test('\'npm install --only=development\' should only install devDependencies', function (t) { common.npm(['install', '--only=development'], EXEC_OPTS, function (err, code) { t.ifError(err, 'install development successful') t.equal(code, 0, 'npm install did not raise error code') @@ -57,14 +72,11 @@ test('\'npm install --only=development\' should only install devDependencies', f existsSync(path.resolve(pkg, 'node_modules/dependency/package.json')), 'dependency was NOT installed' ) - t.end() + rimraf(path.join(pkg, 'node_modules'), t.end) }) }) -test('\'npm install --only=development\' should only install devDependencies regardless of npm.config.get(\'production\')', function (t) { - cleanup() - setup() - +t.test('\'npm install --only=development\' should only install devDependencies regardless of npm.config.get(\'production\')', function (t) { common.npm(['install', '--only=development', '--production'], EXEC_OPTS, function (err, code) { t.ifError(err, 'install development successful') t.equal(code, 0, 'npm install did not raise error code') @@ -78,38 +90,6 @@ test('\'npm install --only=development\' should only install devDependencies reg existsSync(path.resolve(pkg, 'node_modules/dependency/package.json')), 'dependency was NOT installed' ) - t.end() + rimraf(path.join(pkg, 'node_modules'), t.end) }) }) - -test('cleanup', function (t) { - cleanup() - t.pass('cleaned up') - t.end() -}) - -function setup () { - mkdirp.sync(path.join(pkg, 'dependency')) - fs.writeFileSync( - path.join(pkg, 'dependency', 'package.json'), - JSON.stringify(dependency, null, 2) - ) - - mkdirp.sync(path.join(pkg, 'dev-dependency')) - fs.writeFileSync( - path.join(pkg, 'dev-dependency', 'package.json'), - JSON.stringify(devDependency, null, 2) - ) - - mkdirp.sync(path.join(pkg, 'node_modules')) - fs.writeFileSync( - path.join(pkg, 'package.json'), - JSON.stringify(json, null, 2) - ) - process.chdir(pkg) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/install-cli-only-production.js 
b/test/tap/install-cli-only-production.js index 40328d773539f..63863ff934d93 100644 --- a/test/tap/install-cli-only-production.js +++ b/test/tap/install-cli-only-production.js @@ -3,13 +3,11 @@ var path = require('path') var existsSync = fs.existsSync || path.existsSync var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.join(__dirname, 'install-cli-only-production') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg } @@ -59,7 +57,6 @@ test('setup', function (t) { JSON.stringify(json, null, 2) ) - process.chdir(pkg) t.end() }) @@ -82,9 +79,3 @@ test('\'npm install --only=production\' should only install dependencies', funct t.end() }) }) - -test('cleanup', function (t) { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) - t.end() -}) diff --git a/test/tap/install-cli-only-shrinkwrap.js b/test/tap/install-cli-only-shrinkwrap.js index aa731909d3539..004593d782c22 100644 --- a/test/tap/install-cli-only-shrinkwrap.js +++ b/test/tap/install-cli-only-shrinkwrap.js @@ -3,13 +3,12 @@ var path = require('path') var existsSync = fs.existsSync || path.existsSync var mkdirp = require('mkdirp') -var osenv = require('osenv') var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.join(__dirname, path.basename(__filename, '.js')) +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg } @@ -55,9 +54,27 @@ var devDependency = { } test('setup', function (t) { - cleanup() - setup() - t.pass('setup ran') + mkdirp.sync(path.join(pkg, 'dependency')) + fs.writeFileSync( + path.join(pkg, 'dependency', 'package.json'), + JSON.stringify(dependency, null, 2) + ) + + mkdirp.sync(path.join(pkg, 'dev-dependency')) + fs.writeFileSync( + path.join(pkg, 'dev-dependency', 'package.json'), + JSON.stringify(devDependency, null, 2) + ) + + mkdirp.sync(path.join(pkg, 'node_modules')) + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(json, null, 2) + ) + fs.writeFileSync( + path.join(pkg, 'npm-shrinkwrap.json'), + JSON.stringify(shrinkwrap, null, 2) + ) t.end() }) @@ -77,13 +94,11 @@ test('\'npm install --only=development\' should only install devDependencies', f existsSync(path.resolve(pkg, 'node_modules/dependency/package.json')), 'dependency was NOT installed' ) - t.end() + rimraf(path.join(pkg, 'node_modules'), t.end) }) }) test('\'npm install --only=production\' should only install dependencies', function (t) { - cleanup() - setup() common.npm(['install', '--only=production'], EXEC_OPTS, function (err, code, stdout, stderr) { if (err) throw err t.comment(stdout.trim()) @@ -99,42 +114,6 @@ test('\'npm install --only=production\' should only install dependencies', funct existsSync(path.resolve(pkg, 'node_modules/dev-dependency/package.json')), 'devDependency was NOT installed' ) - t.end() + rimraf(path.join(pkg, 'node_modules'), t.end) }) }) - -test('cleanup', function (t) { - cleanup() - t.pass('cleaned up') - t.end() -}) - -function setup () { - mkdirp.sync(path.join(pkg, 'dependency')) - fs.writeFileSync( - path.join(pkg, 'dependency', 'package.json'), - JSON.stringify(dependency, null, 2) - ) - - mkdirp.sync(path.join(pkg, 'dev-dependency')) - fs.writeFileSync( - path.join(pkg, 'dev-dependency', 'package.json'), - JSON.stringify(devDependency, null, 2) - ) - - mkdirp.sync(path.join(pkg, 'node_modules')) - fs.writeFileSync( - path.join(pkg, 'package.json'), - JSON.stringify(json, null, 2) - ) - fs.writeFileSync( - 
path.join(pkg, 'npm-shrinkwrap.json'), - JSON.stringify(shrinkwrap, null, 2) - ) - process.chdir(pkg) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/install-cli-production-nosave.js b/test/tap/install-cli-production-nosave.js index cf69ad3fae05d..23f5494996473 100644 --- a/test/tap/install-cli-production-nosave.js +++ b/test/tap/install-cli-production-nosave.js @@ -3,14 +3,11 @@ var path = require('path') var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') -var test = require('tap').test +var t = require('tap') var common = require('../common-tap.js') -var server -var pkg = path.join(__dirname, 'install-cli-production-nosave') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg } @@ -21,16 +18,20 @@ var PACKAGE_JSON1 = { } } -test('setup', function (t) { - setup() +t.test('setup', function (t) { + mkdirp.sync(path.resolve(pkg, 'node_modules')) + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(PACKAGE_JSON1, null, 2) + ) mr({ port: common.port }, function (er, s) { t.ifError(er, 'started mock registry') - server = s + t.parent.teardown(() => s.close()) t.end() }) }) -test('install --production <module> without --save exits successfully', function (t) { +t.test('install --production <module> without --save exits successfully', function (t) { common.npm( [ '--registry', common.registry, @@ -45,25 +46,3 @@ test('install --production <module> without --save exits successfully', function } ) }) - -test('cleanup', function (t) { - server.close() - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} - -function setup () { - cleanup() - mkdirp.sync(path.resolve(pkg, 'node_modules')) - fs.writeFileSync( - path.join(pkg, 'package.json'), - JSON.stringify(PACKAGE_JSON1, null, 2) - ) - - process.chdir(pkg) -} diff --git a/test/tap/install-cli-production.js b/test/tap/install-cli-production.js index a1fdac6e880d5..d083b4295738b 100644 --- a/test/tap/install-cli-production.js +++ b/test/tap/install-cli-production.js @@ -3,13 +3,11 @@ var path = require('path') var existsSync = fs.existsSync || path.existsSync var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.join(__dirname, 'install-cli-production') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg } @@ -59,7 +57,6 @@ test('setup', function (t) { JSON.stringify(json, null, 2) ) - process.chdir(pkg) t.end() }) @@ -80,9 +77,3 @@ test('\'npm install --production\' should only install dependencies', function ( t.end() }) }) - -test('cleanup', function (t) { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) - t.end() -}) diff --git a/test/tap/install-cli-unicode.js b/test/tap/install-cli-unicode.js index 01a5f4a471dc0..930066db5fe54 100644 --- a/test/tap/install-cli-unicode.js +++ b/test/tap/install-cli-unicode.js @@ -1,16 +1,12 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var server -var pkg = path.resolve(__dirname, 'install-cli-unicode') +var pkg = common.pkg function hasOnlyAscii (s) { return /^[\000-\177]*$/.test(s) @@ -28,15 +24,13 @@ var json = { } test('setup', function (t) { - rimraf.sync(pkg) - 
mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) ) mr({ port: common.port }, function (er, s) { - server = s + t.parent.teardown(() => s.close()) t.end() }) }) @@ -61,11 +55,3 @@ test('does not use unicode with --unicode false', function (t) { } ) }) - -test('cleanup', function (t) { - server.close() - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) - - t.end() -}) diff --git a/test/tap/install-contributors-count.js b/test/tap/install-contributors-count.js index 52fba0fd24999..ead925081330b 100644 --- a/test/tap/install-contributors-count.js +++ b/test/tap/install-contributors-count.js @@ -1,12 +1,11 @@ 'use strict' -var path = require('path') var test = require('tap').test var Tacks = require('tacks') var Dir = Tacks.Dir var File = Tacks.File var common = require('../common-tap.js') -var testdir = path.resolve(__dirname, path.basename(__filename, '.js')) +var testdir = common.pkg var fixture = new Tacks(Dir({ node_modules: Dir({ a: Dir({ diff --git a/test/tap/install-dep-classification.js b/test/tap/install-dep-classification.js new file mode 100644 index 0000000000000..257fc99fc123f --- /dev/null +++ b/test/tap/install-dep-classification.js @@ -0,0 +1,188 @@ +'use strict' +const path = require('path') +const test = require('tap').test +const Tacks = require('tacks') +const File = Tacks.File +const Dir = Tacks.Dir +const common = require('../common-tap.js') +const fs = require('fs') + +const basedir = common.pkg +const testdir = path.join(basedir, 'testdir') +const cachedir = common.cache +const globaldir = path.join(basedir, 'global') +const tmpdir = path.join(basedir, 'tmp') +const optionaldir = path.join(testdir, 'optional') +const devdir = path.join(testdir, 'dev') + +const env = common.newEnv().extend({ + npm_config_cache: cachedir, + npm_config_tmp: tmpdir, + npm_config_prefix: globaldir, + npm_config_registry: common.registry, + npm_config_loglevel: 'error' +}) + +/** + * NOTE: Tarball Fixtures + * They contain package.json files with dependencies like the following: + * a-1.0.0.tgz: package/package.json + * { + * "name":"a", + * "version":"1.0.0", + * "dependencies":{ + * "b":"./b-1.0.0.tgz" + * } + * } + * example-1.0.0.tgz: package/package.json + * { + * "name":"example", + * "version":"1.0.0", + * "dependencies":{ + * "a":"./a-1.0.0.tgz", + * "b":"./b-1.0.0.tgz" + * } + * } + */ +const fixture = new Tacks(Dir({ + cache: Dir(), + global: Dir(), + tmp: Dir(), + testdir: Dir({ + 'a-1.0.0.tgz': File(Buffer.from( + '1f8b0800000000000003edcfc10a83300c06e09df71492f35653567bf06d' + + 'a2067163b558b7c3c4775f54f0e4654c18837e973f4da0249eca1bd59cfa' + + '25d535b4eeb03344b4c6245bfd8946995d328b5a5b3bd55264464beebdc8' + + '9647e8a99355befd67b92559f34f0ce0e8ce9003c1099edc85a675f2d20a' + + '154aa762cfae6257361c201fa090994a8bf33c577dfd82713cfefa86288a' + + 'a2e8736f68a0ff4400080000', + 'hex' + )), + 'b-1.0.0.tgz': File(Buffer.from( + '1f8b08000000000000032b484cce4e4c4fd52f80d07a59c5f9790c540606' + + '06066626260ad8c4c1c0d85c81c1d8d4ccc0d0d0cccc00a80ec830353103' + + 'd2d4760836505a5c925804740aa5e640bca200a78708a856ca4bcc4d55b2' + + '524a52d2512a4b2d2acecccf03f20cf50cf40c946ab906da79a360148c82' + + '51300a680400106986b400080000', + 'hex' + )), + dev: Dir({ + 'package.json': File({ + name: 'dev', + version: '1.0.0', + devDependencies: { + example: '../example-1.0.0.tgz' + } + }) + }), + 'example-1.0.0.tgz': File(Buffer.from( + '1f8b0800000000000003ed8fc10a83300c863def2924e7ada6587bf06daa' + + 
'06719bb55837c6c4775fa6307670a70963d0ef92f02584fcce94275353e2' + + '962a8ebeb3d1c620a2562a5ef34f64aae328cd344aa935f21e379962875b' + + '3fb2c6c50fa6e757bebdb364895ff54f18c19a962007ba99d69d09f670a5' + + 'de379d6527050a645391235b912d1bf2908f607826127398e762a8efbc53' + + 'ccae7873d3b4fb75ba402010087ce2014747c9d500080000', + 'hex' + )), + optional: Dir({ + 'package.json': File({ + name: 'optional', + version: '1.0.0', + optionalDependencies: { + example: '../example-1.0.0.tgz' + } + }) + }) + }) +})) + +function setup () { + cleanup() + fixture.create(basedir) +} + +function cleanup () { + fixture.remove(basedir) +} + +test('setup', function (t) { + setup() + return common.fakeRegistry.listen() +}) + +test('optional dependency identification', function (t) { + return common.npm(['install', '--no-optional'], {cwd: optionaldir, env}).then(([code, stdout, stderr]) => { + t.is(code, 0, 'no error code') + t.is(stderr, '', 'no error output') + t.notOk(fs.existsSync(path.join(optionaldir, 'node_modules')), 'did not install anything') + t.similar(JSON.parse(fs.readFileSync(path.join(optionaldir, 'package-lock.json'), 'utf8')), { + dependencies: { + a: { + version: 'file:../a-1.0.0.tgz', + optional: true + }, + b: { + version: 'file:../b-1.0.0.tgz', + optional: true + }, + example: { + version: '1.0.0', + optional: true + } + } + }, 'locks dependencies as optional') + }) +}) + +test('development dependency identification', function (t) { + return common.npm(['install', '--only=prod'], {cwd: devdir, env}).then(([code, stdout, stderr]) => { + t.is(code, 0, 'no error code') + t.is(stderr, '', 'no error output') + t.notOk(fs.existsSync(path.join(devdir, 'node_modules')), 'did not install anything') + t.similar(JSON.parse(fs.readFileSync(path.join(devdir, 'package-lock.json'), 'utf8')), { + dependencies: { + a: { + version: 'file:../a-1.0.0.tgz', + dev: true + }, + b: { + version: 'file:../b-1.0.0.tgz', + dev: true + }, + example: { + version: '1.0.0', + dev: true + } + } + }, 'locks dependencies as dev') + }) +}) + +test('default dependency identification', function (t) { + return common.npm(['install'], {cwd: optionaldir, env}).then(([code, stdout, stderr]) => { + t.is(code, 0, 'no error code') + t.is(stderr, '', 'no error output') + t.similar(JSON.parse(fs.readFileSync(path.join(optionaldir, 'package-lock.json'), 'utf8')), { + dependencies: { + a: { + version: 'file:../a-1.0.0.tgz', + optional: true + }, + b: { + version: 'file:../b-1.0.0.tgz', + optional: true + }, + example: { + version: '1.0.0', + optional: true + } + } + }, 'locks dependencies as optional') + }) +}) + +test('cleanup', function (t) { + common.fakeRegistry.close() + cleanup() + t.done() +}) diff --git a/test/tap/install-duplicate-deps-warning.js b/test/tap/install-duplicate-deps-warning.js index 017a5cdfe117c..869476ccd17ef 100644 --- a/test/tap/install-duplicate-deps-warning.js +++ b/test/tap/install-duplicate-deps-warning.js @@ -1,16 +1,13 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') var npm = require('../../') -var pkg = path.resolve(__dirname, path.basename(__filename, '.js')) +var pkg = common.pkg var json = { dependencies: { @@ -21,24 +18,19 @@ var json = { } } -test('setup', function (t) { +test('npm install with duplicate dependencies, different versions', function (t) { + t.plan(1) t.comment('test for 
https://github.com/npm/npm/issues/6725') - cleanup() - mkdirp.sync(pkg) + fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) ) process.chdir(pkg) - console.dir(pkg) - t.end() -}) -test('npm install with duplicate dependencies, different versions', function (t) { - t.plan(1) mr({ port: common.port }, function (er, s) { var opts = { - cache: path.resolve(pkg, 'cache'), + cache: common.cache, registry: common.registry } @@ -57,13 +49,3 @@ test('npm install with duplicate dependencies, different versions', function (t) }) }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/install-from-local-multipath.js b/test/tap/install-from-local-multipath.js new file mode 100644 index 0000000000000..e35794dca986c --- /dev/null +++ b/test/tap/install-from-local-multipath.js @@ -0,0 +1,173 @@ +var fs = require('graceful-fs') +var path = require('path') + +var mkdirp = require('mkdirp') +var test = require('tap').test + +var common = require('../common-tap') + +var root = common.pkg +// Allow running this test on older commits (useful for bisecting) +if (!root) { + var main = require.main.filename + root = path.resolve(path.dirname(main), path.basename(main, '.js')) +} +var pkg = path.join(root, 'parent') + +var EXEC_OPTS = { cwd: pkg } + +var parent = { + name: 'parent', + version: '0.0.0', + dependencies: { + 'child-1-1': 'file:../children/child-1-1', + 'child-1-2': 'file:../children/child-1-2', + 'child-2': 'file:../children/child-2' + } +} + +var parentLock = { + 'name': 'parent', + 'version': '1.0.0', + 'lockfileVersion': 1, + 'requires': true, + 'dependencies': { + 'child-1-1': { + 'version': 'file:../children/child-1-1', + 'requires': { + 'child-2': 'file:../children/child-2' + } + }, + 'child-1-2': { + 'version': 'file:../children/child-1-2', + 'requires': { + 'child-1-1': 'file:../children/child-1-1', + 'child-2': 'file:../children/child-2' + } + }, + 'child-2': { + 'version': 'file:../children/child-2' + } + } +} + +var child11 = { + name: 'parent', + version: '0.0.0', + 'dependencies': { + 'child-2': 'file:../child-2' + } +} + +var child11Lock = { + 'name': 'child-1-1', + 'version': '1.0.0', + 'lockfileVersion': 1, + 'requires': true, + 'dependencies': { + 'child-2': { + 'version': 'file:../child-2' + } + } +} + +var child12 = { + 'name': 'child-1-2', + 'version': '1.0.0', + 'dependencies': { + 'child-1-1': 'file:../child-1-1', + 'child-2': 'file:../child-2' + } +} + +var child12Lock = { + 'name': 'child-1-2', + 'version': '1.0.0', + 'lockfileVersion': 1, + 'requires': true, + 'dependencies': { + 'child-1-1': { + 'version': 'file:../child-1-1', + 'requires': { + 'child-2': 'file:../child-2' + } + }, + 'child-2': { + 'version': 'file:../child-2' + } + } +} + +var child2 = { + 'name': 'child-2', + 'version': '1.0.0', + 'dependencies': {} +} + +var child2Lock = { + 'name': 'child-2', + 'version': '1.0.0', + 'lockfileVersion': 1, + 'requires': true, + 'dependencies': {} +} + +test('setup', function (t) { + mkdirp.sync(pkg) + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(parent, null, 2) + ) + + fs.writeFileSync( + path.join(pkg, 'package-lock.json'), + JSON.stringify(parentLock, null, 2) + ) + + mkdirp.sync(path.join(root, 'children', 'child-1-1')) + fs.writeFileSync( + path.join(root, 'children', 'child-1-1', 'package.json'), + JSON.stringify(child11, null, 2) + ) + fs.writeFileSync( + path.join(root, 'children', 'child-1-1', 
'package-lock.json'), + JSON.stringify(child11Lock, null, 2) + ) + + mkdirp.sync(path.join(root, 'children', 'child-1-2')) + fs.writeFileSync( + path.join(root, 'children', 'child-1-2', 'package.json'), + JSON.stringify(child12, null, 2) + ) + fs.writeFileSync( + path.join(root, 'children', 'child-1-2', 'package-lock.json'), + JSON.stringify(child12Lock, null, 2) + ) + + mkdirp.sync(path.join(root, 'children', 'child-2')) + fs.writeFileSync( + path.join(root, 'children', 'child-2', 'package.json'), + JSON.stringify(child2, null, 2) + ) + fs.writeFileSync( + path.join(root, 'children', 'child-2', 'package-lock.json'), + JSON.stringify(child2Lock, null, 2) + ) + + process.chdir(pkg) + t.end() +}) + +test('\'npm install\' should install local packages', function (t) { + common.npm( + [ + 'install', '.' + ], + EXEC_OPTS, + function (err, code) { + t.ifError(err, 'error should not exist') + t.notOk(code, 'npm install exited with code 0') + t.end() + } + ) +}) diff --git a/test/tap/install-from-local.js b/test/tap/install-from-local.js index 3cac382a70d38..1ab94243ff3ca 100644 --- a/test/tap/install-from-local.js +++ b/test/tap/install-from-local.js @@ -2,13 +2,11 @@ var fs = require('graceful-fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap') -var root = path.join(__dirname, 'install-from-local') +var root = common.pkg var pkg = path.join(root, 'package-with-local-paths') var EXEC_OPTS = { cwd: pkg } @@ -37,7 +35,6 @@ var localDevDependency = { } test('setup', function (t) { - rimraf.sync(pkg) mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), @@ -90,9 +87,3 @@ test('\'npm install\' should install local packages', function (t) { } ) }) - -test('cleanup', function (t) { - process.chdir(osenv.tmpdir()) - rimraf.sync(root) - t.end() -}) diff --git a/test/tap/install-into-likenamed-folder.js b/test/tap/install-into-likenamed-folder.js index 187d5fbf11bca..c7bc86f3baaf4 100644 --- a/test/tap/install-into-likenamed-folder.js +++ b/test/tap/install-into-likenamed-folder.js @@ -6,7 +6,7 @@ var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var base = path.join(__dirname, path.basename(__filename, '.js')) +var base = common.pkg var moduleDir = path.join(base, 'example-src') var destDir = path.join(base, 'example') var moduleJson = { diff --git a/test/tap/install-link-metadeps-locally.js b/test/tap/install-link-metadeps-locally.js new file mode 100644 index 0000000000000..136fd46d10bbf --- /dev/null +++ b/test/tap/install-link-metadeps-locally.js @@ -0,0 +1,52 @@ +// XXX Remove in npm v7, when this is no longer how we do things +const t = require('tap') +const common = require('../common-tap.js') +const pkg = common.pkg +const mkdirp = require('mkdirp') +const { writeFileSync, statSync } = require('fs') +const { resolve } = require('path') +const mr = require('npm-registry-mock') +const rimraf = require('rimraf') + +t.test('setup', t => { + mkdirp.sync(resolve(pkg, 'node_modules')) + mkdirp.sync(resolve(pkg, 'foo')) + writeFileSync(resolve(pkg, 'foo', 'package.json'), JSON.stringify({ + name: 'foo', + version: '1.2.3', + dependencies: { + underscore: '*' + } + })) + + writeFileSync(resolve(pkg, 'package.json'), JSON.stringify({ + name: 'root', + version: '1.2.3', + dependencies: { + foo: 'file:foo' + } + })) + + mr({ port: common.port }, (er, s) => { + if (er) { + throw er + } + 
t.parent.teardown(() => s.close()) + t.end() + }) +}) + +t.test('initial install to create package-lock', + t => common.npm(['install', '--registry', common.registry], { cwd: pkg }) + .then(([code]) => t.equal(code, 0, 'command worked'))) + +t.test('remove node_modules', t => + rimraf(resolve(pkg, 'node_modules'), t.end)) + +t.test('install again from package-lock', t => + common.npm(['install', '--registry', common.registry], { cwd: pkg }) + .then(([code]) => { + t.equal(code, 0, 'command worked') + const underscore = resolve(pkg, 'node_modules', 'underscore') + t.equal(statSync(underscore).isDirectory(), true, 'underscore installed') + })) diff --git a/test/tap/install-link-metadeps-subfolders.js b/test/tap/install-link-metadeps-subfolders.js new file mode 100644 index 0000000000000..7544c8a4ebe84 --- /dev/null +++ b/test/tap/install-link-metadeps-subfolders.js @@ -0,0 +1,68 @@ +const t = require('tap') +const common = require('../common-tap.js') +const mkdirp = require('mkdirp') +const { writeFileSync, readFileSync } = require('fs') +const { resolve } = require('path') +const pkg = common.pkg +const app = resolve(pkg, 'app') +const lib = resolve(pkg, 'lib') +const moda = resolve(lib, 'module-a') +const modb = resolve(lib, 'module-b') + +const rimraf = require('rimraf') + +t.test('setup', t => { + mkdirp.sync(app) + mkdirp.sync(moda) + mkdirp.sync(modb) + + writeFileSync(resolve(app, 'package.json'), JSON.stringify({ + name: 'app', + version: '1.2.3', + dependencies: { + moda: 'file:../lib/module-a' + } + })) + + writeFileSync(resolve(moda, 'package.json'), JSON.stringify({ + name: 'moda', + version: '1.2.3', + dependencies: { + modb: 'file:../module-b' + } + })) + + writeFileSync(resolve(modb, 'package.json'), JSON.stringify({ + name: 'modb', + version: '1.2.3' + })) + + t.end() +}) + +t.test('initial install to create package-lock', + t => common.npm(['install'], { cwd: app }) + .then(([code]) => t.equal(code, 0, 'command worked'))) + +t.test('remove node_modules', t => + rimraf(resolve(pkg, 'node_modules'), t.end)) + +t.test('install again from package-lock', t => + common.npm(['install'], { cwd: app }) + .then(([code]) => { + t.equal(code, 0, 'command worked') + // verify that module-b is linked under module-a + const depPkg = resolve( + app, + 'node_modules', + 'moda', + 'node_modules', + 'modb', + 'package.json' + ) + const data = JSON.parse(readFileSync(depPkg, 'utf8')) + t.strictSame(data, { + name: 'modb', + version: '1.2.3' + }) + })) diff --git a/test/tap/install-link-scripts.js b/test/tap/install-link-scripts.js index acc88b4b2ccdc..52e50c6e9fb0a 100644 --- a/test/tap/install-link-scripts.js +++ b/test/tap/install-link-scripts.js @@ -2,13 +2,13 @@ var fs = require('graceful-fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') var rimraf = require('rimraf') -var test = require('tap').test +const t = require('tap') var common = require('../common-tap.js') +common.skipIfWindows('links are weird on windows') -var pkg = path.join(__dirname, 'install-link-scripts') +var pkg = common.pkg var tmp = path.join(pkg, 'tmp') var dep = path.join(pkg, 'dep') @@ -36,9 +36,29 @@ console.log('hey sup') process.env.npm_config_prefix = tmp -test('plain install', function (t) { - setup() +t.beforeEach(cb => { + rimraf(pkg, er => { + if (er) { + return cb(er) + } + mkdirp.sync(tmp) + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(json, null, 2) + ) + + mkdirp.sync(path.join(dep, 'bin')) + fs.writeFileSync( + path.join(dep, 
'package.json'), + JSON.stringify(dependency, null, 2) + ) + fs.writeFileSync(path.join(dep, 'bin', 'foo'), foo) + fs.chmod(path.join(dep, 'bin', 'foo'), '0755') + cb() + }) +}) +t.test('plain install', function (t) { common.npm( [ 'install', dep, @@ -55,9 +75,7 @@ test('plain install', function (t) { ) }) -test('link', function (t) { - setup() - +t.test('link', function (t) { common.npm( [ 'link', @@ -74,9 +92,7 @@ test('link', function (t) { ) }) -test('install --link', function (t) { - setup() - +t.test('install --link', function (t) { common.npm( [ 'link', @@ -103,30 +119,3 @@ test('install --link', function (t) { } ) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function setup () { - cleanup() - mkdirp.sync(tmp) - fs.writeFileSync( - path.join(pkg, 'package.json'), - JSON.stringify(json, null, 2) - ) - - mkdirp.sync(path.join(dep, 'bin')) - fs.writeFileSync( - path.join(dep, 'package.json'), - JSON.stringify(dependency, null, 2) - ) - fs.writeFileSync(path.join(dep, 'bin', 'foo'), foo) - fs.chmod(path.join(dep, 'bin', 'foo'), '0755') -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/install-local-dep-cycle.js b/test/tap/install-local-dep-cycle.js index 1f76ad9598a2d..a66f04bce4715 100644 --- a/test/tap/install-local-dep-cycle.js +++ b/test/tap/install-local-dep-cycle.js @@ -6,7 +6,7 @@ var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var base = path.join(__dirname, path.basename(__filename, '.js')) +var base = common.pkg var baseJSON = { name: 'base', diff --git a/test/tap/install-local-from-local.js b/test/tap/install-local-from-local.js new file mode 100644 index 0000000000000..ec53c74a39206 --- /dev/null +++ b/test/tap/install-local-from-local.js @@ -0,0 +1,94 @@ +'use strict' +var path = require('path') +var fs = require('graceful-fs') +var test = require('tap').test +var common = require('../common-tap.js') +var Tacks = require('tacks') +var Dir = Tacks.Dir +var File = Tacks.File + +var testdir = path.join(__dirname, path.basename(__filename, '.js')) +var cwd = path.join(testdir, '3') + +/** + * NOTE: Tarball Fixtures + * They contain package.json files with dependencies like the following: + * 1-1.0.0.tgz: package/package.json + * { + * "name":"1", + * "version":"1.0.0" + * } + * 2-1.0.0.tgz: package/package.json + * { + * "name":"2", + * "version":"1.0.0", + * "dependencies":{ + * "1":"file:../1/1-1.0.0.tgz" + * } + * } + */ +var fixture = new Tacks(Dir({ + '1': Dir({ + '1-1.0.0.tgz': File(Buffer.from( + '1f8b08000000000000032b484cce4e4c4fd52f80d07a59c5f9790c540606' + + '06066626260ad8c4c1c0d85c81c1d8d4ccc0d0d0cccc00a80ec830353103' + + 'd2d4760836505a5c925804740aa5e640bca200a78708a856ca4bcc4d55b2' + + '523254d2512a4b2d2acecccf03f1f40cf40c946ab906da79a360148c8251' + + '300a6804007849dfdf00080000', + 'hex' + )) + }), + '2': Dir({ + '2-1.0.0.tgz': File(Buffer.from( + '1f8b0800000000000003ed8f3d0e83300c8599394594b90d36840cdc2602' + + '17d19f80087468c5ddeb14a9135b91aa4af996e73c3f47f660eb8b6d291b' + + '565567dfbb646700c0682db6fc00ea5c24456900d118e01c17a52e58f75e' + + '648bd94f76e455befd67bd457cf44f78a64248676f242b21737908cf3b8d' + + 'beeb5d70508182d56d6820d790ab3bf2dc0a83ec62489dba2b554a6598e1' + + 'f13da1a6f62139b0a44bfaeb0b23914824b2c50b8b5b623100080000', + 'hex' + )) + }), + '3': Dir({ + 'package.json': File({ + name: '3', + version: '1.0.0', + dependencies: { + '2': '../2/2-1.0.0.tgz' + } + }) + }) +})) + +function setup () { + fixture.create(testdir) 
+} + +function cleanup () { + fixture.remove(testdir) +} + +test('setup', function (t) { + cleanup() + setup() + t.end() +}) + +test('installing local package with local dependency', function (t) { + common.npm( + ['install'], + {cwd: cwd}, + function (er, code, stdout, stderr) { + t.is(code, 0, 'no error code') + t.is(stderr, '', 'no error output') + t.ok(fs.existsSync(path.join(cwd, 'node_modules', '2')), 'installed direct dep') + t.ok(fs.existsSync(path.join(cwd, 'node_modules', '1')), 'installed indirect dep') + t.end() + } + ) +}) + +test('cleanup', function (t) { + cleanup() + t.end() +}) diff --git a/test/tap/install-man.js b/test/tap/install-man.js index d24819ca7887d..8c4b89015151f 100644 --- a/test/tap/install-man.js +++ b/test/tap/install-man.js @@ -1,15 +1,13 @@ var fs = require('fs') var resolve = require('path').resolve -var osenv = require('osenv') var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = resolve(__dirname, 'install-man') -var target = resolve(__dirname, 'install-man-target') +var pkg = resolve(common.pkg, 'package') +var target = resolve(common.pkg, 'target') common.pendIfWindows('man pages do not get installed on Windows') @@ -24,8 +22,14 @@ var json = { } test('setup', function (t) { - setup() - t.pass('setup ran') + mkdirp.sync(pkg) + // make sure it installs locally + mkdirp.sync(resolve(target, 'node_modules')) + fs.writeFileSync( + resolve(pkg, 'package.json'), + JSON.stringify(json, null, 2) + '\n' + ) + fs.writeFileSync(resolve(pkg, 'install-man.1'), 'THIS IS A MANPAGE\n') t.end() }) @@ -51,27 +55,3 @@ test('install man page', function (t) { } ) }) - -test('clean', function (t) { - cleanup() - t.pass('cleaned up') - t.end() -}) - -function setup () { - cleanup() - mkdirp.sync(pkg) - // make sure it installs locally - mkdirp.sync(resolve(target, 'node_modules')) - fs.writeFileSync( - resolve(pkg, 'package.json'), - JSON.stringify(json, null, 2) + '\n' - ) - fs.writeFileSync(resolve(pkg, 'install-man.1'), 'THIS IS A MANPAGE\n') -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) - rimraf.sync(target) -} diff --git a/test/tap/install-mention-funding.js b/test/tap/install-mention-funding.js new file mode 100644 index 0000000000000..3e9b81f24070b --- /dev/null +++ b/test/tap/install-mention-funding.js @@ -0,0 +1,127 @@ +'use strict' +const path = require('path') +const test = require('tap').test +const Tacks = require('tacks') +const Dir = Tacks.Dir +const File = Tacks.File +const common = require('../common-tap.js') + +const base = common.pkg +const singlePackage = path.join(base, 'single-funding-package') +const multiplePackages = path.join(base, 'top-level-funding') + +function getFixturePackage ({ name, version, dependencies, funding }) { + return Dir({ + 'package.json': File({ + name, + version: version || '1.0.0', + funding: funding || { + type: 'individual', + url: 'http://example.com/donate' + }, + dependencies: dependencies || {} + }) + }) +} + +const fixture = new Tacks(Dir({ + 'package.json': File({}), + 'single-funding-package': getFixturePackage({ + name: 'single-funding-package' + }), + 'top-level-funding': getFixturePackage({ + name: 'top-level-funding', + dependencies: { + 'dep-foo': 'file:../dep-foo', + 'dep-bar': 'file:../dep-bar' + } + }), + 'dep-foo': getFixturePackage({ + name: 'dep-foo', + funding: { + type: 'corporate', + url: 'https://corp.example.com/sponsor' + }, + dependencies: { + 'sub-dep-bar': 
'file:../sub-dep-bar' + } + }), + 'dep-bar': getFixturePackage({ + name: 'dep-bar', + version: '2.1.0', + dependencies: { + 'sub-dep-bar': 'file:../sub-dep-bar' + } + }), + 'sub-dep-bar': getFixturePackage({ + name: 'sub-dep-bar', + funding: { + type: 'foo', + url: 'http://example.com/foo' + } + }) +})) + +test('mention npm fund upon installing single dependency', function (t) { + setup(t) + common.npm(['install', '--no-save', singlePackage], {cwd: base}, function (err, code, stdout, stderr) { + if (err) throw err + t.is(code, 0, 'installed successfully') + t.is(stderr, '', 'no warnings') + t.includes(stdout, '1 package is looking for funding', 'should print amount of packages needing funding') + t.includes(stdout, ' run `npm fund` for details', 'should print npm fund mention') + t.end() + }) +}) + +test('mention npm fund upon installing multiple dependencies', function (t) { + setup(t) + common.npm(['install', '--no-save', multiplePackages], {cwd: base}, function (err, code, stdout, stderr) { + if (err) throw err + t.is(code, 0, 'installed successfully') + t.is(stderr, '', 'no warnings') + t.includes(stdout, '4 packages are looking for funding', 'should print amount of packages needing funding') + t.includes(stdout, ' run `npm fund` for details', 'should print npm fund mention') + t.end() + }) +}) + +test('skips mention npm fund using --no-fund option', function (t) { + setup(t) + common.npm(['install', '--no-save', '--no-fund', multiplePackages], {cwd: base}, function (err, code, stdout, stderr) { + if (err) throw err + t.is(code, 0, 'installed successfully') + t.is(stderr, '', 'no warnings') + t.doesNotHave(stdout, '4 packages are looking for funding', 'should print amount of packages needing funding') + t.doesNotHave(stdout, ' run `npm fund` for details', 'should print npm fund mention') + t.end() + }) +}) + +test('mention packages looking for funding using --json', function (t) { + setup(t) + common.npm(['install', '--no-save', '--json', multiplePackages], {cwd: base}, function (err, code, stdout, stderr) { + if (err) throw err + t.is(code, 0, 'installed successfully') + t.is(stderr, '', 'no warnings') + const res = JSON.parse(stdout) + t.match(res.funding, '4 packages are looking for funding', 'should print amount of packages needing funding') + t.end() + }) +}) + +test('cleanup', function (t) { + cleanup() + t.end() +}) + +function setup (t) { + fixture.create(base) + t.teardown(() => { + cleanup() + }) +} + +function cleanup () { + fixture.remove(base) +} diff --git a/test/tap/install-noargs-dev.js b/test/tap/install-noargs-dev.js index ec9c7e3687eef..53422b9b5bc23 100644 --- a/test/tap/install-noargs-dev.js +++ b/test/tap/install-noargs-dev.js @@ -3,14 +3,11 @@ var path = require('path') var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var server -var pkg = path.join(__dirname, 'install-noargs-dev') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg } @@ -31,10 +28,14 @@ var PACKAGE_JSON2 = { } test('setup', function (t) { - setup() + mkdirp.sync(path.resolve(pkg, 'node_modules')) + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(PACKAGE_JSON1, null, 2) + ) mr({ port: common.port }, function (er, s) { t.ifError(er, 'started mock registry') - server = s + t.parent.teardown(() => s.close()) t.end() }) }) @@ -87,25 +88,3 @@ test('install noargs installs updated devDependencies', function (t) { } ) }) - 
-test('cleanup', function (t) { - server.close() - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} - -function setup () { - cleanup() - mkdirp.sync(path.resolve(pkg, 'node_modules')) - fs.writeFileSync( - path.join(pkg, 'package.json'), - JSON.stringify(PACKAGE_JSON1, null, 2) - ) - - process.chdir(pkg) -} diff --git a/test/tap/install-package-json-order.js b/test/tap/install-package-json-order.js index 93977c6c3dc44..45ce882620c7b 100644 --- a/test/tap/install-package-json-order.js +++ b/test/tap/install-package-json-order.js @@ -1,27 +1,33 @@ var test = require('tap').test var path = require('path') -var rimraf = require('rimraf') var mkdirp = require('mkdirp') var spawn = require('child_process').spawn var npm = require.resolve('../../bin/npm-cli.js') var node = process.execPath -var pkg = path.resolve(__dirname, 'install-package-json-order') +const common = require('../common-tap.js') +var pkg = common.pkg var workdir = path.join(pkg, 'workdir') var tmp = path.join(pkg, 'tmp') -var cache = path.join(pkg, 'cache') var fs = require('fs') -var osenv = require('osenv') test('package.json sorting after install', function (t) { var packageJson = path.resolve(pkg, 'package.json') var installedPackage = path.resolve(workdir, 'node_modules/install-package-json-order/package.json') - cleanup() - mkdirp.sync(cache) mkdirp.sync(tmp) mkdirp.sync(workdir) - setup() + + fs.writeFileSync(path.resolve(pkg, 'package.json'), JSON.stringify({ + 'name': 'install-package-json-order', + 'version': '0.0.0', + 'array': [ 'one', 'two', 'three' ] + }, null, 2), 'utf8') + + fs.writeFileSync(path.resolve(workdir, 'package.json'), JSON.stringify({ + 'name': 'install-package-json-order-work', + 'version': '0.0.0' + }, null, 2), 'utf8') var before = JSON.parse(fs.readFileSync(packageJson).toString()) var child = spawn(node, [npm, 'install', pkg], { cwd: workdir }) @@ -34,29 +40,3 @@ test('package.json sorting after install', function (t) { t.end() }) }) - -test('cleanup', function (t) { - cleanup() - t.pass('cleaned up') - t.end() -}) - -function setup () { - mkdirp.sync(pkg) - - fs.writeFileSync(path.resolve(pkg, 'package.json'), JSON.stringify({ - 'name': 'install-package-json-order', - 'version': '0.0.0', - 'array': [ 'one', 'two', 'three' ] - }, null, 2), 'utf8') - fs.writeFileSync(path.resolve(workdir, 'package.json'), JSON.stringify({ - 'name': 'install-package-json-order-work', - 'version': '0.0.0' - }, null, 2), 'utf8') -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(cache) - rimraf.sync(pkg) -} diff --git a/test/tap/install-package-lock-only.js b/test/tap/install-package-lock-only.js index 9d8aa8dbec313..2c5191a02e7d7 100644 --- a/test/tap/install-package-lock-only.js +++ b/test/tap/install-package-lock-only.js @@ -8,9 +8,9 @@ var File = Tacks.File var Dir = Tacks.Dir var common = require('../common-tap.js') -var basedir = path.join(__dirname, path.basename(__filename, '.js')) +var basedir = common.pkg var testdir = path.join(basedir, 'testdir') -var cachedir = path.join(basedir, 'cache') +var cachedir = common.cache var globaldir = path.join(basedir, 'global') var tmpdir = path.join(basedir, 'tmp') @@ -28,6 +28,18 @@ var conf = { }) } +const confPkgLockFalse = { + cwd: testdir, + env: Object.assign({}, process.env, { + npm_config_cache: cachedir, + npm_config_tmp: tmpdir, + npm_config_prefix: globaldir, + npm_config_registry: common.registry, + npm_config_loglevel: 'warn', + npm_config_package_lock: false + }) +} + var server 
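The new confPkgLockFalse fixture leans on npm's config plumbing: any npm_config_* environment variable is read as configuration, so npm_config_package_lock=false behaves like setting package-lock = false in an npmrc (i.e. --no-package-lock), while flags given on the command line take precedence over environment-level config. That precedence is what the two package-lock tests added below assert. The following sketch is illustrative only and is not part of the change; the project path is hypothetical.

const { spawn } = require('child_process')

// Spawn npm with package-lock disabled at the environment level; the explicit
// --package-lock-only flag (and, in the second test below, --package-lock)
// still wins, so a package-lock.json is written anyway.
const child = spawn('npm', ['install', '--package-lock-only'], {
  cwd: '/tmp/example-project', // hypothetical project directory
  env: Object.assign({}, process.env, {
    npm_config_package_lock: 'false' // environment-level config, as in confPkgLockFalse
  }),
  stdio: 'inherit'
})

child.on('close', (code) => {
  // Expect exit code 0 and a package-lock.json in the project directory.
  console.log('npm exited with code', code)
})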
var fixture = new Tacks(Dir({ cache: Dir(), @@ -54,7 +66,6 @@ function cleanup () { } test('setup', function (t) { - setup() mr({port: common.port, throwOnUnmatched: true}, function (err, s) { if (err) throw err server = s @@ -63,6 +74,7 @@ test('setup', function (t) { }) test('package-lock-only', function (t) { + setup() return common.npm(['install', '--package-lock-only'], conf).spread((code, stdout, stderr) => { t.is(code, 0, 'command ran ok') t.comment(stdout.trim()) @@ -78,6 +90,32 @@ test('package-lock-only', function (t) { }) }) +test('--package-lock-only with --package-lock negates `package_lock: false`', function (t) { + setup() + return common.npm(['install', '--package-lock', '--package-lock-only'], confPkgLockFalse).spread((code, stdout, stderr) => { + t.is(code, 0, 'ok') + t.comment(stdout.trim()) + t.comment(stderr.trim()) + + // Verify that package-lock.json exists. + t.ok(fs.existsSync(pkgLockPath), 'ensure that package-lock.json was created') + t.end() + }) +}) + +test('package-lock-only creates package_lock.json when config has `package_lock: false`', function (t) { + setup() + return common.npm(['install', '--package-lock-only'], confPkgLockFalse).spread((code, stdout, stderr) => { + t.is(code, 0, 'ok') + t.comment(stdout.trim()) + t.comment(stderr.trim()) + + // Verify that package-lock.json exists. + t.ok(fs.existsSync(pkgLockPath), 'ensure that package-lock.json was created') + t.end() + }) +}) + test('cleanup', function (t) { server.close() cleanup() diff --git a/test/tap/install-parse-error.js b/test/tap/install-parse-error.js index 1330195ead9f0..1a72c336de071 100644 --- a/test/tap/install-parse-error.js +++ b/test/tap/install-parse-error.js @@ -1,12 +1,11 @@ 'use strict' -var path = require('path') var test = require('tap').test var Tacks = require('tacks') var File = Tacks.File var Dir = Tacks.Dir var common = require('../common-tap.js') -var testdir = path.join(__dirname, path.basename(__filename, '.js')) +var testdir = common.pkg var fixture = new Tacks(Dir({ 'package.json': File( diff --git a/test/tap/install-property-conflicts.js b/test/tap/install-property-conflicts.js index 8f293885abf40..a98f8570c62bd 100644 --- a/test/tap/install-property-conflicts.js +++ b/test/tap/install-property-conflicts.js @@ -1,14 +1,12 @@ var fs = require('fs') var resolve = require('path').resolve -var osenv = require('osenv') var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = resolve(__dirname, 'install-property-conflicts') +var pkg = common.pkg var target = resolve(pkg, '_target') var EXEC_OPTS = { @@ -22,8 +20,12 @@ var json = { } test('setup', function (t) { - setup() - t.pass('setup ran') + // make sure it installs locally + mkdirp.sync(resolve(target, 'node_modules')) + fs.writeFileSync( + resolve(pkg, 'package.json'), + JSON.stringify(json, null, 2) + '\n' + ) t.end() }) @@ -49,26 +51,3 @@ test('install package with a `type` property', function (t) { } ) }) - -test('clean', function (t) { - cleanup() - t.pass('cleaned up') - t.end() -}) - -function setup () { - cleanup() - mkdirp.sync(pkg) - // make sure it installs locally - mkdirp.sync(resolve(target, 'node_modules')) - fs.writeFileSync( - resolve(pkg, 'package.json'), - JSON.stringify(json, null, 2) + '\n' - ) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) - rimraf.sync(target) -} diff --git a/test/tap/install-report-just-installed.js b/test/tap/install-report-just-installed.js index 
0a2cde2562ac7..23b373b269405 100644 --- a/test/tap/install-report-just-installed.js +++ b/test/tap/install-report-just-installed.js @@ -1,12 +1,11 @@ 'use strict' -var path = require('path') var test = require('tap').test var Tacks = require('tacks') var Dir = Tacks.Dir var File = Tacks.File var common = require('../common-tap.js') -var testdir = path.resolve(__dirname, path.basename(__filename, '.js')) +var testdir = common.pkg var fixture = new Tacks(Dir({ node_modules: Dir({ a: Dir({ diff --git a/test/tap/install-save-consistent-newlines.js b/test/tap/install-save-consistent-newlines.js index 6250377445a79..dfe41c649d2af 100644 --- a/test/tap/install-save-consistent-newlines.js +++ b/test/tap/install-save-consistent-newlines.js @@ -5,13 +5,12 @@ const path = require('path') const mkdirp = require('mkdirp') const mr = require('npm-registry-mock') -const osenv = require('osenv') const rimraf = require('rimraf') const test = require('tap').test const common = require('../common-tap.js') -const pkg = path.join(__dirname, 'install-save-consistent-newlines') +const pkg = common.pkg const EXEC_OPTS = { cwd: pkg } @@ -21,102 +20,79 @@ const json = { description: 'fixture' } -var server - -test('setup', function (t) { - setup('\n') +test('mock registry', function (t) { mr({ port: common.port }, function (er, s) { - server = s + t.parent.teardown(() => s.close()) t.end() }) }) -test('\'npm install --save\' should keep the original package.json line endings (LF)', function (t) { - common.npm( - [ - '--loglevel', 'silent', - '--registry', common.registry, - '--save', - 'install', 'underscore@1.3.1' - ], - EXEC_OPTS, - function (err, code) { - t.ifError(err, 'npm ran without issue') - t.notOk(code, 'npm install exited without raising an error code') - - const pkgPath = path.resolve(pkg, 'package.json') - const pkgStr = fs.readFileSync(pkgPath, 'utf8') - - t.match(pkgStr, '\n') - t.notMatch(pkgStr, '\r') +const runTest = (t, opts) => { + t.test('setup', setup(opts.ending)) + t.test('check', check(opts)) + t.end() +} - const pkgLockPath = path.resolve(pkg, 'package-lock.json') - const pkgLockStr = fs.readFileSync(pkgLockPath, 'utf8') +const setup = lineEnding => t => { + rimraf(pkg, er => { + if (er) { + throw er + } + mkdirp.sync(path.resolve(pkg, 'node_modules')) - t.match(pkgLockStr, '\n') - t.notMatch(pkgLockStr, '\r') + var jsonStr = JSON.stringify(json, null, 2) - t.end() + if (lineEnding === '\r\n') { + jsonStr = jsonStr.replace(/\n/g, '\r\n') } - ) -}) -test('\'npm install --save\' should keep the original package.json line endings (CRLF)', function (t) { - setup('\r\n') + fs.writeFileSync( + path.join(pkg, 'package.json'), + jsonStr + ) - common.npm( - [ - '--loglevel', 'silent', - '--registry', common.registry, - '--save', - 'install', 'underscore@1.3.1' - ], - EXEC_OPTS, - function (err, code) { - t.ifError(err, 'npm ran without issue') - t.notOk(code, 'npm install exited without raising an error code') + t.end() + }) +} - const pkgPath = path.resolve(pkg, 'package.json') - const pkgStr = fs.readFileSync(pkgPath, 'utf8') +const check = opts => t => common.npm( + [ + '--loglevel', 'silent', + '--registry', common.registry, + '--save', + 'install', 'underscore@1.3.1' + ], + EXEC_OPTS +).then(([code, err, out]) => { + t.notOk(code, 'npm install exited without raising an error code') - t.match(pkgStr, '\r\n') - t.notMatch(pkgStr, /[^\r]\n/) + const pkgPath = path.resolve(pkg, 'package.json') + const pkgStr = fs.readFileSync(pkgPath, 'utf8') - const pkgLockPath = path.resolve(pkg, 
'package-lock.json') - const pkgLockStr = fs.readFileSync(pkgLockPath, 'utf8') + t.match(pkgStr, opts.match) + t.notMatch(pkgStr, opts.notMatch) - t.match(pkgLockStr, '\r\n') - t.notMatch(pkgLockStr, /[^\r]\n/) + const pkgLockPath = path.resolve(pkg, 'package-lock.json') + const pkgLockStr = fs.readFileSync(pkgLockPath, 'utf8') - t.end() - } - ) -}) + t.match(pkgLockStr, opts.match) + t.notMatch(pkgLockStr, opts.notMatch) -test('cleanup', function (t) { - server.close() - cleanup() t.end() }) -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} - -function setup (lineEnding) { - cleanup() - mkdirp.sync(path.resolve(pkg, 'node_modules')) - - var jsonStr = JSON.stringify(json, null, 2) - - if (lineEnding === '\r\n') { - jsonStr = jsonStr.replace(/\n/g, '\r\n') - } +test('keep LF line endings', t => { + runTest(t, { + ending: '\n', + match: '\n', + notMatch: '\r' + }) +}) - fs.writeFileSync( - path.join(pkg, 'package.json'), - jsonStr - ) - process.chdir(pkg) -} +test('keep CRLF line endings', t => { + runTest(t, { + ending: '\r\n', + match: '\r\n', + notMatch: /[^\r]\n/ + }) +}) diff --git a/test/tap/install-save-exact.js b/test/tap/install-save-exact.js index 14e32f0f84b62..efa1e636138a1 100644 --- a/test/tap/install-save-exact.js +++ b/test/tap/install-save-exact.js @@ -3,14 +3,12 @@ var path = require('path') var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var server -var pkg = path.join(__dirname, 'install-save-exact') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg } @@ -20,53 +18,32 @@ var json = { description: 'fixture' } -test('setup', function (t) { - setup() +test('mock registry', function (t) { mr({ port: common.port }, function (er, s) { - server = s + t.parent.teardown(() => s.close()) t.end() }) }) -test('\'npm install --save --save-exact\' should install local pkg', function (t) { - common.npm( - [ - '--loglevel', 'silent', - '--registry', common.registry, - '--save', - '--save-exact', - 'install', 'underscore@1.3.1' - ], - EXEC_OPTS, - function (err, code) { - t.ifError(err, 'npm ran without issue') - t.notOk(code, 'npm install exited without raising an error code') - - var p = path.resolve(pkg, 'node_modules/underscore/package.json') - t.ok(JSON.parse(fs.readFileSync(p))) - - p = path.resolve(pkg, 'package.json') - var pkgJson = JSON.parse(fs.readFileSync(p, 'utf8')) - - t.same( - pkgJson.dependencies, - { 'underscore': '1.3.1' }, - 'underscore dependency should specify exactly 1.3.1' - ) - - t.end() - } - ) -}) - -test('\'npm install --save-dev --save-exact\' should install local pkg', function (t) { - setup() +const setup = t => { + t.test('destroy', t => rimraf(pkg, t.end)) + t.test('create', t => { + mkdirp.sync(path.resolve(pkg, 'node_modules')) + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(json, null, 2) + ) + t.end() + }) + t.end() +} +const check = (savearg, deptype) => t => { common.npm( [ '--loglevel', 'silent', '--registry', common.registry, - '--save-dev', + savearg, '--save-exact', 'install', 'underscore@1.3.1' ], @@ -82,7 +59,7 @@ test('\'npm install --save-dev --save-exact\' should install local pkg', functio var pkgJson = JSON.parse(fs.readFileSync(p, 'utf8')) t.same( - pkgJson.devDependencies, + pkgJson[deptype], { 'underscore': '1.3.1' }, 'underscore dependency should specify exactly 1.3.1' ) @@ -90,25 +67,16 @@ test('\'npm install --save-dev 
--save-exact\' should install local pkg', functio t.end() } ) -}) +} -test('cleanup', function (t) { - server.close() - cleanup() +test('\'npm install --save --save-exact\' should install local pkg', function (t) { + t.test('setup', setup) + t.test('check', check('--save', 'dependencies')) t.end() }) -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} - -function setup () { - cleanup() - mkdirp.sync(path.resolve(pkg, 'node_modules')) - fs.writeFileSync( - path.join(pkg, 'package.json'), - JSON.stringify(json, null, 2) - ) - process.chdir(pkg) -} +test('\'npm install --save-dev --save-exact\' should install local pkg', function (t) { + t.test('setup', setup) + t.test('check', check('--save-dev', 'devDependencies')) + t.end() +}) diff --git a/test/tap/install-save-local.js b/test/tap/install-save-local.js index 5965281bf2967..8b6597952813d 100644 --- a/test/tap/install-save-local.js +++ b/test/tap/install-save-local.js @@ -2,13 +2,12 @@ var fs = require('graceful-fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var root = path.join(__dirname, 'install-save-local') +var root = common.pkg var pkg = path.join(root, 'package') var EXEC_OPTS = { cwd: pkg } @@ -28,168 +27,155 @@ var localDevDependency = { version: '0.0.0' } -test('setup', function (t) { - setup() +test('setup deps in root', t => { + mkdirp.sync(path.join(root, 'package-local-dependency')) + fs.writeFileSync( + path.join(root, 'package-local-dependency', 'package.json'), + JSON.stringify(localDependency, null, 2) + ) + + mkdirp.sync(path.join(root, 'package-local-dev-dependency')) + fs.writeFileSync( + path.join(root, 'package-local-dev-dependency', 'package.json'), + JSON.stringify(localDevDependency, null, 2) + ) + t.end() }) test('\'npm install --save ../local/path\' should save to package.json', function (t) { - common.npm( + t.plan(2) + t.test('setup', setup) + t.test('run test', t => common.npm( [ '--loglevel', 'silent', '--save', 'install', '../package-local-dependency' ], - EXEC_OPTS, - function (err, code) { - t.ifError(err, 'npm install ran without issue') - t.notOk(code, 'npm install exited with code 0') - - var dependencyPackageJson = path.join( - pkg, 'node_modules', 'package-local-dependency', 'package.json' - ) - t.ok(JSON.parse(fs.readFileSync(dependencyPackageJson, 'utf8'))) - - var pkgJson = JSON.parse(fs.readFileSync(pkg + '/package.json', 'utf8')) - t.is(Object.keys(pkgJson.dependencies).length, 1, 'only one dep') - t.ok( - /file:.*?[/]package-local-dependency$/.test(pkgJson.dependencies['package-local-dependency']), - 'local package saved correctly' - ) - t.end() - } - ) + EXEC_OPTS + ).then(([code]) => { + t.equal(code, 0, 'npm install exited with code 0') + + var dependencyPackageJson = path.join( + pkg, 'node_modules', 'package-local-dependency', 'package.json' + ) + t.ok(JSON.parse(fs.readFileSync(dependencyPackageJson, 'utf8'))) + + var pkgJson = JSON.parse(fs.readFileSync(pkg + '/package.json', 'utf8')) + t.is(Object.keys(pkgJson.dependencies).length, 1, 'only one dep') + t.ok( + /file:.*?[/]package-local-dependency$/.test(pkgJson.dependencies['package-local-dependency']), + 'local package saved correctly' + ) + })) }) test('\'npm install --save local/path\' should save to package.json', function (t) { - setup() - common.npm( + t.plan(2) + t.test('setup', setup) + t.test('run test', t => common.npm( [ '--loglevel', 'silent', '--save', 
'install', 'package-local-dependency/' ], - EXEC_OPTS, - function (err, code) { - t.ifError(err, 'npm install ran without issue') - t.notOk(code, 'npm install exited with code 0') - - var dependencyPackageJson = path.join( - pkg, 'node_modules', 'package-local-dependency', 'package.json' - ) - t.ok(JSON.parse(fs.readFileSync(dependencyPackageJson, 'utf8'))) - - var pkgJson = JSON.parse(fs.readFileSync(pkg + '/package.json', 'utf8')) - t.is(Object.keys(pkgJson.dependencies).length, 1, 'only one dep') - t.ok( - /file:package-local-dependency$/.test(pkgJson.dependencies['package-local-dependency']), - 'local package saved correctly' - ) - t.end() - } - ) + EXEC_OPTS + ).then(([code, out, err]) => { + t.equal(code, 0, 'npm install exited with code 0') + + var dependencyPackageJson = path.join( + pkg, 'node_modules', 'package-local-dependency', 'package.json' + ) + t.ok(JSON.parse(fs.readFileSync(dependencyPackageJson, 'utf8'))) + + var pkgJson = JSON.parse(fs.readFileSync(pkg + '/package.json', 'utf8')) + t.is(Object.keys(pkgJson.dependencies).length, 1, 'only one dep') + t.ok( + /file:package-local-dependency$/.test(pkgJson.dependencies['package-local-dependency']), + 'local package saved correctly' + ) + })) }) test('\'npm install --save-dev ../local/path\' should save to package.json', function (t) { - setup() - common.npm( + t.plan(2) + t.test('setup', setup) + t.test('run test', t => common.npm( [ '--loglevel', 'silent', '--save-dev', 'install', '../package-local-dev-dependency' ], - EXEC_OPTS, - function (err, code) { - t.ifError(err, 'npm install ran without issue') - t.notOk(code, 'npm install exited with code 0') - - var dependencyPackageJson = path.resolve( - pkg, 'node_modules', 'package-local-dev-dependency', 'package.json' - ) - t.ok(JSON.parse(fs.readFileSync(dependencyPackageJson, 'utf8'))) - - var pkgJson = JSON.parse(fs.readFileSync(pkg + '/package.json', 'utf8')) - t.is(Object.keys(pkgJson.devDependencies).length, 1, 'only one dep') - t.ok( - /file:.*?[/\\]package-local-dev-dependency$/.test(pkgJson.devDependencies['package-local-dev-dependency']), - 'local package saved correctly' - ) - - t.end() - } - ) + EXEC_OPTS + ).then(([code]) => { + t.equal(code, 0, 'npm install exited with code 0') + + var dependencyPackageJson = path.resolve( + pkg, 'node_modules', 'package-local-dev-dependency', 'package.json' + ) + t.ok(JSON.parse(fs.readFileSync(dependencyPackageJson, 'utf8'))) + + var pkgJson = JSON.parse(fs.readFileSync(pkg + '/package.json', 'utf8')) + t.is(Object.keys(pkgJson.devDependencies).length, 1, 'only one dep') + t.ok( + /file:.*?[/\\]package-local-dev-dependency$/.test(pkgJson.devDependencies['package-local-dev-dependency']), + 'local package saved correctly' + ) + + t.end() + })) }) + test('\'npm install --save-dev local/path\' should save to package.json', function (t) { - setup() - common.npm( + t.plan(2) + t.test('setup', setup) + t.test('run test', t => common.npm( [ '--loglevel', 'silent', '--save-dev', 'install', 'package-local-dev-dependency/' ], - EXEC_OPTS, - function (err, code) { - t.ifError(err, 'npm install ran without issue') - t.notOk(code, 'npm install exited with code 0') - - var dependencyPackageJson = path.resolve( - pkg, 'node_modules', 'package-local-dev-dependency', 'package.json' - ) - t.ok(JSON.parse(fs.readFileSync(dependencyPackageJson, 'utf8'))) - - var pkgJson = JSON.parse(fs.readFileSync(pkg + '/package.json', 'utf8')) - t.is(Object.keys(pkgJson.devDependencies).length, 1, 'only one dep') - t.ok( - 
/file:package-local-dev-dependency$/.test(pkgJson.devDependencies['package-local-dev-dependency']), - 'local package saved correctly' - ) - - t.end() - } - ) + EXEC_OPTS + ).then(([code]) => { + t.equal(code, 0, 'npm install exited with code 0') + + var dependencyPackageJson = path.resolve( + pkg, 'node_modules', 'package-local-dev-dependency', 'package.json' + ) + t.ok(JSON.parse(fs.readFileSync(dependencyPackageJson, 'utf8'))) + + var pkgJson = JSON.parse(fs.readFileSync(pkg + '/package.json', 'utf8')) + t.is(Object.keys(pkgJson.devDependencies).length, 1, 'only one dep') + t.ok( + /file:package-local-dev-dependency$/.test(pkgJson.devDependencies['package-local-dev-dependency']), + 'local package saved correctly' + ) + + t.end() + })) }) -test('cleanup', function (t) { - cleanup() +function setup (t) { + t.test('destroy', t => rimraf(pkg, t.end)) + t.test('create', t => { + mkdirp.sync(pkg) + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(json, null, 2) + ) + + mkdirp.sync(path.join(pkg, 'package-local-dependency')) + fs.writeFileSync( + path.join(pkg, 'package-local-dependency', 'package.json'), + JSON.stringify(localDependency, null, 2) + ) + + mkdirp.sync(path.join(pkg, 'package-local-dev-dependency')) + fs.writeFileSync( + path.join(pkg, 'package-local-dev-dependency', 'package.json'), + JSON.stringify(localDevDependency, null, 2) + ) + t.end() + }) t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - process.chdir(__dirname) - rimraf.sync(root) -} - -function setup () { - cleanup() - mkdirp.sync(pkg) - fs.writeFileSync( - path.join(pkg, 'package.json'), - JSON.stringify(json, null, 2) - ) - - mkdirp.sync(path.join(root, 'package-local-dependency')) - fs.writeFileSync( - path.join(root, 'package-local-dependency', 'package.json'), - JSON.stringify(localDependency, null, 2) - ) - - mkdirp.sync(path.join(root, 'package-local-dev-dependency')) - fs.writeFileSync( - path.join(root, 'package-local-dev-dependency', 'package.json'), - JSON.stringify(localDevDependency, null, 2) - ) - - mkdirp.sync(path.join(pkg, 'package-local-dependency')) - fs.writeFileSync( - path.join(pkg, 'package-local-dependency', 'package.json'), - JSON.stringify(localDependency, null, 2) - ) - - mkdirp.sync(path.join(pkg, 'package-local-dev-dependency')) - fs.writeFileSync( - path.join(pkg, 'package-local-dev-dependency', 'package.json'), - JSON.stringify(localDevDependency, null, 2) - ) - process.chdir(pkg) } diff --git a/test/tap/install-save-prefix.js b/test/tap/install-save-prefix.js index b669c5fb7ef70..d61608e1cfd21 100644 --- a/test/tap/install-save-prefix.js +++ b/test/tap/install-save-prefix.js @@ -1,16 +1,13 @@ var fs = require('fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var server -var pkg = path.join(__dirname, 'install-save-prefix') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg } @@ -19,17 +16,18 @@ var json = { version: '0.0.1' } -test('setup', function (t) { - setup() +test('start mock reg', function (t) { mr({ port: common.port }, function (er, s) { t.ifError(er, 'started mock registry') - server = s + t.parent.teardown(() => s.close()) t.end() }) }) test('install --save with \'^\' save prefix should accept minor updates', function (t) { - common.npm( + t.plan(2) + t.test('setup', setup) + t.test('run test', t => common.npm( [ '--registry', common.registry, 
'--loglevel', 'silent', @@ -37,31 +35,30 @@ test('install --save with \'^\' save prefix should accept minor updates', functi '--save', 'install', 'underscore@latest' ], - EXEC_OPTS, - function (err, code) { - t.ifError(err, 'npm install ran without issue') - t.notOk(code, 'npm install exited with code 0') - - var p = path.join(pkg, 'node_modules', 'underscore', 'package.json') - t.ok(JSON.parse(fs.readFileSync(p))) - - var pkgJson = JSON.parse(fs.readFileSync( - path.join(pkg, 'package.json'), - 'utf8' - )) - t.deepEqual( - pkgJson.dependencies, - { 'underscore': '^1.5.1' }, - 'got expected save prefix and version of 1.5.1' - ) - t.end() - } - ) + EXEC_OPTS + ).then(([code]) => { + console.error('back from install!', code) + t.equal(code, 0, 'npm install exited with code 0') + + var p = path.join(pkg, 'node_modules', 'underscore', 'package.json') + t.ok(JSON.parse(fs.readFileSync(p))) + + var pkgJson = JSON.parse(fs.readFileSync( + path.join(pkg, 'package.json'), + 'utf8' + )) + t.deepEqual( + pkgJson.dependencies, + { 'underscore': '^1.5.1' }, + 'got expected save prefix and version of 1.5.1' + ) + })) }) test('install --save-dev with \'^\' save prefix should accept minor dev updates', function (t) { - setup() - common.npm( + t.plan(2) + t.test('setup', setup) + t.test('run test', t => common.npm( [ '--registry', common.registry, '--loglevel', 'silent', @@ -69,31 +66,30 @@ test('install --save-dev with \'^\' save prefix should accept minor dev updates' '--save-dev', 'install', 'underscore@1.3.1' ], - EXEC_OPTS, - function (err, code) { - t.ifError(err, 'npm install ran without issue') - t.notOk(code, 'npm install exited with code 0') - - var p = path.join(pkg, 'node_modules', 'underscore', 'package.json') - t.ok(JSON.parse(fs.readFileSync(p))) - - var pkgJson = JSON.parse(fs.readFileSync( - path.join(pkg, 'package.json'), - 'utf8' - )) - t.deepEqual( - pkgJson.devDependencies, - { 'underscore': '^1.3.1' }, - 'got expected save prefix and version of 1.3.1' - ) - t.end() - } - ) + EXEC_OPTS + ).then(([code]) => { + t.equal(code, 0, 'npm install exited with code 0') + + var p = path.join(pkg, 'node_modules', 'underscore', 'package.json') + t.ok(JSON.parse(fs.readFileSync(p))) + + var pkgJson = JSON.parse(fs.readFileSync( + path.join(pkg, 'package.json'), + 'utf8' + )) + t.deepEqual( + pkgJson.devDependencies, + { 'underscore': '^1.3.1' }, + 'got expected save prefix and version of 1.3.1' + ) + t.end() + })) }) test('install --save with \'~\' save prefix should accept patch updates', function (t) { - setup() - common.npm( + t.plan(2) + t.test('setup', setup) + t.test('run test', t => common.npm( [ '--registry', common.registry, '--loglevel', 'silent', @@ -101,31 +97,29 @@ test('install --save with \'~\' save prefix should accept patch updates', functi '--save', 'install', 'underscore@1.3.1' ], - EXEC_OPTS, - function (err, code) { - t.ifError(err, 'npm install ran without issue') - t.notOk(code, 'npm install exited with code 0') - - var p = path.join(pkg, 'node_modules', 'underscore', 'package.json') - t.ok(JSON.parse(fs.readFileSync(p))) - - var pkgJson = JSON.parse(fs.readFileSync( - path.join(pkg, 'package.json'), - 'utf8' - )) - t.deepEqual( - pkgJson.dependencies, - { 'underscore': '~1.3.1' }, - 'got expected save prefix and version of 1.3.1' - ) - t.end() - } - ) + EXEC_OPTS + ).then(([code]) => { + t.equal(code, 0, 'npm install exited with code 0') + + var p = path.join(pkg, 'node_modules', 'underscore', 'package.json') + t.ok(JSON.parse(fs.readFileSync(p))) + + var pkgJson = 
JSON.parse(fs.readFileSync( + path.join(pkg, 'package.json'), + 'utf8' + )) + t.deepEqual( + pkgJson.dependencies, + { 'underscore': '~1.3.1' }, + 'got expected save prefix and version of 1.3.1' + ) + })) }) test('install --save-dev with \'~\' save prefix should accept patch updates', function (t) { - setup() - common.npm( + t.plan(2) + t.test('setup', setup) + t.test('run test', t => common.npm( [ '--registry', common.registry, '--loglevel', 'silent', @@ -133,46 +127,38 @@ test('install --save-dev with \'~\' save prefix should accept patch updates', fu '--save-dev', 'install', 'underscore@1.3.1' ], - EXEC_OPTS, - function (err, code) { - t.ifError(err, 'npm install ran without issue') - t.notOk(code, 'npm install exited with code 0') - - var p = path.join(pkg, 'node_modules', 'underscore', 'package.json') - t.ok(JSON.parse(fs.readFileSync(p))) - - var pkgJson = JSON.parse(fs.readFileSync( - path.join(pkg, 'package.json'), - 'utf8' - )) - t.deepEqual( - pkgJson.devDependencies, - { 'underscore': '~1.3.1' }, - 'got expected save prefix and version of 1.3.1' - ) - t.end() - } - ) + EXEC_OPTS + ).then(([code]) => { + t.notOk(code, 'npm install exited with code 0') + + var p = path.join(pkg, 'node_modules', 'underscore', 'package.json') + t.ok(JSON.parse(fs.readFileSync(p))) + + var pkgJson = JSON.parse(fs.readFileSync( + path.join(pkg, 'package.json'), + 'utf8' + )) + t.deepEqual( + pkgJson.devDependencies, + { 'underscore': '~1.3.1' }, + 'got expected save prefix and version of 1.3.1' + ) + })) }) -test('cleanup', function (t) { - server.close() - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} - -function setup () { - cleanup() - mkdirp.sync(path.resolve(pkg, 'node_modules')) - fs.writeFileSync( - path.join(pkg, 'package.json'), - JSON.stringify(json, null, 2) - ) +function setup (t) { + t.test('destroy', t => { + t.plan(2) + rimraf(path.resolve(pkg, 'node_modules'), () => t.pass('node_modules')) + rimraf(path.resolve(pkg, 'package-lock.json'), () => t.pass('lock file')) + }) + t.test('create', t => { + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(json, null, 2) + ) + t.end() + }) - process.chdir(pkg) + t.end() } diff --git a/test/tap/install-scoped-already-installed.js b/test/tap/install-scoped-already-installed.js index 58966b047c430..d4655f4a9a748 100644 --- a/test/tap/install-scoped-already-installed.js +++ b/test/tap/install-scoped-already-installed.js @@ -3,13 +3,11 @@ var path = require('path') var existsSync = fs.existsSync || path.existsSync var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap') -var root = path.join(__dirname, 'install-scoped-already-installed') +var root = common.pkg var pkg = path.join(root, 'package-with-scoped-paths') var modules = path.join(pkg, 'node_modules') @@ -37,7 +35,6 @@ var scopedDependency = { } test('setup', function (t) { - rimraf.sync(root) mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), @@ -56,7 +53,6 @@ test('setup', function (t) { JSON.stringify(scopedDependency, null, 2) ) - process.chdir(pkg) t.end() }) @@ -118,12 +114,6 @@ test('installing already installed local scoped package', function (t) { ) }) -test('cleanup', function (t) { - process.chdir(osenv.tmpdir()) - rimraf.sync(root) - t.end() -}) - function contains (list, element) { var matcher = new RegExp(element.replace(/\//g, '[\\\\/]') + '$') for (var i = 0; i < list.length; ++i) {
diff --git a/test/tap/install-scoped-link.js b/test/tap/install-scoped-link.js index 9171b8f46f439..a0c9c61a9843c 100644 --- a/test/tap/install-scoped-link.js +++ b/test/tap/install-scoped-link.js @@ -4,16 +4,15 @@ var path = require('path') var existsSync = fs.existsSync || path.existsSync var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var escapeExecPath = require('../../lib/utils/escape-exec-path') var common = require('../common-tap.js') -var pkg = path.join(__dirname, 'install-scoped-link') -var work = path.join(__dirname, 'install-scoped-link-TEST') +var resolve = require('path').resolve +var pkg = resolve(common.pkg, 'package') +var work = resolve(common.pkg, 'TEST') var modules = path.join(work, 'node_modules') var EXEC_OPTS = { cwd: work } @@ -29,7 +28,6 @@ var json = { } test('setup', function (t) { - cleanup() mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), @@ -73,14 +71,3 @@ test('installing package with links', function (t) { } ) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(work) - rimraf.sync(pkg) -} diff --git a/test/tap/install-scoped-with-bundled-dependency.js b/test/tap/install-scoped-with-bundled-dependency.js index bd197ae036591..db126eb6426c4 100644 --- a/test/tap/install-scoped-with-bundled-dependency.js +++ b/test/tap/install-scoped-with-bundled-dependency.js @@ -6,9 +6,9 @@ var File = Tacks.File var Dir = Tacks.Dir var common = require('../common-tap.js') -var basedir = path.join(__dirname, path.basename(__filename, '.js')) +var basedir = common.pkg var testdir = path.join(basedir, 'testdir') -var cachedir = path.join(basedir, 'cache') +var cachedir = common.cache var globaldir = path.join(basedir, 'global') var tmpdir = path.join(basedir, 'tmp') diff --git a/test/tap/install-scoped-with-peer-dependency.js b/test/tap/install-scoped-with-peer-dependency.js index 7f60c73221092..016f5f0453b31 100644 --- a/test/tap/install-scoped-with-peer-dependency.js +++ b/test/tap/install-scoped-with-peer-dependency.js @@ -2,15 +2,13 @@ var fs = require('fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.join(__dirname, 'install-scoped-with-peer-dependency') +var pkg = common.pkg var local = path.join(pkg, 'package') -var EXEC_OPTS = { } +var EXEC_OPTS = { cwd: pkg } var json = { name: '@scope/package', @@ -21,8 +19,12 @@ var json = { } test('setup', function (t) { - setup() - + mkdirp.sync(local) + mkdirp.sync(path.resolve(pkg, 'node_modules')) + fs.writeFileSync( + path.join(local, 'package.json'), + JSON.stringify(json, null, 2) + ) t.end() }) @@ -36,24 +38,3 @@ test('it should install peerDependencies in same tree level as the parent packag t.end() }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function setup () { - cleanup() - mkdirp.sync(local) - mkdirp.sync(path.resolve(pkg, 'node_modules')) - fs.writeFileSync( - path.join(local, 'package.json'), - JSON.stringify(json, null, 2) - ) - process.chdir(pkg) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/install-shrinkwrapped-git.js b/test/tap/install-shrinkwrapped-git.js index 7bbb4f6e5fb75..3cdc2b91291d4 100644 --- a/test/tap/install-shrinkwrapped-git.js +++ b/test/tap/install-shrinkwrapped-git.js @@ -3,7 +3,6 @@ 
var fs = require('fs') var path = require('path') var resolve = path.resolve -var osenv = require('osenv') var mkdirp = require('mkdirp') var rimraf = require('rimraf') var test = require('tap').test @@ -11,7 +10,7 @@ var npm = require('../../lib/npm') var common = require('../common-tap') var chain = require('slide').chain -var mockPath = resolve(__dirname, 'install-shrinkwrapped') +var mockPath = common.pkg var parentPath = resolve(mockPath, 'parent') var parentNodeModulesPath = path.join(parentPath, 'node_modules') var outdatedNodeModulesPath = resolve(mockPath, 'node-modules-backup') @@ -32,16 +31,31 @@ var childPackageJSON = JSON.stringify({ }) test('setup', function (t) { - cleanup() - setup(function (err, result) { - t.ifError(err, 'git started up successfully') + mkdirp.sync(parentPath) + fs.writeFileSync(resolve(parentPath, 'package.json'), parentPackageJSON) + process.chdir(parentPath) - if (!err) { - gitDaemon = result[result.length - 2] - gitDaemonPID = result[result.length - 1] - } + // Setup child + mkdirp.sync(childPath) + fs.writeFileSync(resolve(childPath, 'package.json'), childPackageJSON) + + // Setup npm and then git + npm.load({ + registry: common.registry, + loglevel: 'silent', + save: true // Always install packages with --save + }, function () { + // It's important to initialize git after npm because it uses config + initializeGit(function (err, result) { + t.ifError(err, 'git started up successfully') - t.end() + if (!err) { + gitDaemon = result[result.length - 2] + gitDaemonPID = result[result.length - 1] + } + + t.end() + }) }) }) @@ -53,11 +67,11 @@ test('shrinkwrapped git dependency got updated', function (t) { if (err) { throw err } chain([ // Install & shrinkwrap child package's first commit - [npm.commands.install, ['git://localhost:1234/child.git#' + refs[0]]], + [npm.commands.install, ['git://localhost:' + common.gitPort + '/child.git#' + refs[0]]], // Backup node_modules with the first commit [fs.rename, parentNodeModulesPath, outdatedNodeModulesPath], // Install & shrinkwrap child package's latest commit - [npm.commands.install, ['git://localhost:1234/child.git#' + refs[1].substr(0, 8)]], + [npm.commands.install, ['git://localhost:' + common.gitPort + '/child.git#' + refs[1].substr(0, 8)]], // Restore node_modules with the first commit [rimraf, parentNodeModulesPath], [fs.rename, outdatedNodeModulesPath, parentNodeModulesPath], @@ -68,15 +82,15 @@ test('shrinkwrapped git dependency got updated', function (t) { t.similar(pkglock, { dependencies: { child: { - version: `git://localhost:1234/child.git#${refs[1]}`, - from: `git://localhost:1234/child.git#${refs[1].substr(0, 8)}` + version: `git://localhost:${common.gitPort}/child.git#${refs[1]}`, + from: `git://localhost:${common.gitPort}/child.git#${refs[1].substr(0, 8)}` } } }, 'version and from fields are correct in git-based pkglock dep') var childPackageJSON = require(path.join(parentNodeModulesPath, 'child', 'package.json')) t.equal( childPackageJSON._resolved, - 'git://localhost:1234/child.git#' + refs[1], + 'git://localhost:' + common.gitPort + '/child.git#' + refs[1], "Child package wasn't updated" ) t.end() @@ -85,40 +99,10 @@ test('shrinkwrapped git dependency got updated', function (t) { }) test('clean', function (t) { - gitDaemon.on('close', function () { - cleanup() - t.end() - }) + gitDaemon.on('close', t.end) process.kill(gitDaemonPID) }) -function setup (cb) { - // Setup parent package - mkdirp.sync(parentPath) - fs.writeFileSync(resolve(parentPath, 'package.json'), parentPackageJSON) - 
process.chdir(parentPath) - - // Setup child - mkdirp.sync(childPath) - fs.writeFileSync(resolve(childPath, 'package.json'), childPackageJSON) - - // Setup npm and then git - npm.load({ - registry: common.registry, - loglevel: 'silent', - save: true // Always install packages with --save - }, function () { - // It's important to initialize git after npm because it uses config - initializeGit(cb) - }) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(mockPath) - rimraf.sync(common['npm_config_cache']) -} - function prepareChildAndGetRefs (cb) { var opts = { cwd: childPath, env: { PATH: process.env.PATH } } chain([ @@ -153,7 +137,7 @@ function startGitDaemon (cb) { '--export-all', '--base-path=' + mockPath, // Path to the dir that contains child.git '--reuseaddr', - '--port=1234' + '--port=' + common.gitPort ], { cwd: parentPath, diff --git a/test/tap/install-test-cli-with-broken-package-lock.js b/test/tap/install-test-cli-with-broken-package-lock.js new file mode 100644 index 0000000000000..3c1a56131bad4 --- /dev/null +++ b/test/tap/install-test-cli-with-broken-package-lock.js @@ -0,0 +1,118 @@ +var fs = require('graceful-fs') +var path = require('path') + +var mkdirp = require('mkdirp') +var osenv = require('osenv') +var rimraf = require('rimraf') +var test = require('tap').test + +var common = require('../common-tap.js') + +var pkg = common.pkg + +var EXEC_OPTS = { cwd: pkg } + +var json = { + name: 'install-test-cli-with-broken-package-lock', + description: 'fixture', + version: '0.0.0', + dependencies: { + optimist: '0.6.0' + } +} + +var brokenLockfile = { + name: 'install-test-cli-with-broken-package-lock', + version: '0.0.0', + lockfileVersion: 1, + requires: true, + dependencies: { + optimist: { + version: '0.6.0', + resolved: 'https://registry.npmjs.org/optimist/-/optimist-0.6.0.tgz', + integrity: 'sha1-aUJIJvNAX3nxQub8PZrljU27kgA=', + requires: { + minimist: '~0.0.1', + wordwrap: '~0.0.2' + } + }, + wordwrap: { + version: '0.0.3', + resolved: 'https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz', + integrity: 'sha1-o9XabNXAvAAI03I0u68b7WMFkQc=' + } + } +} + +var expected = { + name: 'install-test-cli-with-broken-package-lock', + version: '0.0.0', + lockfileVersion: 1, + requires: true, + dependencies: { + minimist: { + version: '0.0.10', + resolved: 'https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz', + integrity: 'sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=' + }, + optimist: { + version: '0.6.0', + resolved: 'https://registry.npmjs.org/optimist/-/optimist-0.6.0.tgz', + integrity: 'sha1-aUJIJvNAX3nxQub8PZrljU27kgA=', + requires: { + minimist: '~0.0.1', + wordwrap: '~0.0.2' + } + }, + wordwrap: { + version: '0.0.3', + resolved: 'https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz', + integrity: 'sha1-o9XabNXAvAAI03I0u68b7WMFkQc=' + } + } +} + +test('setup', function (t) { + setup() + t.end() +}) + +test('\'npm install-test\' should repair package-lock.json', function (t) { + common.npm(['install-test'], EXEC_OPTS, function (err, code, stderr, stdout) { + if (err) throw err + t.comment(stdout.trim()) + t.comment(stderr.trim()) + t.is(code, 0, 'npm install did not raise error code') + var lockfile = JSON.parse(fs.readFileSync(path.join(pkg, 'package-lock.json'))) + t.same( + lockfile, + expected, + 'package-lock.json should be repaired' + ) + t.end() + }) +}) + +test('cleanup', function (t) { + cleanup() + t.end() +}) + +function setup () { + cleanup() + mkdirp.sync(pkg) + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(json, null, 
2) + ) + fs.writeFileSync( + path.join(pkg, 'package-lock.json'), + JSON.stringify(brokenLockfile, null, 2) + ) + process.chdir(pkg) +} + +function cleanup () { + process.chdir(osenv.tmpdir()) + rimraf.sync(pkg) +} diff --git a/test/tap/install-test-cli-without-package-lock.js b/test/tap/install-test-cli-without-package-lock.js index ea3d75feb6af8..603043af8fd38 100644 --- a/test/tap/install-test-cli-without-package-lock.js +++ b/test/tap/install-test-cli-without-package-lock.js @@ -2,13 +2,11 @@ var fs = require('graceful-fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.join(__dirname, path.basename(__filename, '.js')) +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg } @@ -28,8 +26,23 @@ var dependency = { } test('setup', function (t) { - setup() - t.pass('setup ran') + mkdirp.sync(path.join(pkg, 'dependency')) + fs.writeFileSync( + path.join(pkg, 'dependency', 'package.json'), + JSON.stringify(dependency, null, 2) + ) + + mkdirp.sync(path.join(pkg, 'node_modules')) + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(json, null, 2) + ) + + // Disable package-lock + fs.writeFileSync( + path.join(pkg, '.npmrc'), + 'package-lock=false\n' + ) t.end() }) @@ -49,35 +62,3 @@ test('\'npm install-test\' should not generate package-lock.json.*', function (t t.end() }) }) - -test('cleanup', function (t) { - cleanup() - t.pass('cleaned up') - t.end() -}) - -function setup () { - mkdirp.sync(path.join(pkg, 'dependency')) - fs.writeFileSync( - path.join(pkg, 'dependency', 'package.json'), - JSON.stringify(dependency, null, 2) - ) - - mkdirp.sync(path.join(pkg, 'node_modules')) - fs.writeFileSync( - path.join(pkg, 'package.json'), - JSON.stringify(json, null, 2) - ) - - // Disable package-lock - fs.writeFileSync( - path.join(pkg, '.npmrc'), - 'package-lock=false\n' - ) - process.chdir(pkg) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/install-windows-newlines.js b/test/tap/install-windows-newlines.js index 1c69b204ad02b..b56c7645ebf42 100644 --- a/test/tap/install-windows-newlines.js +++ b/test/tap/install-windows-newlines.js @@ -8,7 +8,7 @@ var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.join(__dirname, 'install-windows-newlines') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg, stdio: [0, 1, 2] } diff --git a/test/tap/install-with-dev-dep-duplicate.js b/test/tap/install-with-dev-dep-duplicate.js index 2d31b8fad7abd..7d8586f47d4ad 100644 --- a/test/tap/install-with-dev-dep-duplicate.js +++ b/test/tap/install-with-dev-dep-duplicate.js @@ -1,16 +1,13 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') var npm = require('../../') -var pkg = path.resolve(__dirname, 'dev-dep-duplicate') +var pkg = common.pkg var json = { author: 'Anders Janmyr', @@ -42,13 +39,19 @@ test('prefers version from dependencies over devDependencies', function (t) { mr({ port: common.port }, function (er, s) { setup(function (err) { - if (err) return t.fail(err) + if (err) { + throw err + } npm.install('.', function (err) { - if (err) return t.fail(err) + if (err) { + throw err + } npm.commands.ls([], true, function (err, _, results) { - if 
(err) return t.fail(err) + if (err) { + throw err + } // these contain full paths so we can't do an exact match // with them @@ -63,14 +66,7 @@ test('prefers version from dependencies over devDependencies', function (t) { }) }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - function setup (cb) { - cleanup() - mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) @@ -78,13 +74,8 @@ function setup (cb) { process.chdir(pkg) var opts = { - cache: path.resolve(pkg, 'cache'), + cache: common.cache, registry: common.registry } npm.load(opts, cb) } - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/install-without-registry-config.js b/test/tap/install-without-registry-config.js new file mode 100644 index 0000000000000..852bd777ced5a --- /dev/null +++ b/test/tap/install-without-registry-config.js @@ -0,0 +1,35 @@ +const t = require('tap') +const { pkg, npm } = require('../common-tap.js') +const { writeFileSync, statSync, readFileSync } = require('fs') +const mkdirp = require('mkdirp') +const proj = pkg + '/project' +const dep = pkg + '/dep' +mkdirp.sync(proj) +mkdirp.sync(dep) +writeFileSync(dep + '/package.json', JSON.stringify({ + name: 'dependency', + version: '1.2.3' +})) +writeFileSync(proj + '/package.json', JSON.stringify({ + name: 'project', + version: '4.2.0' +})) + +const runTest = t => npm([ + 'install', + '../dep', + '--no-registry' +], { cwd: proj }).then(([code, out, err]) => { + t.equal(code, 0) + t.match(out, /^\+ dependency@1\.2\.3\n.* 1 package in [0-9.]+m?s\n$/) + t.equal(err, '') + const data = readFileSync(proj + '/node_modules/dependency/package.json', 'utf8') + t.same(JSON.parse(data), { + name: 'dependency', + version: '1.2.3' + }, 'dep got installed') + t.ok(statSync(proj + '/package-lock.json').isFile(), 'package-lock exists') +}) + +t.test('install without a registry, no package lock', t => runTest(t)) +t.test('install without a registry, with package lock', t => runTest(t)) diff --git a/test/tap/install.fund.js b/test/tap/install.fund.js new file mode 100644 index 0000000000000..fca5fb3afd123 --- /dev/null +++ b/test/tap/install.fund.js @@ -0,0 +1,99 @@ +'use strict' + +const { test } = require('tap') +const { getPrintFundingReport } = require('../../lib/install/fund') + +test('message when there are no funding found', (t) => { + t.equal( + getPrintFundingReport({}), + '', + 'should not print any message if missing info' + ) + t.equal( + getPrintFundingReport({ + name: 'foo', + version: '1.0.0', + dependencies: {} + }), + '', + 'should not print any message if package has no dependencies' + ) + t.equal( + getPrintFundingReport({ + fund: true, + idealTree: { + name: 'foo', + version: '1.0.0', + dependencies: { + bar: {}, + lorem: {} + } + } + }), + '', + 'should not print any message if no package has funding info' + ) + t.end() +}) + +test('print appropriate message for a single package', (t) => { + t.equal( + getPrintFundingReport({ + fund: true, + idealTree: { + name: 'foo', + version: '1.0.0', + children: [ + { + package: { + name: 'bar', + version: '1.0.0', + funding: { type: 'foo', url: 'http://example.com' } + } + } + ] + } + }).replace(/[\r\n]+/g, '\n'), + `\n1 package is looking for funding\n run \`npm fund\` for details\n`, + 'should print single package message' + ) + t.end() +}) + +test('print appropriate message for many packages', (t) => { + t.equal( + getPrintFundingReport({ + fund: true, + idealTree: { + name: 'foo', + version: '1.0.0', + children: [ + 
{ + package: { + name: 'bar', + version: '1.0.0', + funding: { type: 'foo', url: 'http://example.com' } + } + }, + { + package: { + name: 'lorem', + version: '1.0.0', + funding: { type: 'foo', url: 'http://example.com' } + } + }, + { + package: { + name: 'ipsum', + version: '1.0.0', + funding: { type: 'foo', url: 'http://example.com' } + } + } + ] + } + }).replace(/[\r\n]+/g, '\n'), + `\n3 packages are looking for funding\n run \`npm fund\` for details\n`, + 'should print many package message' + ) + t.end() +}) diff --git a/test/tap/invalid-dep-version-filtering.js b/test/tap/invalid-dep-version-filtering.js index 19ab5d209dd5c..f2f175b24ef5e 100644 --- a/test/tap/invalid-dep-version-filtering.js +++ b/test/tap/invalid-dep-version-filtering.js @@ -7,8 +7,8 @@ var Tacks = require('tacks') var File = Tacks.File var Dir = Tacks.Dir -var testdir = path.join(__dirname, path.basename(__filename, '.js')) -var cachedir = path.join(testdir, 'cache') +var testdir = common.pkg +var cachedir = common.cache var fixture = new Tacks(Dir({ cache: Dir(), diff --git a/test/tap/it.js b/test/tap/it.js index b0dddb8f919de..bb59bc6f5e904 100644 --- a/test/tap/it.js +++ b/test/tap/it.js @@ -2,15 +2,13 @@ var join = require('path').join var statSync = require('graceful-fs').statSync var writeFileSync = require('graceful-fs').writeFileSync -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap') -var pkg = join(__dirname, 'run-script') +var pkg = common.pkg var installed = join(pkg, 'node_modules', 'underscore', 'package.json') var json = { @@ -23,55 +21,45 @@ var json = { } } -var server - test('run up the mock registry', function (t) { mr({ port: common.port }, function (err, s) { if (err) throw err - server = s + t.parent.teardown(() => s.close()) t.end() }) }) +const check = args => t => + common.npm(args.concat('--registry=' + common.registry), { cwd: pkg }) + .then(([code, stdout, stderr]) => { + t.equal(code, 0, 'command ran without error') + t.ok(statSync(installed), 'package was installed') + t.equal(require(installed).version, '1.5.1', 'underscore got installed as expected') + t.match(stdout, /hax/, 'found expected test output') + t.notOk(stderr, 'stderr should be empty') + }) + test('npm install-test', function (t) { - setup() - common.npm(['install-test', '--no-shrinkwrap', '--registry=' + common.registry], { cwd: pkg }, function (err, code, stdout, stderr) { - if (err) throw err - t.equal(code, 0, 'command ran without error') - t.ok(statSync(installed), 'package was installed') - t.equal(require(installed).version, '1.5.1', 'underscore got installed as expected') - t.match(stdout, /hax/, 'found expected test output') - t.notOk(stderr, 'stderr should be empty') - t.end() - }) + t.plan(2) + t.test('setup', setup) + t.test('check', check(['install-test', '--no-shrinkwrap'])) }) test('npm it (the form most people will use)', function (t) { - setup() - common.npm(['it', '--registry=' + common.registry], { cwd: pkg }, function (err, code, stdout, stderr) { - if (err) throw err - t.equal(code, 0, 'command ran without error') - t.ok(statSync(installed), 'package was installed') - t.equal(require(installed).version, '1.5.1', 'underscore got installed as expected') - t.match(stdout, /hax/, 'found expected test output') - t.notOk(stderr, 'stderr should be empty') - t.end() - }) + t.plan(2) + t.test('setup', setup) + t.test('check', check(['it'])) }) -test('cleanup', 
function (t) { - process.chdir(osenv.tmpdir()) - server.close() - cleanup() +function setup (t) { + t.test('destroy', t => { + t.plan(2) + rimraf(join(pkg, 'node_modules'), () => t.pass('node_modules')) + rimraf(join(pkg, 'package-lock.json'), () => t.pass('lock file')) + }) + t.test('create', t => { + writeFileSync(join(pkg, 'package.json'), JSON.stringify(json, null, 2)) + t.end() + }) t.end() -}) - -function cleanup () { - rimraf.sync(pkg) -} - -function setup () { - cleanup() - mkdirp.sync(pkg) - writeFileSync(join(pkg, 'package.json'), JSON.stringify(json, null, 2)) } diff --git a/test/tap/legacy-ignore-nested-nm.js b/test/tap/legacy-ignore-nested-nm.js index 095c41efa146b..6a57b72b60488 100644 --- a/test/tap/legacy-ignore-nested-nm.js +++ b/test/tap/legacy-ignore-nested-nm.js @@ -4,7 +4,7 @@ var common = require('../common-tap.js') var path = require('path') var rimraf = require('rimraf') var mkdirp = require('mkdirp') -var basepath = path.resolve(__dirname, path.basename(__filename, '.js')) +var basepath = common.pkg var fixturepath = path.resolve(basepath, 'npm-test-ignore-nested-nm') var modulepath = path.resolve(basepath, 'node_modules') var installedpath = path.resolve(modulepath, 'npm-test-ignore-nested-nm') diff --git a/test/tap/legacy-missing-bindir.js b/test/tap/legacy-missing-bindir.js index 2285f8d2a7556..398ef0e7acc51 100644 --- a/test/tap/legacy-missing-bindir.js +++ b/test/tap/legacy-missing-bindir.js @@ -5,7 +5,7 @@ var test = require('tap').test var common = require('../common-tap.js') var rimraf = require('rimraf') var mkdirp = require('mkdirp') -var basepath = path.resolve(__dirname, path.basename(__filename, '.js')) +var basepath = common.pkg var fixturepath = path.resolve(basepath, 'npm-test-missing-bindir') var modulepath = path.resolve(basepath, 'node_modules') var installedpath = path.resolve(modulepath, 'npm-test-missing-bindir') diff --git a/test/tap/legacy-no-auth-leak.js b/test/tap/legacy-no-auth-leak.js index f837239250222..ce1adaeca756a 100644 --- a/test/tap/legacy-no-auth-leak.js +++ b/test/tap/legacy-no-auth-leak.js @@ -1,8 +1,7 @@ 'use strict' var test = require('tap').test var common = require('../common-tap.js') -var path = require('path') -var basepath = path.resolve(__dirname, path.basename(__filename, '.js')) +var basepath = common.pkg var Tacks = require('tacks') var File = Tacks.File var Dir = Tacks.Dir diff --git a/test/tap/legacy-platform-all.js b/test/tap/legacy-platform-all.js index 2bfb19a45782b..01c7be7ec1c86 100644 --- a/test/tap/legacy-platform-all.js +++ b/test/tap/legacy-platform-all.js @@ -4,7 +4,7 @@ var common = require('../common-tap.js') var path = require('path') var rimraf = require('rimraf') var mkdirp = require('mkdirp') -var basepath = path.resolve(__dirname, path.basename(__filename, '.js')) +var basepath = common.pkg var fixturepath = path.resolve(basepath, 'npm-test-platform-all') var modulepath = path.resolve(basepath, 'node_modules') var Tacks = require('tacks') @@ -33,6 +33,7 @@ var fixture = new Tacks( ], cpu: [ 'arm', + 'arm64', 'mips', 'ia32', 'x64', diff --git a/test/tap/legacy-platform.js b/test/tap/legacy-platform.js index 4e94148b21309..619d2e4330c59 100644 --- a/test/tap/legacy-platform.js +++ b/test/tap/legacy-platform.js @@ -4,7 +4,7 @@ var common = require('../common-tap.js') var path = require('path') var rimraf = require('rimraf') var mkdirp = require('mkdirp') -var basepath = path.resolve(__dirname, path.basename(__filename, '.js')) +var basepath = common.pkg var fixturepath = path.resolve(basepath, 
'npm-test-platform') var modulepath = path.resolve(basepath, 'node_modules') var Tacks = require('tacks') diff --git a/test/tap/legacy-private.js b/test/tap/legacy-private.js index 5e7817bf6a13a..44a1094cd0cc6 100644 --- a/test/tap/legacy-private.js +++ b/test/tap/legacy-private.js @@ -4,7 +4,7 @@ var common = require('../common-tap.js') var path = require('path') var rimraf = require('rimraf') var mkdirp = require('mkdirp') -var basepath = path.resolve(__dirname, path.basename(__filename, '.js')) +var basepath = common.pkg var fixturepath = path.resolve(basepath, 'npm-test-private') var modulepath = path.resolve(basepath, 'node_modules') var Tacks = require('tacks') diff --git a/test/tap/legacy-test-package.js b/test/tap/legacy-test-package.js index d94666b43e2e4..3c807e50491c7 100644 --- a/test/tap/legacy-test-package.js +++ b/test/tap/legacy-test-package.js @@ -4,7 +4,7 @@ var common = require('../common-tap.js') var path = require('path') var rimraf = require('rimraf') var mkdirp = require('mkdirp') -var basepath = path.resolve(__dirname, path.basename(__filename, '.js')) +var basepath = common.pkg var fixturepath = path.resolve(basepath, 'npm-test-test-package') var modulepath = path.resolve(basepath, 'node_modules') var installedpath = path.resolve(modulepath, 'npm-test-test-package') diff --git a/test/tap/lifecycle-INIT_CWD.js b/test/tap/lifecycle-INIT_CWD.js index eec5c266eec98..fbedd3849c708 100644 --- a/test/tap/lifecycle-INIT_CWD.js +++ b/test/tap/lifecycle-INIT_CWD.js @@ -2,57 +2,35 @@ var fs = require('fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'lifecycle-initcwd') +var pkg = common.pkg var subdir = path.resolve(pkg, 'subdir') var json = { name: 'init-cwd', version: '1.0.0', scripts: { - initcwd: 'echo "$INIT_CWD"' + initcwd: process.platform === 'win32' ? 
'echo %INIT_CWD%' : 'echo "$INIT_CWD"' } } test('setup', function (t) { - cleanup() - mkdirp.sync(pkg) mkdirp.sync(subdir) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) ) - - process.chdir(subdir) - t.end() -}) - -test('make sure the env.INIT_CWD is correct', function (t) { - common.npm(['run-script', 'initcwd'], { - cwd: subdir - }, function (er, code, stdout) { - if (er) throw er - t.equal(code, 0, 'exit code') - stdout = stdout.trim().split(/\r|\n/).pop() - var actual = stdout - - t.equal(actual, subdir) - t.end() - }) -}) - -test('cleanup', function (t) { - cleanup() t.end() }) -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(subdir) - rimraf.sync(pkg) -} +test('make sure the env.INIT_CWD is correct', t => + common.npm(['run-script', 'initcwd'], { cwd: subdir }) + .then(([code, stdout, stderr]) => { + t.equal(code, 0, 'exit code') + stdout = stdout.trim().split(/\r|\n/).pop() + var actual = stdout + t.equal(actual, subdir) + })) diff --git a/test/tap/lifecycle-order.js b/test/tap/lifecycle-order.js index 903e1945d1e91..ac6c07925c52b 100644 --- a/test/tap/lifecycle-order.js +++ b/test/tap/lifecycle-order.js @@ -1,14 +1,8 @@ var fs = require('graceful-fs') var path = require('path') - -var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test - var common = require('../common-tap.js') - -var pkg = path.resolve(__dirname, path.basename(__filename, '.js')) +var pkg = common.pkg var json = { name: 'lifecycle-order', @@ -21,37 +15,19 @@ var json = { } test('setup', function (t) { - cleanup() - mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) ) - - process.chdir(pkg) t.end() }) -test('lifecycle scripts execute in the proper order', function (t) { - common.npm('install', {cwd: pkg}, function (err, code, stdout, stderr) { - if (err) throw err +test('lifecycle scripts execute in the proper order', t => + common.npm('install', {cwd: pkg}).then(([code, stdout, stderr]) => { t.is(code, 0, 'no error') // All three files should exist t.ok(fs.existsSync(path.join(pkg, 'preinstall-step')), 'preinstall ok') t.ok(fs.existsSync(path.join(pkg, 'install-step')), 'install ok') t.ok(fs.existsSync(path.join(pkg, 'postinstall-step')), 'postinstall ok') - - t.end() - }) -}) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} + })) diff --git a/test/broken-under-nyc-and-travis/lifecycle-path.js b/test/tap/lifecycle-path.js similarity index 93% rename from test/broken-under-nyc-and-travis/lifecycle-path.js rename to test/tap/lifecycle-path.js index 6209319b412f6..70fb839197191 100644 --- a/test/broken-under-nyc-and-travis/lifecycle-path.js +++ b/test/tap/lifecycle-path.js @@ -2,14 +2,14 @@ var fs = require('fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') var rimraf = require('rimraf') +var which = require('which') var test = require('tap').test var common = require('../common-tap.js') var isWindows = require('../../lib/utils/is-windows.js') -var pkg = path.resolve(__dirname, 'lifecycle-path') +var pkg = common.pkg var PATH if (isWindows) { @@ -21,9 +21,10 @@ if (isWindows) { PATH = '/bin:/usr/bin' } +var systemNode = which.sync('node', { nothrow: true, path: PATH }) +// the path to the system wide node (null if none) + test('setup', function (t) { - cleanup() - mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 
'package.json'), JSON.stringify({}, null, 2) @@ -183,6 +184,12 @@ function checkPath (testconfig, t) { 'The node binary used for scripts is.*' + process.execPath.replace(/[/\\]/g, '.')) t.match(stderr, regex, 'reports the current binary vs conflicting') + } else if (systemNode !== null) { + var regexSystemNode = new RegExp( + 'The node binary used for scripts is.*' + + systemNode.replace(/[/\\]/g, '.') + ) + t.match(stderr, regexSystemNode, 'reports the system binary vs conflicting') } else { t.match(stderr, /there is no node binary in the current PATH/, 'informs user that there is no node binary in PATH') } @@ -201,13 +208,3 @@ function checkPath (testconfig, t) { t.end() }) } - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/link.js b/test/tap/link.js index 88f3caed791fc..2d2d63de21415 100644 --- a/test/tap/link.js +++ b/test/tap/link.js @@ -1,18 +1,16 @@ var mkdirp = require('mkdirp') -var osenv = require('osenv') var path = require('path') -var rimraf = require('rimraf') var test = require('tap').test var lstatSync = require('fs').lstatSync var writeFileSync = require('fs').writeFileSync var common = require('../common-tap.js') -var link = path.join(__dirname, 'link') -var linkScoped = path.join(__dirname, 'link-scoped') -var linkInstall = path.join(__dirname, 'link-install') +var link = path.join(common.pkg, 'link') +var linkScoped = path.join(common.pkg, 'link-scoped') +var linkInstall = path.join(common.pkg, 'link-install') var linkInside = path.join(linkInstall, 'node_modules', 'inside') -var linkRoot = path.join(__dirname, 'link-root') +var linkRoot = path.join(common.pkg, 'link-root') var config = 'prefix = ' + linkRoot var configPath = path.join(link, '_npmrc') @@ -72,7 +70,28 @@ var insideInstallJSON = { } test('setup', function (t) { - setup() + mkdirp.sync(linkRoot) + mkdirp.sync(link) + writeFileSync( + path.join(link, 'package.json'), + JSON.stringify(readJSON, null, 2) + ) + mkdirp.sync(linkScoped) + writeFileSync( + path.join(linkScoped, 'package.json'), + JSON.stringify(readScopedJSON, null, 2) + ) + mkdirp.sync(linkInstall) + writeFileSync( + path.join(linkInstall, 'package.json'), + JSON.stringify(installJSON, null, 2) + ) + mkdirp.sync(linkInside) + writeFileSync( + path.join(linkInside, 'package.json'), + JSON.stringify(insideInstallJSON, null, 2) + ) + writeFileSync(configPath, config) common.npm(['ls', '-g', '--depth=0'], OPTS, function (err, c, out) { t.ifError(err) t.equal(c, 0, 'set up ok') @@ -173,50 +192,14 @@ test('ls the linked packages', function (t) { }) test('cleanup', function (t) { - process.chdir(osenv.tmpdir()) + process.chdir(common.pkg) common.npm(['rm', 'foo'], OPTS, function (err, code) { t.ifError(err, 'npm removed the linked package without error') t.equal(code, 0, 'cleanup foo in local ok') common.npm(['rm', '-g', 'foo'], OPTS, function (err, code) { t.ifError(err, 'npm removed the global package without error') t.equal(code, 0, 'cleanup foo in global ok') - - cleanup() t.end() }) }) }) - -function cleanup () { - rimraf.sync(linkRoot) - rimraf.sync(link) - rimraf.sync(linkScoped) - rimraf.sync(linkInstall) - rimraf.sync(linkInside) -} - -function setup () { - cleanup() - mkdirp.sync(linkRoot) - mkdirp.sync(link) - writeFileSync( - path.join(link, 'package.json'), - JSON.stringify(readJSON, null, 2) - ) - mkdirp.sync(linkScoped) - writeFileSync( - path.join(linkScoped, 'package.json'), - JSON.stringify(readScopedJSON, null, 2) - ) - 
mkdirp.sync(linkInstall) - writeFileSync( - path.join(linkInstall, 'package.json'), - JSON.stringify(installJSON, null, 2) - ) - mkdirp.sync(linkInside) - writeFileSync( - path.join(linkInside, 'package.json'), - JSON.stringify(insideInstallJSON, null, 2) - ) - writeFileSync(configPath, config) -} diff --git a/test/tap/local-args-relative-to-cwd.js b/test/tap/local-args-relative-to-cwd.js index 6c424bf67f123..de95516e2893e 100644 --- a/test/tap/local-args-relative-to-cwd.js +++ b/test/tap/local-args-relative-to-cwd.js @@ -6,7 +6,7 @@ var Tacks = require('tacks') var File = Tacks.File var Dir = Tacks.Dir var common = require('../common-tap.js') -var testdir = path.join(__dirname, path.basename(__filename, '.js')) +var testdir = common.pkg var fixture = new Tacks( Dir({ diff --git a/test/tap/locker.js b/test/tap/locker.js index 8c548095f7f54..1df6fda7102dc 100644 --- a/test/tap/locker.js +++ b/test/tap/locker.js @@ -2,26 +2,19 @@ var test = require('tap').test var path = require('path') var fs = require('graceful-fs') var crypto = require('crypto') -var rimraf = require('rimraf') -var osenv = require('osenv') var mkdirp = require('mkdirp') var npm = require('../../') var locker = require('../../lib/utils/locker.js') var lock = locker.lock var unlock = locker.unlock -var pkg = path.join(__dirname, '/locker') +const common = require('../common-tap.js') +var pkg = common.pkg var cache = path.join(pkg, '/cache') var tmp = path.join(pkg, '/tmp') var nm = path.join(pkg, '/node_modules') -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} - test('setup', function (t) { - cleanup() mkdirp.sync(cache) mkdirp.sync(tmp) t.end() @@ -82,8 +75,3 @@ test('unlocking out of order errors out', function (t) { }) }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) diff --git a/test/tap/lockfile-http-deps.js b/test/tap/lockfile-http-deps.js index a614daf139d22..b5eaa10760a43 100644 --- a/test/tap/lockfile-http-deps.js +++ b/test/tap/lockfile-http-deps.js @@ -8,9 +8,9 @@ var File = Tacks.File var Dir = Tacks.Dir var common = require('../common-tap.js') -var basedir = path.join(__dirname, path.basename(__filename, '.js')) +var basedir = common.pkg var testdir = path.join(basedir, 'testdir') -var cachedir = path.join(basedir, 'cache') +var cachedir = common.cache var globaldir = path.join(basedir, 'global') var tmpdir = path.join(basedir, 'tmp') diff --git a/test/tap/logout-scoped.js b/test/tap/logout-scoped.js index db993204789f3..114cc09da7382 100644 --- a/test/tap/logout-scoped.js +++ b/test/tap/logout-scoped.js @@ -8,15 +8,14 @@ var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'logout') +var pkg = common.pkg var outfile = path.join(pkg, '_npmrc') var opts = { cwd: pkg } -var contents = function () { /* -foo=boo -@bar:registry=http://localhost:1337 -//localhost:1337/:_authToken=glarb -*/ }.toString().split('\n').slice(1, -1).join('\n') +var contents = `foo=boo +@bar:registry=http://localhost:${common.port} +//localhost:${common.port}/:_authToken=glarb +` function mocks (server) { server.delete('/-/user/token/glarb') diff --git a/test/tap/logout.js b/test/tap/logout.js index d62cb4fffc28b..9218b4bf6f1ca 100644 --- a/test/tap/logout.js +++ b/test/tap/logout.js @@ -8,14 +8,13 @@ var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'logout') +var pkg = common.pkg var outfile = path.join(pkg, '_npmrc') var opts = { cwd: pkg } -var contents = function () { /* -foo=boo 
-//localhost:1337/:_authToken=glarb -*/ }.toString().split('\n').slice(1, -1).join('\n') +var contents = `foo=boo +//localhost:${common.port}/:_authToken=glarb +` function mocks (server) { server.delete('/-/user/token/glarb') diff --git a/test/tap/ls-depth-cli.js b/test/tap/ls-depth-cli.js index 7fd4a467d612f..55142b3ad167b 100644 --- a/test/tap/ls-depth-cli.js +++ b/test/tap/ls-depth-cli.js @@ -1,16 +1,13 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') var Bluebird = require('bluebird') var mr = Bluebird.promisify(require('npm-registry-mock')) -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap') -var pkg = path.resolve(__dirname, 'ls-depth-cli') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg, @@ -29,8 +26,6 @@ var json = { } test('setup', function (t) { - cleanup() - mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) @@ -117,7 +112,7 @@ test('npm ls --depth=0 --json', function (t) { 'dependencies': { 'test-package-with-one-dep': { 'version': '0.0.0', - 'resolved': 'http://localhost:1337/test-package-with-one-dep/-/test-package-with-one-dep-0.0.0.tgz' + 'resolved': 'http://localhost:' + common.port + '/test-package-with-one-dep/-/test-package-with-one-dep-0.0.0.tgz' } } }) @@ -141,11 +136,11 @@ test('npm ls --depth=Infinity --json', function (t) { 'dependencies': { 'test-package-with-one-dep': { 'version': '0.0.0', - 'resolved': 'http://localhost:1337/test-package-with-one-dep/-/test-package-with-one-dep-0.0.0.tgz', + 'resolved': 'http://localhost:' + common.port + '/test-package-with-one-dep/-/test-package-with-one-dep-0.0.0.tgz', 'dependencies': { 'test-package': { 'version': '0.0.0', - 'resolved': 'http://localhost:1337/test-package/-/test-package-0.0.0.tgz' + 'resolved': 'http://localhost:' + common.port + '/test-package/-/test-package-0.0.0.tgz' } } } @@ -199,13 +194,3 @@ test('npm ls --depth=1 --parseable --long', function (t) { } ) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/ls-depth-unmet.js b/test/tap/ls-depth-unmet.js index 4fd6740d6a58c..bf032efe9c9aa 100644 --- a/test/tap/ls-depth-unmet.js +++ b/test/tap/ls-depth-unmet.js @@ -1,15 +1,12 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap') -var pkg = path.resolve(__dirname, 'ls-depth-unmet') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg } @@ -25,8 +22,6 @@ var json = { } test('setup', function (t) { - cleanup() - mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) @@ -160,13 +155,3 @@ test('npm ls --depth=Infinity', function (t) { } ) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/ls-env.js b/test/tap/ls-env.js index 29058d9245836..5d9d7cd06b2cf 100644 --- a/test/tap/ls-env.js +++ b/test/tap/ls-env.js @@ -1,15 +1,12 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap') 
-var pkg = path.resolve(__dirname, 'ls-depth') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg } @@ -22,8 +19,6 @@ var json = { } test('setup', function (t) { - cleanup() - mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) @@ -123,13 +118,3 @@ test('npm ls --only=prod', function (t) { t.end() }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/ls-l-depth-0.js b/test/tap/ls-l-depth-0.js index e9c2374aad1d7..8e707a1d399b5 100644 --- a/test/tap/ls-l-depth-0.js +++ b/test/tap/ls-l-depth-0.js @@ -1,16 +1,13 @@ -var cat = require('graceful-fs').writeFileSync +var writeFileSync = require('graceful-fs').writeFileSync var resolve = require('path').resolve var mkdirp = require('mkdirp') var Bluebird = require('bluebird') var mr = Bluebird.promisify(require('npm-registry-mock')) -var rimraf = require('rimraf') var test = require('tap').test -var tmpdir = require('osenv').tmpdir - var common = require('../common-tap.js') -var pkg = resolve(__dirname, 'ls-l-depth-0') +var pkg = common.pkg var dep = resolve(pkg, 'deps', 'glock') var modules = resolve(pkg, 'node_modules') @@ -25,8 +22,6 @@ var expected = ' file:glock-1.8.7.tgz\n' + '\n' -var server - var EXEC_OPTS = { cwd: pkg } var fixture = { @@ -44,9 +39,12 @@ var fixture = { var deppack test('setup', function (t) { - setup() + mkdirp.sync(modules) + mkdirp.sync(dep) + + writeFileSync(resolve(dep, 'package.json'), JSON.stringify(fixture)) return mr({ port: common.port }).then((s) => { - server = s + t.parent.teardown(() => s.close()) return common.npm(['pack', dep], EXEC_OPTS) }).spread((code, stdout) => { t.is(code, 0, 'pack') @@ -67,10 +65,12 @@ test('#6311: npm ll --depth=0 duplicates listing', function (t) { if (err) throw err t.notOk(code, 'npm install exited cleanly') t.is(stderr, '', 'npm install ran silently') - t.equal( + t.match( stdout.trim(), - 'add\tunderscore\t1.5.1\tnode_modules/underscore\t\t\n' + - 'add\tglock\t1.8.7\tnode_modules/glock', + new RegExp( + '^add\tunderscore\t1[.]5[.]1\tnode_modules[\\\\/]underscore\t\t[\n]' + + 'add\tglock\t1[.]8[.]7\tnode_modules[\\\\/]glock$' + ), 'got expected install output' ) @@ -98,24 +98,3 @@ test('#6311: npm ll --depth=0 duplicates listing', function (t) { } ) }) - -test('cleanup', function (t) { - cleanup() - server.close() - - t.end() -}) - -function cleanup () { - process.chdir(tmpdir()) - rimraf.sync(pkg) -} - -function setup () { - cleanup() - - mkdirp.sync(modules) - mkdirp.sync(dep) - - cat(resolve(dep, 'package.json'), JSON.stringify(fixture)) -} diff --git a/test/tap/ls-peer.js b/test/tap/ls-peer.js new file mode 100644 index 0000000000000..05cc1382250c1 --- /dev/null +++ b/test/tap/ls-peer.js @@ -0,0 +1,123 @@ +'use strict' +const path = require('path') +const test = require('tap').test +const Tacks = require('tacks') +const File = Tacks.File +const Dir = Tacks.Dir +const common = require('../common-tap.js') + +const basedir = common.pkg +const testdir = path.join(basedir, 'testdir') +const cachedir = common.cache +const globaldir = path.join(basedir, 'global') +const tmpdir = path.join(basedir, 'tmp') + +const conf = { + cwd: testdir, + env: common.newEnv().extend({ + npm_config_cache: cachedir, + npm_config_tmp: tmpdir, + npm_config_prefix: globaldir, + npm_config_registry: common.registry, + npm_config_loglevel: 'warn' + }) +} + +const fixture = new Tacks(Dir({ + cache: Dir(), + global: Dir(), + tmp: Dir(), + testdir: 
Dir({ + 'package.json': File({ + name: 'fixture', + version: '1.0.0', + dependencies: { + dep: 'file:dep-1.0.0.tgz', + 'peer-dep': 'file:peer-dep-2.0.0.tgz' + } + }), + // Source dir + // dep: Dir({ + // 'package.json': File({ + // name: 'dep', + // version: '1.0.0', + // peerDependencies: { + // 'peer-dep': 'file:peer-dep-1.0.0.tgz' + // } + // }) + // }), + 'dep-1.0.0.tgz': File(Buffer.from( + '1f8b0800000000000003ed8f3d0ec2300c853be71451661a1cd166e8cc45' + + 'a2d654e1278d92c200eadd491a60ea462584946fb1f5fc9e655bd59e548f' + + '5b9b2a3ffac1142b0300b2aae8921e1152d062574b10424a08bed0d4d10f' + + '6b1fb2c4d58fca8553bedd937ea19ffa273c08a5cca80bb286b20e2ddb44' + + 'e186ceebc1444d70e090548be8f668d174685a8d3e8c63fc3529633a040e' + + 'fa8ccd5b28e7381ffb3b0bce894ce4d70f6732994c66e60975aec5690008' + + '0000', + 'hex' + )), + // Source dir + // 'peer-dep': Dir({ + // 'package.json': File({ + // name: 'peer-dep', + // version: '2.0.0' + // }) + // }), + 'peer-dep-1.0.0.tgz': File(Buffer.from( + '1f8b08000000000000032b484cce4e4c4fd52f80d07a59c5f9790c540606' + + '06066626260ad8c4c1c0d45c81c1d8d4ccc0d0d0cccc00a80ec830353500' + + 'd2d4760836505a5c925804740aa5e640bca200a78708a8e6525050ca4bcc' + + '4d55b252502a484d2dd24d492d50d2018996a5161567e6e781240cf50cf4' + + '0c94b86ab906dab9a360148c8251300aa80400c1c67f6300080000', + 'hex' + )), + 'peer-dep-2.0.0.tgz': File(Buffer.from( + '1f8b08000000000000032b484cce4e4c4fd52f80d07a59c5f9790c540606' + + '06066626260ad8c4c1c0d45c81c1d8d4ccc0d0d0cccc00a80ec830353500' + + 'd2d4760836505a5c925804740aa5e640bca200a78708a8e6525050ca4bcc' + + '4d55b252502a484d2dd24d492d50d2018996a5161567e6e781248cf40cf4' + + '0c94b86ab906dab9a360148c8251300aa80400cb30060800080000', + 'hex' + )) + }) +})) + +function setup () { + cleanup() + fixture.create(basedir) +} + +function cleanup () { + fixture.remove(basedir) +} + +test('setup', t => { + setup() + return common.fakeRegistry.listen().then(() => common.npm(['install'], conf)) +}) + +test('list warns about unmet peer dependency', t => { + return common.npm(['ls'], conf).then(([code, stdout, stderr]) => { + t.is(code, 1, 'command ran not ok') + t.comment(stdout.trim()) + t.comment(stderr.trim()) + t.match(stdout, 'UNMET PEER DEPENDENCY peer-dep@2.0.0') + t.match(stderr, 'npm ERR! peer dep missing: peer-dep@file:peer-dep-1.0.0.tgz, required by dep@1.0.0') + }) +}) + +test('list shows installed but unmet peer dependency', t => { + return common.npm(['ls', 'peer-dep'], conf).then(([code, stdout, stderr]) => { + t.is(code, 1, 'command ran not ok') + t.comment(stdout.trim()) + t.comment(stderr.trim()) + t.match(stdout, 'UNMET PEER DEPENDENCY peer-dep@2.0.0') + t.match(stderr, 'npm ERR! 
peer dep missing: peer-dep@file:peer-dep-1.0.0.tgz, required by dep@1.0.0') + }) +}) + +test('cleanup', t => { + common.fakeRegistry.close() + cleanup() + t.done() +}) diff --git a/test/tap/ls-production-and-dev.js b/test/tap/ls-production-and-dev.js index 5fc0b776e3337..5836c8fc13714 100644 --- a/test/tap/ls-production-and-dev.js +++ b/test/tap/ls-production-and-dev.js @@ -1,15 +1,12 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap') -var pkg = path.resolve(__dirname, 'ls-depth') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg } @@ -27,8 +24,6 @@ var json = { } test('setup', function (t) { - cleanup() - mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) @@ -160,13 +155,3 @@ test('npm ls --only=prod', function (t) { t.end() }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/ls-top-errors.js b/test/tap/ls-top-errors.js index 69b8b299c8171..79a466fb127a4 100644 --- a/test/tap/ls-top-errors.js +++ b/test/tap/ls-top-errors.js @@ -8,7 +8,7 @@ var rimraf = require('rimraf') var common = require('../common-tap') -var pkg = path.resolve(__dirname, path.basename(__filename, '.js')) +var pkg = common.pkg var pathModA = path.join(pkg, 'node_modules', 'moduleA') var pathModB = path.join(pkg, 'node_modules', 'moduleB') diff --git a/test/tap/ls.js b/test/tap/ls.js index acec723afbae0..b3bdbc613cbc8 100644 --- a/test/tap/ls.js +++ b/test/tap/ls.js @@ -3,7 +3,7 @@ var test = require('tap').test var path = require('path') var rimraf = require('rimraf') var common = require('../common-tap.js') -var basepath = path.resolve(__dirname, path.basename(__filename, '.js')) +var basepath = common.pkg var fixturepath = path.resolve(basepath, 'npm-test-files') var pkgpath = path.resolve(fixturepath, 'npm-test-ls') var Tacks = require('tacks') diff --git a/test/tap/map-to-registry.js b/test/tap/map-to-registry.js deleted file mode 100644 index f6fdef5f10c3e..0000000000000 --- a/test/tap/map-to-registry.js +++ /dev/null @@ -1,166 +0,0 @@ -var test = require('tap').test -var npm = require('../../') - -var common = require('../common-tap.js') -var mapRegistry = require('../../lib/utils/map-to-registry.js') - -var creds = { - '//registry.npmjs.org/:username': 'u', - '//registry.npmjs.org/:_password': Buffer.from('p').toString('base64'), - '//registry.npmjs.org/:email': 'e', - cache: common.npm_config_cache -} -test('setup', function (t) { - npm.load(creds, function (err) { - t.ifError(err) - t.end() - }) -}) - -test('mapRegistryToURI', function (t) { - t.plan(16) - - mapRegistry('basic', npm.config, function (er, uri, auth, registry) { - t.ifError(er, 'mapRegistryToURI worked') - t.equal(uri, 'https://registry.npmjs.org/basic') - t.deepEqual(auth, { - scope: '//registry.npmjs.org/', - token: undefined, - username: 'u', - password: 'p', - email: 'e', - auth: 'dTpw', - alwaysAuth: false - }) - t.equal(registry, 'https://registry.npmjs.org/') - }) - - npm.config.set('scope', 'test') - npm.config.set('@test:registry', 'http://reg.npm/design/-/rewrite/') - npm.config.set('//reg.npm/design/-/rewrite/:_authToken', 'a-token') - mapRegistry('simple', npm.config, function (er, uri, auth, registry) { - t.ifError(er, 'mapRegistryToURI worked') - t.equal(uri, 
'http://reg.npm/design/-/rewrite/simple') - t.deepEqual(auth, { - scope: '//reg.npm/design/-/rewrite/', - token: 'a-token', - username: undefined, - password: undefined, - email: undefined, - auth: undefined, - alwaysAuth: false - }) - t.equal(registry, 'http://reg.npm/design/-/rewrite/') - }) - - npm.config.set('scope', '') - npm.config.set('@test2:registry', 'http://reg.npm/-/rewrite/') - npm.config.set('//reg.npm/-/rewrite/:_authToken', 'b-token') - mapRegistry('@test2/easy', npm.config, function (er, uri, auth, registry) { - t.ifError(er, 'mapRegistryToURI worked') - t.equal(uri, 'http://reg.npm/-/rewrite/@test2%2feasy') - t.deepEqual(auth, { - scope: '//reg.npm/-/rewrite/', - token: 'b-token', - username: undefined, - password: undefined, - email: undefined, - auth: undefined, - alwaysAuth: false - }) - t.equal(registry, 'http://reg.npm/-/rewrite/') - }) - - npm.config.set('scope', 'test') - npm.config.set('@test3:registry', 'http://reg.npm/design/-/rewrite/relative') - npm.config.set('//reg.npm/design/-/rewrite/:_authToken', 'c-token') - mapRegistry('@test3/basic', npm.config, function (er, uri, auth, registry) { - t.ifError(er, 'mapRegistryToURI worked') - t.equal(uri, 'http://reg.npm/design/-/rewrite/relative/@test3%2fbasic') - t.deepEqual(auth, { - scope: '//reg.npm/design/-/rewrite/', - token: 'c-token', - username: undefined, - password: undefined, - email: undefined, - auth: undefined, - alwaysAuth: false - }) - t.equal(registry, 'http://reg.npm/design/-/rewrite/relative/') - }) -}) - -test('mapToRegistry token scoping', function (t) { - npm.config.set('scope', '') - npm.config.set('registry', 'https://reg.npm/') - npm.config.set('//reg.npm/:_authToken', 'r-token') - - t.test('pass token to registry host', function (t) { - mapRegistry( - 'https://reg.npm/packages/e/easy-1.0.0.tgz', - npm.config, - function (er, uri, auth, registry) { - t.ifError(er, 'mapRegistryToURI worked') - t.equal(uri, 'https://reg.npm/packages/e/easy-1.0.0.tgz') - t.deepEqual(auth, { - scope: '//reg.npm/', - token: 'r-token', - username: undefined, - password: undefined, - email: undefined, - auth: undefined, - alwaysAuth: false - }) - t.equal(registry, 'https://reg.npm/') - } - ) - t.end() - }) - - t.test("don't pass token to non-registry host", function (t) { - mapRegistry( - 'https://butts.lol/packages/e/easy-1.0.0.tgz', - npm.config, - function (er, uri, auth, registry) { - t.ifError(er, 'mapRegistryToURI worked') - t.equal(uri, 'https://butts.lol/packages/e/easy-1.0.0.tgz') - t.deepEqual(auth, { - scope: '//reg.npm/', - token: undefined, - username: undefined, - password: undefined, - email: undefined, - auth: undefined, - alwaysAuth: false - }) - t.equal(registry, 'https://reg.npm/') - } - ) - t.end() - }) - - t.test('pass token to non-registry host with always-auth', function (t) { - npm.config.set('always-auth', true) - mapRegistry( - 'https://butts.lol/packages/e/easy-1.0.0.tgz', - npm.config, - function (er, uri, auth, registry) { - t.ifError(er, 'mapRegistryToURI worked') - t.equal(uri, 'https://butts.lol/packages/e/easy-1.0.0.tgz') - t.deepEqual(auth, { - scope: '//reg.npm/', - token: 'r-token', - username: undefined, - password: undefined, - email: undefined, - auth: undefined, - alwaysAuth: true - }) - t.equal(registry, 'https://reg.npm/') - } - ) - t.end() - }) - - t.end() -}) diff --git a/test/tap/meta-test-flaky-root-ownership-test.js b/test/tap/meta-test-flaky-root-ownership-test.js new file mode 100644 index 0000000000000..24dd9e3d95dda --- /dev/null +++ 
b/test/tap/meta-test-flaky-root-ownership-test.js @@ -0,0 +1,20 @@ +const t = require('tap') +if (!process.getuid || process.getuid() !== 0 || !process.env.SUDO_UID || !process.env.SUDO_GID) { + t.pass('this test only runs in sudo mode') + t.end() + process.exit(0) +} + +const common = require('../common-tap.js') +const fs = require('fs') +const mkdirp = require('mkdirp') +mkdirp.sync(common.cache + '/root/owned') +fs.writeFileSync(common.cache + '/root/owned/file.txt', 'should be chowned') +const chown = require('chownr') + +// this will fire after t.teardown() but before process.on('exit') +setTimeout(() => { + chown.sync(common.cache, +process.env.SUDO_UID, +process.env.SUDO_GID) +}, 100) + +t.pass('this is fine') diff --git a/test/tap/nested-extraneous.js b/test/tap/nested-extraneous.js index 99d4bea5b9b1c..1764a41c75279 100644 --- a/test/tap/nested-extraneous.js +++ b/test/tap/nested-extraneous.js @@ -5,7 +5,7 @@ var fs = require('fs') var rimraf = require('rimraf') var path = require('path') -var pkg = path.resolve(__dirname, 'nested-extraneous') +var pkg = common.pkg var pj = { name: 'nested-extraneous', version: '1.2.3' diff --git a/test/tap/no-global-warns.js b/test/tap/no-global-warns.js index 304cf5bf54388..1c39fd79549c0 100644 --- a/test/tap/no-global-warns.js +++ b/test/tap/no-global-warns.js @@ -2,12 +2,10 @@ var path = require('path') var test = require('tap').test var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var writeFileSync = require('fs').writeFileSync var common = require('../common-tap.js') -var base = path.join(__dirname, path.basename(__filename, '.js')) +var base = common.pkg var mockGlobal = path.join(base, 'global') var toInstall = path.join(base, 'to-install') @@ -38,7 +36,13 @@ var installJSON = { } test('setup', function (t) { - setup() + mkdirp.sync(mockGlobal) + mkdirp.sync(toInstall) + writeFileSync( + path.join(toInstall, 'package.json'), + JSON.stringify(installJSON, null, 2) + ) + writeFileSync(configPath, config) t.end() }) @@ -59,24 +63,3 @@ test('no-global-warns', function (t) { t.end() }) }) - -test('cleanup', function (t) { - process.chdir(osenv.tmpdir()) - cleanup() - t.end() -}) - -function cleanup () { - rimraf.sync(base) -} - -function setup () { - cleanup() - mkdirp.sync(mockGlobal) - mkdirp.sync(toInstall) - writeFileSync( - path.join(toInstall, 'package.json'), - JSON.stringify(installJSON, null, 2) - ) - writeFileSync(configPath, config) -} diff --git a/test/tap/no-scan-full-global-dir.js b/test/tap/no-scan-full-global-dir.js index 6a9349d54d962..7501b881c98cc 100644 --- a/test/tap/no-scan-full-global-dir.js +++ b/test/tap/no-scan-full-global-dir.js @@ -3,15 +3,19 @@ var fs = require('fs') var path = require('path') var test = require('tap').test var requireInject = require('require-inject') -var osenv = require('osenv') var npm = require('../../lib/npm.js') +// XXX update this when rpt's realpath.js is extracted out +var rptRealpath = require.resolve('read-package-tree/realpath.js') + +const common = require('../common-tap.js') +const pkg = common.pkg var packages = { - test: {package: {name: 'test'}, path: __dirname, children: ['abc', 'def', 'ghi', 'jkl']}, - abc: {package: {name: 'abc'}, path: path.join(__dirname, 'node_modules', 'abc')}, - def: {package: {name: 'def'}, path: path.join(__dirname, 'node_modules', 'def')}, - ghi: {package: {name: 'ghi'}, path: path.join(__dirname, 'node_modules', 'ghi')}, - jkl: {package: {name: 'jkl'}, path: path.join(__dirname, 'node_modules', 'jkl')} + test: 
{package: {name: 'test'}, path: pkg, children: ['abc', 'def', 'ghi', 'jkl']}, + abc: {package: {name: 'abc'}, path: path.join(pkg, 'node_modules', 'abc')}, + def: {package: {name: 'def'}, path: path.join(pkg, 'node_modules', 'def')}, + ghi: {package: {name: 'ghi'}, path: path.join(pkg, 'node_modules', 'ghi')}, + jkl: {package: {name: 'jkl'}, path: path.join(pkg, 'node_modules', 'jkl')} } var dirs = {} var files = {} @@ -20,8 +24,6 @@ Object.keys(packages).forEach(function (name) { files[path.join(packages[name].path, 'package.json')] = packages[name].package }) -process.chdir(osenv.tmpdir()) - var mockReaddir = function (name, cb) { if (dirs[name]) return cb(null, dirs[name]) var er = new Error('No such mock: ' + name) @@ -39,6 +41,8 @@ mockFs.realpath = function (dir, cb) { return cb(null, dir) } +const mockRptRealpath = path => Promise.resolve(path) + test('setup', function (t) { npm.load(function () { t.pass('npm loaded') @@ -52,7 +56,8 @@ test('installer', function (t) { 'fs': mockFs, 'readdir-scoped-modules': mockReaddir, 'read-package-json': mockReadPackageJson, - 'mkdirp': function (path, cb) { cb() } + 'mkdirp': function (path, cb) { cb() }, + [rptRealpath]: mockRptRealpath }) var Installer = installer.Installer @@ -67,7 +72,7 @@ test('installer', function (t) { } } - var inst = new TestInstaller(__dirname, false, ['def', 'abc']) + var inst = new TestInstaller(pkg, false, ['def', 'abc']) inst.loadCurrentTree(function () { var kids = inst.currentTree.children.map(function (child) { return child.package.name }) t.isDeeply(kids, ['abc', 'def']) @@ -81,7 +86,8 @@ test('uninstaller', function (t) { 'fs': mockFs, 'readdir-scoped-modules': mockReaddir, 'read-package-json': mockReadPackageJson, - 'mkdirp': function (path, cb) { cb() } + 'mkdirp': function (path, cb) { cb() }, + [rptRealpath]: mockRptRealpath }) var Uninstaller = uninstaller.Uninstaller @@ -92,7 +98,7 @@ test('uninstaller', function (t) { } } - var uninst = new TestUninstaller(__dirname, false, ['ghi', 'jkl']) + var uninst = new TestUninstaller(pkg, false, ['ghi', 'jkl']) uninst.loadCurrentTree(function () { var kids = uninst.currentTree.children.map(function (child) { return child.package.name }) t.isDeeply(kids, ['ghi', 'jkl']) diff --git a/test/tap/noargs-install-config-save.js b/test/tap/noargs-install-config-save.js index 12ccf86804b37..cb1af408abb5c 100644 --- a/test/tap/noargs-install-config-save.js +++ b/test/tap/noargs-install-config-save.js @@ -1,14 +1,12 @@ var common = require('../common-tap.js') var test = require('tap').test -var path = require('path') var fs = require('fs') var rimraf = require('rimraf') var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var pkg = path.resolve(process.env.npm_config_tmp || '/tmp', - 'noargs-install-config-save') +var pkg = common.pkg function writePackageJson () { rimraf.sync(pkg) @@ -73,6 +71,6 @@ test('updates the package.json (adds dependencies) with an argument', function ( }) test('cleanup', function (t) { - rimraf.sync(pkg + '/cache') + rimraf.sync(pkg) t.end() }) diff --git a/test/tap/node-modules-path-munge.js b/test/tap/node-modules-path-munge.js index fdca0393cc1d1..fb116806c8cbc 100644 --- a/test/tap/node-modules-path-munge.js +++ b/test/tap/node-modules-path-munge.js @@ -4,7 +4,7 @@ var fs = require('fs') var rimraf = require('rimraf') var mkdirp = require('mkdirp') var path = require('path') -var dir = path.join(__dirname, 'my_node_modules') +var dir = path.join(common.pkg, 'my_node_modules') var script = process.platform === 'win32' ? 
'echo %PATH%' : 'echo $PATH' t.test('setup', function (t) { diff --git a/test/tap/npm-api-not-loaded-error.js b/test/tap/npm-api-not-loaded-error.js index 48b71e5213a7c..8bf263503874e 100644 --- a/test/tap/npm-api-not-loaded-error.js +++ b/test/tap/npm-api-not-loaded-error.js @@ -1,8 +1,9 @@ var test = require('tap').test +const common = require('../common-tap.js') var npm = require('../..') var path = require('path') var rimraf = require('rimraf') -var npmrc = path.join(__dirname, 'npmrc') +var npmrc = path.join(common.pkg, 'npmrc') var fs = require('fs') test('setup', function (t) { diff --git a/test/tap/onload.js b/test/tap/onload.js index 8d2b6c743bab8..4750fa6f88c2a 100644 --- a/test/tap/onload.js +++ b/test/tap/onload.js @@ -2,13 +2,13 @@ var path = require('path') var test = require('tap').test var rimraf = require('rimraf') var common = require('../common-tap.js') -var opts = { cwd: __dirname } +var opts = { cwd: common.pkg } var binDir = '../../node_modules/.bin' var fixture = path.resolve(__dirname, binDir) var onload = path.resolve(__dirname, '../fixtures/onload.js') test('setup', function (t) { - rimraf.sync(path.join(__dirname, 'node_modules')) + rimraf.sync(path.join(common.pkg, 'node_modules')) t.end() }) diff --git a/test/tap/optional-metadep-rollback-collision.js b/test/tap/optional-metadep-rollback-collision.js index 1c05d1ba5860e..b62d63d7f760f 100644 --- a/test/tap/optional-metadep-rollback-collision.js +++ b/test/tap/optional-metadep-rollback-collision.js @@ -4,16 +4,14 @@ var fs = require('graceful-fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'optional-metadep-rollback-collision') +var pkg = common.pkg var deps = path.resolve(pkg, 'deps') var opdep = path.resolve(pkg, 'node_modules', 'opdep') -var cache = path.resolve(pkg, 'cache') +var cache = common.cache var createServer = require('http').createServer var mr = require('npm-registry-mock') var serverPort = 27991 @@ -64,7 +62,7 @@ var opdep_json = { } } -var blart = function () { /* +var blart = ` var rando = require('crypto').randomBytes var resolve = require('path').resolve @@ -116,18 +114,15 @@ mkdirp(BASEDIR, function go () { keepItGoingLouder = {} }, 3 * 1000) }) -*/ }.toString().split('\n').slice(1, -1).join('\n') +` -let badServer -let mockServer test('setup', function (t) { - cleanup() - badServer = createServer(function (req, res) { + const badServer = createServer(function (req, res) { setTimeout(function () { res.writeHead(404) res.end() }, 1000) - }).listen(serverPort) + }).listen(serverPort, () => t.parent.teardown(() => badServer.close())) mkdirp.sync(pkg) fs.writeFileSync( @@ -154,36 +149,36 @@ test('setup', function (t) { JSON.stringify(opdep_json, null, 2) ) mr({ port: common.port }, function (er, server) { - mockServer = server + t.parent.teardown(() => server.close()) t.end() }) }) -test('go go test racer', function (t) { - return common.npm( - [ - '--prefix', pkg, - '--fetch-retries', '0', - '--loglevel', 'error', - '--no-progress', - '--registry', common.registry, - '--parseable', - '--cache', cache, - 'install' - ], - { - cwd: pkg, - env: { - PATH: process.env.PATH, - Path: process.env.Path - }, - stdio: 'pipe' - }).spread((code, stdout, stderr) => { - t.comment(stdout.trim()) - t.comment(stderr.trim()) - t.is(code, 0, 'npm install exited with code 0') - t.notOk(/not ok/.test(stdout), 'should not contain the 
string \'not ok\'') - }) -}) + +test('go go test racer', t => common.npm( + [ + '--prefix', pkg, + '--fetch-retries', '0', + '--loglevel', 'error', + '--no-progress', + '--registry', common.registry, + '--parseable', + '--cache', cache, + 'install' + ], + { + cwd: pkg, + env: { + PATH: process.env.PATH, + Path: process.env.Path + }, + stdio: 'pipe' + } +).spread((code, stdout, stderr) => { + t.comment(stdout.trim()) + t.comment(stderr.trim()) + t.is(code, 0, 'npm install exited with code 0') + t.notOk(/not ok/.test(stdout), 'should not contain the string \'not ok\'') +})) test('verify results', function (t) { t.throws(function () { @@ -191,16 +186,3 @@ test('verify results', function (t) { }) t.end() }) - -test('cleanup', function (t) { - mockServer.close() - badServer.close() - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - - rimraf.sync(pkg) -} diff --git a/test/tap/org.js b/test/tap/org.js new file mode 100644 index 0000000000000..7315cf0b6faf1 --- /dev/null +++ b/test/tap/org.js @@ -0,0 +1,136 @@ +'use strict' + +var test = require('tap').test +var common = require('../common-tap.js') + +var mr = common.fakeRegistry.compat + +var server + +test('setup', function (t) { + mr({port: common.port}, function (err, s) { + t.ifError(err, 'registry mocked successfully') + server = s + t.end() + }) +}) + +const names = ['add', 'set'] +const roles = ['developer', 'admin', 'owner'] + +names.forEach(function (name) { + test('org ' + name + ' [orgname] [username]: defaults to developer', function (t) { + const membershipData = { + org: { + name: 'myorg', + size: 1 + }, + user: 'myuser', + role: 'developer' + } + server.put('/-/org/myorg/user', JSON.stringify({ + user: 'myuser' + })).reply(200, membershipData) + common.npm([ + 'org', 'add', 'myorg', 'myuser', + '--json', + '--registry', common.registry, + '--loglevel', 'silent' + ], {}, function (err, code, stdout, stderr) { + t.ifError(err, 'npm org') + + t.equal(code, 0, 'exited OK') + t.equal(stderr, '', 'no error output') + + t.same(JSON.parse(stdout), membershipData) + t.end() + }) + }) + + roles.forEach(function (role) { + test('org ' + name + ' [orgname] [username]: accepts role ' + role, function (t) { + const membershipData = { + org: { + name: 'myorg', + size: 1 + }, + user: 'myuser', + role: role + } + server.put('/-/org/myorg/user', JSON.stringify({ + user: 'myuser' + })).reply(200, membershipData) + common.npm([ + 'org', name, 'myorg', 'myuser', + '--json', + '--registry', common.registry, + '--loglevel', 'silent' + ], {}, function (err, code, stdout, stderr) { + t.ifError(err, 'npm org') + + t.equal(code, 0, 'exited OK') + t.equal(stderr, '', 'no error output') + + t.same(JSON.parse(stdout), membershipData) + t.end() + }) + }) + }) +}) + +test('org rm [orgname] [username]', function (t) { + const membershipData = { + otheruser: 'admin' + } + server.delete('/-/org/myorg/user', JSON.stringify({ + user: 'myuser' + })).reply(204, {}) + server.get('/-/org/myorg/user') + .reply(200, membershipData) + common.npm([ + 'org', 'rm', 'myorg', 'myuser', + '--json', + '--registry', common.registry, + '--loglevel', 'silent' + ], {}, function (err, code, stdout, stderr) { + t.ifError(err, 'npm org') + + t.equal(code, 0, 'exited OK') + t.equal(stderr, '', 'no error output') + t.deepEqual(JSON.parse(stdout), { + user: 'myuser', + org: 'myorg', + deleted: true, + userCount: 1 + }, 'got useful info') + t.end() + }) +}) + +test('org ls [orgname]', function (t) { + const membershipData = { + username: 'admin', + username2: 
'foo' + } + server.get('/-/org/myorg/user') + .reply(200, membershipData) + common.npm([ + 'org', 'ls', 'myorg', + '--json', + '--registry', common.registry, + '--loglevel', 'silent' + ], {}, function (err, code, stdout, stderr) { + t.ifError(err, 'npm org') + t.equal(code, 0, 'exited OK') + t.equal(stderr, '', 'no error output') + t.same(JSON.parse(stdout), membershipData, 'outputs members') + t.end() + }) +}) + +test('cleanup', function (t) { + t.pass('cleaned up') + server.done() + server.close() + t.end() +}) diff --git a/test/tap/outdated-color.js b/test/tap/outdated-color.js index 3a81d05a821b4..11c967650f3b0 100644 --- a/test/tap/outdated-color.js +++ b/test/tap/outdated-color.js @@ -8,7 +8,7 @@ var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'outdated-color') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg } diff --git a/test/tap/outdated-depth.js b/test/tap/outdated-depth.js index 91523405e89ad..368d32230abf0 100644 --- a/test/tap/outdated-depth.js +++ b/test/tap/outdated-depth.js @@ -1,16 +1,13 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var npm = require('../../') var common = require('../common-tap') -var pkg = path.resolve(__dirname, 'outdated-depth') +var pkg = common.pkg var json = { name: 'outdated-depth', @@ -22,8 +19,6 @@ var json = { } test('setup', function (t) { - cleanup() - mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) @@ -47,6 +42,7 @@ test('outdated depth zero', function (t) { mr({ port: common.port }, function (er, s) { npm.load( { + depth: 0, loglevel: 'silent', registry: common.registry }, @@ -54,25 +50,27 @@ test('outdated depth zero', function (t) { npm.install('.', function (er) { if (er) throw new Error(er) npm.outdated(function (err, d) { - t.ifError(err, 'npm outdated ran without error') + if (err) { + throw err + } t.is(process.exitCode, 1, 'exit code set to 1') process.exitCode = 0 t.deepEqual(d[0], expected) - s.close() - t.end() + t.equal(d.length, 1) + npm.config.set('depth', 1) + npm.outdated(function (err, d) { + t.equal(d.length, 2) + if (err) { + throw err + } + t.is(process.exitCode, 1, 'exit code set to 1') + process.exitCode = 0 + s.close() + t.end() + }) }) }) } ) }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/outdated-git.js b/test/tap/outdated-git.js index 2a595e5288934..15db1a30c7c74 100644 --- a/test/tap/outdated-git.js +++ b/test/tap/outdated-git.js @@ -9,8 +9,8 @@ var common = require('../common-tap.js') var npm = require('../../') // config -var pkg = path.resolve(__dirname, 'outdated-git') -var cache = path.resolve(pkg, 'cache') +var pkg = common.pkg +var cache = common.cache var json = { name: 'outdated-git', author: 'Rocko Artischocko', diff --git a/test/tap/outdated-json.js b/test/tap/outdated-json.js index 39d54fb196c5b..77db52cc72521 100644 --- a/test/tap/outdated-json.js +++ b/test/tap/outdated-json.js @@ -1,16 +1,13 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') var server -var pkg = path.resolve(__dirname, 
'outdated-json') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg } @@ -42,8 +39,6 @@ var expected = { } test('setup', function (t) { - cleanup() - mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) @@ -92,14 +87,37 @@ test('it should log json data', function (t) { ) }) +test('it should log json data even when the list is empty', function (t) { + common.npm( + [ + 'rm', + 'request', + 'underscore' + ], + EXEC_OPTS, + function (er, code, stdout) { + t.ifError(er, 'run without error') + t.is(code, 0, 'successful exit status') + common.npm( + [ + '--registry', common.registry, + '--silent', + '--json', + 'outdated' + ], + EXEC_OPTS, + function (er, code, stdout) { + t.ifError(er, 'run without error') + t.is(code, 0, 'successful exit status') + t.same(JSON.parse(stdout), {}, 'got an empty object printed') + t.end() + } + ) + } + ) +}) + test('cleanup', function (t) { server.close() - cleanup() t.end() }) - -function cleanup () { - // windows fix for locked files - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/outdated-latest.js b/test/tap/outdated-latest.js index d72fd87176f16..073b71da91cc6 100644 --- a/test/tap/outdated-latest.js +++ b/test/tap/outdated-latest.js @@ -7,9 +7,9 @@ const File = Tacks.File const Dir = Tacks.Dir const common = require('../common-tap.js') -const basedir = path.join(__dirname, path.basename(__filename, '.js')) +const basedir = common.pkg const testdir = path.join(basedir, 'testdir') -const cachedir = path.join(basedir, 'cache') +const cachedir = common.cache const globaldir = path.join(basedir, 'global') const tmpdir = path.join(basedir, 'tmp') diff --git a/test/tap/outdated-long.js b/test/tap/outdated-long.js index 976d416a13bb5..8cd2ceadb96fc 100644 --- a/test/tap/outdated-long.js +++ b/test/tap/outdated-long.js @@ -1,17 +1,15 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') var npm = require('../../') // config -var pkg = path.resolve(__dirname, 'outdated-long') -var cache = path.resolve(pkg, 'cache') +var pkg = common.pkg +var cache = common.cache var json = { name: 'outdated-long', @@ -23,8 +21,6 @@ var json = { } test('setup', function (t) { - cleanup() - mkdirp.sync(cache) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) @@ -65,7 +61,7 @@ test('it should not throw', function (t) { mr({ port: common.port }, function (er, s) { npm.load( { - cache: 'cache', + cache: cache, loglevel: 'silent', parseable: true, registry: common.registry @@ -83,6 +79,7 @@ test('it should not throw', function (t) { t.is(process.exitCode, 1, 'exit code set to 1') process.exitCode = 0 console.log = originalLog + output[0] = output[0].replace(/\\/g, '/') t.same(output, expOut) t.same(d, expData) @@ -94,12 +91,3 @@ test('it should not throw', function (t) { ) }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - rimraf.sync(pkg) -} diff --git a/test/tap/outdated-remote.js b/test/tap/outdated-remote.js new file mode 100644 index 0000000000000..e6cbd0e8acdde --- /dev/null +++ b/test/tap/outdated-remote.js @@ -0,0 +1,89 @@ +'use strict' +const path = require('path') +const test = require('tap').test +const Tacks = require('tacks') +const File = Tacks.File +const Dir = Tacks.Dir +const common = require('../common-tap.js') + +const basedir = common.pkg +const 
testdir = path.join(basedir, 'testdir') +const cachedir = common.cache +const globaldir = path.join(basedir, 'global') +const tmpdir = path.join(basedir, 'tmp') + +const conf = { + cwd: testdir, + env: common.newEnv().extend({ + npm_config_cache: cachedir, + npm_config_tmp: tmpdir, + npm_config_prefix: globaldir, + npm_config_registry: common.registry, + npm_config_loglevel: 'warn' + }) +} + +const fixture = new Tacks(Dir({ + cache: Dir(), + global: Dir(), + tmp: Dir(), + testdir: Dir({ + node_modules: Dir({ + 'foo-http': Dir({ + 'package.json': File({ + name: 'foo-http', + version: '1.0.0' + }) + }), + 'foo-https': Dir({ + 'package.json': File({ + name: 'foo-https', + version: '1.0.0' + }) + }) + }), + 'package.json': File({ + name: 'outdated-remote', + version: '1.0.0', + dependencies: { + 'foo-http': 'http://example.com/foo.tar.gz', + 'foo-https': 'https://example.com/foo.tar.gz' + } + }) + }) +})) + +function setup () { + fixture.create(basedir) +} + +test('setup', t => { + setup() + return common.fakeRegistry.listen() +}) + +test('discovers new versions in outdated', t => { + return common.npm(['outdated', '--json'], conf).then(([code, stdout, stderr]) => { + t.is(code, 1, 'npm outdated completed successfully') + t.comment(stdout.trim()) + t.comment(stderr.trim()) + const data = JSON.parse(stdout) + t.same(data['foo-http'], { + current: '1.0.0', + wanted: 'remote', + latest: 'remote', + location: '' + }) + t.same(data['foo-https'], { + current: '1.0.0', + wanted: 'remote', + latest: 'remote', + location: '' + }) + }) +}) + +test('cleanup', t => { + common.fakeRegistry.close() + t.done() +}) diff --git a/test/tap/outdated-symlink.js b/test/tap/outdated-symlink.js new file mode 100644 index 0000000000000..5ec089758a3ca --- /dev/null +++ b/test/tap/outdated-symlink.js @@ -0,0 +1,133 @@ +'use strict' +const path = require('path') +const test = require('tap').test +const mr = require('npm-registry-mock') +const Tacks = require('tacks') +const File = Tacks.File +const Symlink = Tacks.Symlink +const Dir = Tacks.Dir +const common = require('../common-tap.js') + +const basedir = common.pkg +const testdir = path.join(basedir, 'testdir') +const cachedir = common.cache +const globaldir = path.join(basedir, 'global') +const tmpdir = path.join(basedir, 'tmp') + +const conf = { + cwd: path.join(testdir, 'main'), + env: Object.assign({}, process.env, { + npm_config_cache: cachedir, + npm_config_tmp: tmpdir, + npm_config_prefix: globaldir, + npm_config_registry: common.registry, + npm_config_loglevel: 'warn' + }) +} + +let server +const fixture = new Tacks(Dir({ + cache: Dir(), + global: Dir(), + tmp: Dir(), + testdir: Dir({ + broken: Dir({ + 'package.json': File({ + name: 'broken', + version: '1.0.0' + }) + }), + main: Dir({ + node_modules: Dir({ + unbroken: Symlink('/testdir/unbroken') + }), + 'package-lock.json': File({ + name: 'main', + version: '1.0.0', + lockfileVersion: 1, + requires: true, + dependencies: { + broken: { + version: 'file:../broken' + }, + unbroken: { + version: 'file:../unbroken' + } + } + }), + 'package.json': File({ + name: 'main', + version: '1.0.0', + dependencies: { + broken: 'file:../broken', + unbroken: 'file:../unbroken' + } + }) + }), + unbroken: Dir({ + 'package.json': File({ + name: 'unbroken', + version: '1.0.0' + }) + }) + }) +})) + +function setup () { + cleanup() + fixture.create(basedir) +} + +function cleanup () { + fixture.remove(basedir) +} + +test('setup', function (t) { + setup() + mr({port: common.port, throwOnUnmatched: true}, function (err, s) { + if 
(err) throw err + server = s + t.done() + }) +}) + +test('outdated sees broken links', function (t) { + common.npm(['outdated', '--json'], conf, function (err, code, stdout, stderr) { + if (err) throw err + t.is(code, 1, 'command ran not ok') + t.comment(stderr.trim()) + t.comment(stdout.trim()) + t.same(JSON.parse(stdout), { + broken: { + wanted: 'linked', + latest: 'linked', + location: '' + } + }) + t.done() + }) +}) + +test('outdated with long output sees broken links', function (t) { + common.npm(['outdated', '--long', '--json'], conf, function (err, code, stdout, stderr) { + if (err) throw err + t.is(code, 1, 'command ran not ok') + t.comment(stderr.trim()) + t.comment(stdout.trim()) + t.same(JSON.parse(stdout), { + broken: { + wanted: 'linked', + latest: 'linked', + type: 'dependencies', + location: '' + } + }) + t.done() + }) +}) + +test('cleanup', function (t) { + server.close() + cleanup() + t.done() +}) diff --git a/test/tap/outdated.js b/test/tap/outdated.js index 8b1907d95f942..e64b7b0f813c8 100644 --- a/test/tap/outdated.js +++ b/test/tap/outdated.js @@ -1,17 +1,15 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var rimraf = require('rimraf') var test = require('tap').test var npm = require('../../') var common = require('../common-tap.js') // config -var pkg = path.resolve(__dirname, 'outdated') -var cache = path.resolve(pkg, 'cache') +var pkg = common.pkg +var cache = common.cache var originalLog var json = { @@ -26,9 +24,7 @@ var json = { } test('setup', function (t) { - cleanup() originalLog = console.log - mkdirp.sync(cache) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) @@ -91,7 +87,7 @@ test('it should not throw', function (t) { mr({ port: common.port }, function (er, s) { npm.load( { - cache: 'cache', + cache: cache, loglevel: 'silent', parseable: true, registry: common.registry @@ -122,11 +118,6 @@ test('it should not throw', function (t) { }) test('cleanup', function (t) { - cleanup() console.log = originalLog t.end() }) - -function cleanup () { - rimraf.sync(pkg) -} diff --git a/test/tap/override-bundled.js b/test/tap/override-bundled.js index 493ebf4a5d7e5..7f87c0999fe06 100644 --- a/test/tap/override-bundled.js +++ b/test/tap/override-bundled.js @@ -8,7 +8,7 @@ var path = require('path') var common = require('../common-tap.js') var testname = path.basename(__filename, '.js') -var testdir = path.resolve(__dirname, testname) +var testdir = common.pkg var testmod = path.resolve(testdir, 'top-test') var testtgz = testmod + '-1.0.0.tgz' diff --git a/test/tap/owner.js b/test/tap/owner.js index 0be88284d9750..622b272852c7d 100644 --- a/test/tap/owner.js +++ b/test/tap/owner.js @@ -1,12 +1,9 @@ var mr = require('npm-registry-mock') var test = require('tap').test -var path = require('path') -var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var common = require('../common-tap.js') -var basedir = path.join(__dirname, path.basename(__filename, '.js')) -var cachedir = path.join(basedir, 'cache') +var basedir = common.pkg +var cachedir = common.cache var server @@ -74,8 +71,6 @@ function mocks (server) { } test('setup', function (t) { - cleanup() - mkdirp.sync(cachedir) mr({ port: common.port, plugin: mocks }, function (er, s) { server = s t.end() @@ -160,10 +155,5 @@ test('npm owner rm', function (t) { test('cleanup', function (t) { server.close() - cleanup() t.end() }) - -function cleanup () { - rimraf.sync(basedir) -} diff --git 
a/test/tap/pack-files-and-ignores.js b/test/tap/pack-files-and-ignores.js index 6d8b43e9d57d7..4d9b97a32ad25 100644 --- a/test/tap/pack-files-and-ignores.js +++ b/test/tap/pack-files-and-ignores.js @@ -6,7 +6,7 @@ var rimraf = require('rimraf') var mkdirp = require('mkdirp') var fs = require('graceful-fs') var tar = require('tar') -var basepath = path.resolve(__dirname, path.basename(__filename, '.js')) +var basepath = common.pkg var fixturepath = path.resolve(basepath, 'npm-test-files') var targetpath = path.resolve(basepath, 'target') var Tacks = require('tacks') @@ -402,6 +402,7 @@ test('certain files ignored by default', function (t) { 'npm-debug.log': File(''), '.npmrc': File(''), '.foo.swp': File(''), + 'core': Dir({core: File(''), foo: File('')}), '.DS_Store': Dir({foo: File('')}), '._ohno': File(''), '._ohnoes': Dir({noes: File('')}), @@ -422,7 +423,11 @@ test('certain files ignored by default', function (t) { t.notOk(fileExists('npm-debug.log'), 'npm-debug.log not included') t.notOk(fileExists('.npmrc'), '.npmrc not included') t.notOk(fileExists('.foo.swp'), '.foo.swp not included') + t.ok(fileExists('core'), 'core/ included') + t.ok(fileExists('core/foo'), 'core/foo included') + t.notOk(fileExists('core/core'), 'core/core not included') t.notOk(fileExists('.DS_Store'), '.DS_Store not included') + t.notOk(fileExists('.DS_Store/foo'), '.DS_Store/foo not included') t.notOk(fileExists('._ohno'), '._ohno not included') t.notOk(fileExists('._ohnoes'), '._ohnoes not included') t.notOk(fileExists('foo.orig'), 'foo.orig not included') @@ -450,6 +455,7 @@ test('default-ignored files can be explicitly included', function (t) { 'npm-debug.log', '.npmrc', '.foo.swp', + 'core', '.DS_Store', '._ohno', '._ohnoes', @@ -469,6 +475,7 @@ test('default-ignored files can be explicitly included', function (t) { 'npm-debug.log': File(''), '.npmrc': File(''), '.foo.swp': File(''), + 'core': Dir({core: File(''), foo: File('')}), '.DS_Store': Dir({foo: File('')}), '._ohno': File(''), '._ohnoes': Dir({noes: File('')}), @@ -477,7 +484,7 @@ test('default-ignored files can be explicitly included', function (t) { }) ) withFixture(t, fixture, function (done) { - t.ok(fileExists('.git'), '.git included') + t.notOk(fileExists('.git'), '.git should never be included') t.ok(fileExists('.svn'), '.svn included') t.ok(fileExists('CVS'), 'CVS included') t.ok(fileExists('.hg'), '.hg included') @@ -489,6 +496,9 @@ test('default-ignored files can be explicitly included', function (t) { t.ok(fileExists('npm-debug.log'), 'npm-debug.log included') t.ok(fileExists('.npmrc'), '.npmrc included') t.ok(fileExists('.foo.swp'), '.foo.swp included') + t.ok(fileExists('core'), 'core/ included') + t.ok(fileExists('core/foo'), 'core/foo included') + t.ok(fileExists('core/core'), 'core/core included') t.ok(fileExists('.DS_Store'), '.DS_Store included') t.ok(fileExists('._ohno'), '._ohno included') t.ok(fileExists('._ohnoes'), '._ohnoes included') diff --git a/test/tap/pack-scoped.js b/test/tap/pack-scoped.js index 05b9d12a83319..06d02297fd5d0 100644 --- a/test/tap/pack-scoped.js +++ b/test/tap/pack-scoped.js @@ -6,10 +6,10 @@ var join = require('path').join var mkdirp = require('mkdirp') var rimraf = require('rimraf') -var pkg = join(__dirname, 'scoped_package') +var pkg = common.pkg var manifest = join(pkg, 'package.json') var tmp = join(pkg, 'tmp') -var cache = join(pkg, 'cache') +var cache = common.cache var data = { name: '@scope/generic-package', diff --git a/test/tap/pack.js b/test/tap/pack.js index 1813f47bf46a5..23c8296d003cd 
100644 --- a/test/tap/pack.js +++ b/test/tap/pack.js @@ -12,9 +12,9 @@ const Tacks = require('tacks') const Dir = Tacks.Dir const File = Tacks.File -const testDir = path.join(__dirname, 'pkg') +const testDir = common.pkg const tmp = path.join(testDir, 'tmp') -const cache = path.join(testDir, 'cache') +const cache = common.cache test('basic pack', (t) => { const fixture = new Tacks(new Dir({ diff --git a/test/tap/peer-deps.js b/test/tap/peer-deps.js index b516818da1f39..558fe9c4e6d96 100644 --- a/test/tap/peer-deps.js +++ b/test/tap/peer-deps.js @@ -1,16 +1,13 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') var npm = require('../../') -var pkg = path.resolve(__dirname, 'peer-deps') +var pkg = common.pkg var expected = [ 'peer dep missing: request@0.9.x, required by npm-test-peer-deps@0.0.0' ] @@ -23,6 +20,17 @@ var json = { } } +function setup (cb) { + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(json, null, 2) + ) + process.chdir(pkg) + + var opts = { cache: common.cache, registry: common.registry } + npm.load(opts, cb) +} + test('installs the peer dependency directory structure', function (t) { mr({ port: common.port }, function (er, s) { setup(function (err) { @@ -42,26 +50,3 @@ test('installs the peer dependency directory structure', function (t) { }) }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function setup (cb) { - cleanup() - mkdirp.sync(pkg) - fs.writeFileSync( - path.join(pkg, 'package.json'), - JSON.stringify(json, null, 2) - ) - process.chdir(pkg) - - var opts = { cache: path.resolve(pkg, 'cache'), registry: common.registry } - npm.load(opts, cb) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/ping.js b/test/tap/ping.js index 76d115a482343..3687b05bd4518 100644 --- a/test/tap/ping.js +++ b/test/tap/ping.js @@ -8,7 +8,7 @@ var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'ping') +var pkg = common.pkg var opts = { cwd: pkg } var outfile = path.join(pkg, '_npmrc') @@ -40,14 +40,41 @@ test('npm ping', function (t) { common.npm([ 'ping', '--registry', common.registry, - '--loglevel', 'silent', + '--loglevel', 'notice', '--userconfig', outfile - ], opts, function (err, code, stdout) { + ], opts, function (err, code, stdout, stderr) { s.close() - t.ifError(err, 'no error output') + t.ifError(err, 'command completed') t.notOk(code, 'exited OK') - t.same(stdout, 'Ping success: ' + JSON.stringify(pingResponse) + '\n') + t.match(stderr, /PING/, 'ping notification output') + t.match(stderr, /PONG/, 'pong response output') + t.end() + }) + }) +}) + +test('npm ping --json', function (t) { + mr({ port: common.port, plugin: mocks }, function (err, s) { + if (err) throw err + + common.npm([ + 'ping', + '--json', + '--registry', common.registry, + '--loglevel', 'notice', + '--userconfig', outfile + ], opts, function (err, code, stdout, stderr) { + s.close() + t.ifError(err, 'command completed') + t.notOk(code, 'exited OK') + + const json = JSON.parse(stdout.trim()) + t.similar(json, { + registry: common.registry, + details: pingResponse + }, 'JSON info returned') + t.equal(typeof json.time, 'number', 'got a timestamp') t.end() }) }) diff --git a/test/tap/prepare.js b/test/tap/prepare.js index f179c5267275a..d0966000735c4 
100644 --- a/test/tap/prepare.js +++ b/test/tap/prepare.js @@ -6,9 +6,9 @@ var join = require('path').join var mkdirp = require('mkdirp') var rimraf = require('rimraf') -var pkg = join(__dirname, 'prepare_package') +var pkg = common.pkg var tmp = join(pkg, 'tmp') -var cache = join(pkg, 'cache') +var cache = common.cache test('setup', function (t) { var n = 0 @@ -55,7 +55,11 @@ test('test', function (t) { common.npm([ 'pack', '--loglevel', 'warn' - ], { cwd: pkg, env: env }, function (err, code, stdout, stderr) { + ], { + cwd: pkg, + env: env, + nodeExecPath: process.execPath + }, function (err, code, stdout, stderr) { t.equal(code, 0, 'pack finished successfully') t.ifErr(err, 'pack finished successfully') diff --git a/test/tap/prepublish-only.js b/test/tap/prepublish-only.js index 0d2d31589d7b5..988d507c6359f 100644 --- a/test/tap/prepublish-only.js +++ b/test/tap/prepublish-only.js @@ -10,14 +10,13 @@ var path = require('path') var common = require('../common-tap') -var pkg = join(__dirname, 'prepublish_package') -var cachedir = join(pkg, 'cache') +var pkg = common.pkg +var cachedir = common.cache var tmpdir = join(pkg, 'tmp') var env = { 'npm_config_cache': cachedir, 'npm_config_tmp': tmpdir, - 'npm_config_prefix': pkg, 'npm_config_global': 'false' } @@ -64,7 +63,6 @@ var fixture = new Tacks(Dir({ })) test('setup', function (t) { - cleanup() fixture.create(pkg) mr({port: common.port, throwOnUnmatched: true}, function (err, s) { t.ifError(err, 'registry mocked successfully') @@ -100,7 +98,8 @@ test('test', function (t) { common.npm( [ 'publish', - '--loglevel', 'warn' + '--loglevel', 'warn', + '--scripts-prepend-node-path' ], { cwd: pkg, @@ -131,12 +130,7 @@ test('test', function (t) { }) test('cleanup', function (t) { - cleanup() server.close() t.pass('cleaned up') t.end() }) - -function cleanup () { - fixture.remove(pkg) -} diff --git a/test/tap/prepublish.js b/test/tap/prepublish.js index c71455a2480cc..e561d057f2124 100644 --- a/test/tap/prepublish.js +++ b/test/tap/prepublish.js @@ -6,9 +6,9 @@ var join = require('path').join var mkdirp = require('mkdirp') var rimraf = require('rimraf') -var pkg = join(__dirname, 'prepublish_package') +var pkg = common.pkg var tmp = join(pkg, 'tmp') -var cache = join(pkg, 'cache') +var cache = common.cache test('setup', function (t) { var n = 0 diff --git a/test/tap/progress-config.js b/test/tap/progress-config.js index 94d9b15f1b389..92468004f2c53 100644 --- a/test/tap/progress-config.js +++ b/test/tap/progress-config.js @@ -1,9 +1,9 @@ 'use strict' -var path = require('path') var test = require('tap').test var log = require('npmlog') var fs = require('graceful-fs') -var configName = path.join(__dirname, path.basename(__filename, '.js')) + '-npmrc' +const common = require('../common-tap.js') +var configName = common.pkg + '/npmrc' // We use requireInject to get a fresh copy of // the npm singleton each time we require it. 
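The context comment above notes that progress-config.js leans on require-inject to get a fresh copy of the npm singleton for every configuration case, rather than reusing Node's cached module. A minimal sketch of that pattern, using the same two-argument requireInject(module, injections) form visible in the no-scan-full-global-dir.js hunks earlier in this patch; the freshNpm helper name is illustrative and not part of the test:

    var requireInject = require('require-inject')
    var log = require('npmlog')

    // Each call re-evaluates lib/npm.js instead of returning the cached
    // singleton, with npmlog swapped for the test's own logger, so every
    // progress/unicode case starts from clean config state.
    function freshNpm () {
      return requireInject('../../lib/npm.js', { npmlog: log })
    }

Only the location of the npmrc fixture changes in this hunk (it now lives under common.pkg); the fresh-singleton loading strategy itself is untouched.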
@@ -85,8 +85,3 @@ test('unicode-false', function (t) { t.done() }) }) - -test('cleanup', function (t) { - fs.unlinkSync(configName) - t.done() -}) diff --git a/test/tap/prune-dev-dep-cycle.js b/test/tap/prune-dev-dep-cycle.js index 5ccabdc1c95f0..e226ac6d32f68 100644 --- a/test/tap/prune-dev-dep-cycle.js +++ b/test/tap/prune-dev-dep-cycle.js @@ -1,12 +1,11 @@ 'use strict' var fs = require('fs') -var path = require('path') var test = require('tap').test var Tacks = require('tacks') var File = Tacks.File var Dir = Tacks.Dir var common = require('../common-tap.js') -var testdir = path.join(__dirname, path.basename(__filename, '.js')) +var testdir = common.pkg var fixture = new Tacks( Dir({ diff --git a/test/tap/prune-dev-dep-with-bins.js b/test/tap/prune-dev-dep-with-bins.js index 686b5d8d6da4c..a75652f9a177c 100644 --- a/test/tap/prune-dev-dep-with-bins.js +++ b/test/tap/prune-dev-dep-with-bins.js @@ -1,12 +1,11 @@ 'use strict' var fs = require('fs') -var path = require('path') var test = require('tap').test var Tacks = require('tacks') var File = Tacks.File var Dir = Tacks.Dir var common = require('../common-tap.js') -var testdir = path.join(__dirname, path.basename(__filename, '.js')) +var testdir = common.pkg var fixture = new Tacks( Dir({ diff --git a/test/tap/prune-with-dev-dep-duplicate.js b/test/tap/prune-with-dev-dep-duplicate.js index bfe902b1d3c5b..7e7632b489cf6 100644 --- a/test/tap/prune-with-dev-dep-duplicate.js +++ b/test/tap/prune-with-dev-dep-duplicate.js @@ -1,17 +1,14 @@ var fs = require('fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap') var server -var pkg = path.resolve(__dirname, 'prune') -var cache = path.resolve(pkg, 'cache') +var pkg = common.pkg +var cache = common.cache var json = { name: 'prune-with-dev-dep-duplicate', @@ -32,8 +29,6 @@ var EXEC_OPTS = { } test('setup', function (t) { - cleanup() - mkdirp.sync(cache) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) @@ -103,12 +98,6 @@ test('npm prune only=prod', function (t) { test('cleanup', function (t) { server.close() - cleanup() t.pass('cleaned up') t.end() }) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/prune-with-only-dev-deps.js b/test/tap/prune-with-only-dev-deps.js index dce9497e517ff..6ea1bcf64b480 100644 --- a/test/tap/prune-with-only-dev-deps.js +++ b/test/tap/prune-with-only-dev-deps.js @@ -3,15 +3,13 @@ var path = require('path') var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap') var server -var pkg = path.resolve(__dirname, 'prune') -var cache = path.resolve(pkg, 'cache') +var pkg = common.pkg +var cache = common.cache var json = { name: 'prune-with-only-dev-deps', @@ -30,7 +28,6 @@ var EXEC_OPTS = { } test('setup', function (t) { - cleanup() mkdirp.sync(cache) fs.writeFileSync( path.join(pkg, 'package.json'), @@ -113,12 +110,5 @@ test('verify installs', function (t) { test('cleanup', function (t) { server.close() - cleanup() - t.pass('cleaned up') t.end() }) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/prune.js b/test/tap/prune.js index 4fe586359e054..ce2a300b1a537 100644 --- a/test/tap/prune.js +++ b/test/tap/prune.js @@ -1,17 
+1,14 @@ var fs = require('fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap') var server -var pkg = path.resolve(__dirname, path.basename(__filename, '.js')) -var cache = path.resolve(pkg, 'cache') +var pkg = common.pkg +var cache = common.cache var json = { name: 'prune', @@ -32,8 +29,6 @@ var EXEC_OPTS = { } test('setup', function (t) { - cleanup() - mkdirp.sync(cache) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) @@ -109,7 +104,7 @@ test('production: npm prune', function (t) { ], EXEC_OPTS, function (err, code, stdout) { if (err) throw err t.notOk(code, 'exit ok') - t.equal(stdout.trim(), 'remove\tmkdirp\t0.3.5\tnode_modules/mkdirp') + t.equal(stdout.trim().replace(/\\/g, '/'), 'remove\tmkdirp\t0.3.5\tnode_modules/mkdirp') t.end() }) }) @@ -122,12 +117,5 @@ test('pruduction: verify installs', function (t) { test('cleanup', function (t) { server.close() - cleanup() - t.pass('cleaned up') t.end() }) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/publish-access-scoped.js b/test/tap/publish-access-scoped.js index 17339cedfda33..7b9a3ecb59b9c 100644 --- a/test/tap/publish-access-scoped.js +++ b/test/tap/publish-access-scoped.js @@ -2,13 +2,11 @@ var fs = require('fs') var path = require('path') var test = require('tap').test -var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var mr = require('npm-registry-mock') var common = require('../common-tap') var server -var pkg = path.join(__dirname, 'publish-access') +var pkg = common.pkg test('setup', function (t) { mr({port: common.port, throwOnUnmatched: true}, function (err, s) { @@ -28,46 +26,40 @@ test('scoped packages pass public access if set', function (t) { return true }).put('/@bigco%2fpublish-access', true).reply(201, {ok: true}) - mkdirp(path.join(pkg, 'cache'), function () { - fs.writeFile( - path.join(pkg, 'package.json'), - JSON.stringify({ - name: '@bigco/publish-access', - version: '1.2.5', - public: true - }), - 'ascii', - function (er) { - t.ifError(er, 'package file written') - common.npm( - [ - 'publish', - '--access', 'public', - '--cache', path.join(pkg, 'cache'), - '--loglevel', 'silly', - '--registry', common.registry - ], - { - cwd: pkg - }, - function (er) { - t.ifError(er, 'published without error') + fs.writeFile( + path.join(pkg, 'package.json'), + JSON.stringify({ + name: '@bigco/publish-access', + version: '1.2.5', + public: true + }), + 'ascii', + function (er) { + t.ifError(er, 'package file written') + common.npm( + [ + 'publish', + '--access', 'public', + '--cache', common.cache, + '--loglevel', 'silly', + '--registry', common.registry + ], + { + cwd: pkg + }, + function (er) { + t.ifError(er, 'published without error') - server.done() - t.end() - } - ) - } - ) - }) + server.done() + t.end() + } + ) + } + ) }) test('cleanup', function (t) { process.chdir(__dirname) server.close() - rimraf(pkg, function (er) { - t.ifError(er) - - t.end() - }) + t.end() }) diff --git a/test/tap/publish-access-unscoped-restricted-fails.js b/test/tap/publish-access-unscoped-restricted-fails.js index 660d0f48d7d5a..f615fa60f2450 100644 --- a/test/tap/publish-access-unscoped-restricted-fails.js +++ b/test/tap/publish-access-unscoped-restricted-fails.js @@ -6,7 +6,7 @@ var mkdirp = require('mkdirp') var rimraf = require('rimraf') var common = 
require('../common-tap.js') -var pkg = path.join(__dirname, 'publish-access-unscoped') +var pkg = common.pkg test('setup', function (t) { mkdirp.sync(pkg) diff --git a/test/tap/publish-access-unscoped.js b/test/tap/publish-access-unscoped.js index 1766f61f4f93d..a7ea8e6694849 100644 --- a/test/tap/publish-access-unscoped.js +++ b/test/tap/publish-access-unscoped.js @@ -2,13 +2,11 @@ var fs = require('fs') var path = require('path') var test = require('tap').test -var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var mr = require('npm-registry-mock') var common = require('../common-tap') var server -var pkg = path.join(__dirname, 'publish-access-unscoped') +var pkg = common.pkg test('setup', function (t) { mr({port: common.port, throwOnUnmatched: true}, function (err, s) { @@ -28,46 +26,40 @@ test('unscoped packages can be explicitly set as public', function (t) { return true }).put('/publish-access', true).reply(201, {ok: true}) - mkdirp(path.join(pkg, 'cache'), function () { - fs.writeFile( - path.join(pkg, 'package.json'), - JSON.stringify({ - name: 'publish-access', - version: '1.2.5', - public: true - }), - 'ascii', - function (er) { - t.ifError(er, 'package file written') - common.npm( - [ - 'publish', - '--access', 'public', - '--cache', path.join(pkg, 'cache'), - '--loglevel', 'silly', - '--registry', common.registry - ], - { - cwd: pkg - }, - function (er) { - t.ifError(er, 'published without error') + fs.writeFile( + path.join(pkg, 'package.json'), + JSON.stringify({ + name: 'publish-access', + version: '1.2.5', + public: true + }), + 'ascii', + function (er) { + t.ifError(er, 'package file written') + common.npm( + [ + 'publish', + '--access', 'public', + '--cache', common.cache, + '--loglevel', 'silly', + '--registry', common.registry + ], + { + cwd: pkg + }, + function (er) { + t.ifError(er, 'published without error') - server.done() - t.end() - } - ) - } - ) - }) + server.done() + t.end() + } + ) + } + ) }) test('cleanup', function (t) { process.chdir(__dirname) server.close() - rimraf(pkg, function (er) { - t.ifError(er) - - t.end() - }) + t.end() }) diff --git a/test/tap/publish-config.js b/test/tap/publish-config.js index 0566795dbe3f5..0d6406316b4a0 100644 --- a/test/tap/publish-config.js +++ b/test/tap/publish-config.js @@ -3,10 +3,7 @@ const common = require('../common-tap.js') const test = require('tap').test const fs = require('fs') -const osenv = require('osenv') -const pkg = `${process.env.npm_config_tmp || '/tmp'}/npm-test-publish-config` - -require('mkdirp').sync(pkg) +const pkg = common.pkg fs.writeFileSync(pkg + '/package.json', JSON.stringify({ name: 'npm-test-publish-config', @@ -17,9 +14,9 @@ fs.writeFileSync(pkg + '/package.json', JSON.stringify({ }), 'utf8') fs.writeFileSync(pkg + '/fixture_npmrc', - '//localhost:1337/:email = fancy@feast.net\n' + - '//localhost:1337/:username = fancy\n' + - '//localhost:1337/:_password = ' + Buffer.from('feast').toString('base64')) + '//localhost:' + common.port + '/:email = fancy@feast.net\n' + + '//localhost:' + common.port + '/:username = fancy\n' + + '//localhost:' + common.port + '/:_password = ' + Buffer.from('feast').toString('base64')) test(function (t) { let child @@ -47,18 +44,24 @@ test(function (t) { // itself functions normally. 
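For context on the fixture_npmrc rewrite earlier in this publish-config.js hunk: npm reads per-registry credentials from userconfig keys scoped to the registry host and port, which is why the hard-coded localhost:1337 entries are replaced with common.port. A small sketch of that key layout, reusing the placeholder values already in the fixture; the credentialLines helper is illustrative only:

    // Builds the three per-registry credential lines the test writes into
    // its fixture_npmrc; the password value is stored base64-encoded.
    function credentialLines (port) {
      var prefix = '//localhost:' + port + '/:'
      return prefix + 'email = fancy@feast.net\n' +
        prefix + 'username = fancy\n' +
        prefix + '_password = ' + Buffer.from('feast').toString('base64') + '\n'
    }

    // e.g. fs.writeFileSync(pkg + '/fixture_npmrc', credentialLines(common.port))
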
// // Make sure that we don't sit around waiting for lock files - child = common.npm(['publish', '--userconfig=' + pkg + '/fixture_npmrc', '--tag=beta'], { + child = common.npm([ + 'publish', + '--userconfig=' + pkg + '/fixture_npmrc', + '--tag=beta', + '--loglevel', 'error' + ], { cwd: pkg, - stdio: 'inherit', env: { 'npm_config_cache_lock_stale': 1000, 'npm_config_cache_lock_wait': 1000, HOME: process.env.HOME, Path: process.env.PATH, PATH: process.env.PATH, - USERPROFILE: osenv.home() + USERPROFILE: process.env.USERPROFILE } - }, function (err, code) { + }, function (err, code, stdout, stderr) { + t.comment(stdout) + t.comment(stderr) t.ifError(err, 'publish command finished successfully') t.notOk(code, 'npm install exited with code 0') }) diff --git a/test/tap/publish-invalid-semver-tag.js b/test/tap/publish-invalid-semver-tag.js index 21543869a5ca6..b5d499f377248 100644 --- a/test/tap/publish-invalid-semver-tag.js +++ b/test/tap/publish-invalid-semver-tag.js @@ -2,46 +2,52 @@ var common = require('../common-tap.js') var test = require('tap').test var npm = require('../../lib/npm.js') var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var path = require('path') var fs = require('fs') var mr = require('npm-registry-mock') -var osenv = require('osenv') - -var PKG_DIR = path.resolve(__dirname, 'publish-invalid-semver-tag') -var CACHE_DIR = path.resolve(PKG_DIR, 'cache') +var PKG_DIR = common.pkg +let cacheIteration = 0 +let CACHE_DIR var DEFAULT_PKG = { 'name': 'examples', 'version': '1.2.3' } -var mockServer - +const isRoot = process.getuid && process.getuid() === 0 +const sudoUID = isRoot ? +process.env.SUDO_UID : null +const sudoGID = isRoot ? +process.env.SUDO_GID : null +const { chownSync } = require('fs') function resetPackage (options) { - rimraf.sync(CACHE_DIR) + CACHE_DIR = path.resolve(common.cache, '' + cacheIteration++) mkdirp.sync(CACHE_DIR) + npm.config.set('cache', CACHE_DIR) + + if (isRoot && sudoUID && sudoGID) { + chownSync(CACHE_DIR, sudoUID, sudoGID) + } fs.writeFileSync(path.resolve(PKG_DIR, 'package.json'), DEFAULT_PKG) } test('setup', function (t) { - process.chdir(osenv.tmpdir()) mkdirp.sync(PKG_DIR) process.chdir(PKG_DIR) - resetPackage({}) - mr({ port: common.port }, function (er, server) { + if (er) { + throw er + } + t.parent.teardown(() => server.close()) npm.load({ - cache: CACHE_DIR, + cache: common.cache, registry: common.registry, cwd: PKG_DIR }, function (err) { - t.ifError(err, 'started server') - mockServer = server - + if (err) { + throw err + } t.end() }) }) @@ -68,12 +74,3 @@ test('attempt publish with semver-like version', function (t) { t.end() }) }) - -test('cleanup', function (t) { - mockServer.close() - - process.chdir(osenv.tmpdir()) - rimraf.sync(PKG_DIR) - - t.end() -}) diff --git a/test/tap/publish-scoped.js b/test/tap/publish-scoped.js index b8fe0ae2f6902..82873688cdce9 100644 --- a/test/tap/publish-scoped.js +++ b/test/tap/publish-scoped.js @@ -2,19 +2,14 @@ var fs = require('fs') var path = require('path') var test = require('tap').test -var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var common = require('../common-tap') var mr = require('npm-registry-mock') -var pkg = path.join(__dirname, 'prepublish_package') +var pkg = common.pkg var server function setup () { - cleanup() - mkdirp.sync(path.join(pkg, 'cache')) - fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify({ @@ -40,18 +35,18 @@ test('npm publish should honor scoping', function (t) { var configuration = [ 'progress=false', - 'cache=' + 
path.join(pkg, 'cache'), + 'cache=' + common.cache, 'registry=http://nonexistent.lvh.me', - '//localhost:1337/:username=username', - '//localhost:1337/:_password=' + Buffer.from('password').toString('base64'), - '//localhost:1337/:email=' + 'ogd@aoaioxxysz.net', + '//localhost:' + common.port + '/:username=username', + '//localhost:' + common.port + '/:_password=' + Buffer.from('password').toString('base64'), + '//localhost:' + common.port + '/:email=' + 'ogd@aoaioxxysz.net', '@bigco:registry=' + common.registry ] var configFile = path.join(pkg, '.npmrc') fs.writeFileSync(configFile, configuration.join('\n') + '\n') - common.npm(['publish'], {'cwd': pkg}, function (err, code, stdout, stderr) { + common.npm(['publish'], {'cwd': pkg, nodeExecPath: process.execPath}, function (err, code, stdout, stderr) { if (err) throw err t.is(code, 0, 'published without error') server.done() @@ -81,11 +76,6 @@ test('npm publish should honor scoping', function (t) { test('cleanup', function (t) { server.close() + process.chdir(__dirname) t.end() - cleanup() }) - -function cleanup () { - process.chdir(__dirname) - rimraf.sync(pkg) -} diff --git a/test/tap/publish.js b/test/tap/publish.js index 765cfb07c6c82..453abcecd47ab 100644 --- a/test/tap/publish.js +++ b/test/tap/publish.js @@ -4,18 +4,13 @@ const BB = require('bluebird') const common = require('../common-tap') const fs = require('fs') -const mkdirp = require('mkdirp') const mr = BB.promisify(require('npm-registry-mock')) const path = require('path') -const rimraf = require('rimraf') const test = require('tap').test -const testDir = path.join(__dirname, 'publish_test_package') +const testDir = common.pkg function setup () { - cleanup() - mkdirp.sync(path.join(testDir, 'cache')) - fs.writeFileSync( path.join(testDir, 'package.json'), JSON.stringify({ @@ -52,12 +47,15 @@ test('basic npm publish', (t) => { return common.npm([ 'publish', '--no-color', - '--cache', path.join(testDir, 'cache'), + '--cache', common.cache, '--registry=' + common.registry.replace(common.port, server.port), `--//localhost:${server.port}/:username=username`, `--//localhost:${server.port}/:_password=` + Buffer.from('password').toString('base64'), `--//localhost:${server.port}/:email=` + 'ogd@aoaioxxysz.net' - ], {'cwd': testDir}) + ], { + 'cwd': testDir, + nodeExecPath: process.execPath + }) .spread((code, stdout, stderr) => { t.comment(stdout) t.comment(stderr) @@ -92,7 +90,7 @@ test('npm publish --dry-run', (t) => { 'publish', '--dry-run', '--registry=https://example.registry/fake', - '--cache', path.join(testDir, 'cache'), + '--cache', common.cache, '--loglevel=notice', '--no-color' ], {'cwd': testDir}) @@ -115,7 +113,7 @@ test('npm publish --json', (t) => { 'publish', '--json', '--registry', common.registry.replace(common.port, server.port), - '--cache', path.join(testDir, 'cache') + '--cache', common.cache ], {'cwd': testDir}) .spread((code, stdout, stderr) => { t.comment(stdout) @@ -125,8 +123,8 @@ test('npm publish --json', (t) => { name: 'publish-organized', version: '1.2.5', files: [ - {path: 'package.json'}, - {path: 'index.js'} + {path: 'index.js'}, + {path: 'package.json'} ], entryCount: 2 }, 'JSON output reflects package contents') @@ -142,7 +140,7 @@ test('npm publish --dry-run --json', (t) => { '--dry-run', '--json', '--registry=https://example.registry/fake', - '--cache', path.join(testDir, 'cache'), + '--cache', common.cache, '--loglevel=notice', '--no-color' ], {'cwd': testDir}) @@ -154,21 +152,11 @@ test('npm publish --dry-run --json', (t) => { name: 
'publish-organized', version: '1.2.5', files: [ - {path: 'package.json'}, - {path: 'index.js'} + {path: 'index.js'}, + {path: 'package.json'} ], entryCount: 2 }, 'JSON output reflects package contents') t.equal(stderr.trim(), '', 'nothing on stderr') }) }) - -test('cleanup', (t) => { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(__dirname) - rimraf.sync(testDir) -} diff --git a/test/tap/repo.js b/test/tap/repo.js index 54907f620c77e..3e97fdeaed228 100644 --- a/test/tap/repo.js +++ b/test/tap/repo.js @@ -5,16 +5,17 @@ var test = require('tap').test var rimraf = require('rimraf') var fs = require('fs') var path = require('path') -var fakeBrowser = path.join(__dirname, '_script.sh') -var outFile = path.join(__dirname, '/_output') - -var opts = { cwd: __dirname } +var fakeBrowser = path.join(common.pkg, '_script.sh') +var outFile = path.join(common.pkg, '_output') +var opts = { cwd: common.pkg } +var mkdirp = require('mkdirp') common.pendIfWindows('This is trickier to convert without opening new shells') test('setup', function (t) { + mkdirp.sync(common.pkg) var s = '#!/usr/bin/env bash\n' + - 'echo "$@" > ' + JSON.stringify(__dirname) + '/_output\n' + 'echo "$@" > ' + JSON.stringify(common.pkg) + '/_output\n' fs.writeFileSync(fakeBrowser, s, 'ascii') fs.chmodSync(fakeBrowser, '0755') t.pass('made script') @@ -40,6 +41,41 @@ test('npm repo underscore', function (t) { }) }) +test('npm repo underscore --json', function (t) { + mr({ port: common.port }, function (er, s) { + common.npm([ + 'repo', 'underscore', + '--json', + '--registry=' + common.registry, + '--loglevel=silent', + '--no-browser' + ], opts, function (err, code, stdout, stderr) { + t.ifError(err, 'repo command ran without error') + t.equal(code, 0, 'exit ok') + t.matchSnapshot(stdout, 'should print json result') + s.close() + t.end() + }) + }) +}) + +test('npm repo underscore --no-browser', function (t) { + mr({ port: common.port }, function (er, s) { + common.npm([ + 'repo', 'underscore', + '--no-browser', + '--registry=' + common.registry, + '--loglevel=silent' + ], opts, function (err, code, stdout, stderr) { + t.ifError(err, 'repo command ran without error') + t.equal(code, 0, 'exit ok') + t.matchSnapshot(stdout, 'should print alternative msg') + s.close() + t.end() + }) + }) +}) + test('npm repo optimist - github (https://)', function (t) { mr({ port: common.port }, function (er, s) { common.npm([ diff --git a/test/tap/retry-on-stale-cache.js b/test/tap/retry-on-stale-cache.js index 8aec35ed4f44d..7e3d291a5bdd7 100644 --- a/test/tap/retry-on-stale-cache.js +++ b/test/tap/retry-on-stale-cache.js @@ -7,8 +7,8 @@ var Tacks = require('tacks') var Dir = Tacks.Dir var File = Tacks.File -var workdir = path.join(__dirname, path.basename(__filename, '.js')) -var cachedir = path.join(workdir, 'cache') +var workdir = common.pkg +var cachedir = common.cache var modulesdir = path.join(workdir, 'modules') var oldModule = path.join(modulesdir, 'good-night-0.1.0.tgz') var newModule = path.join(modulesdir, 'good-night-1.0.0.tgz') @@ -20,7 +20,6 @@ var config = [ ] var fixture = new Tacks(Dir({ - 'cache': Dir(), 'modules': Dir({ 'good-night-0.1.0.tgz': File(Buffer.from( '1f8b0800000000000003ed934f4bc43010c57beea7187a59056dd36eff80' + diff --git a/test/tap/run-script-filter-private.js b/test/tap/run-script-filter-private.js index 631759c13a733..31f1912fd103a 100644 --- a/test/tap/run-script-filter-private.js +++ b/test/tap/run-script-filter-private.js @@ -7,7 +7,7 @@ var test = require('tap').test var common = 
require('../common-tap') -var pkg = path.resolve(__dirname, 'run-script-filter-private') +var pkg = common.pkg var opts = { cwd: pkg } diff --git a/test/tap/run-script.js b/test/tap/run-script.js index f50a9632851a0..75b714002e2c8 100644 --- a/test/tap/run-script.js +++ b/test/tap/run-script.js @@ -7,8 +7,8 @@ var rimraf = require('rimraf') var common = require('../common-tap') -var pkg = path.resolve(__dirname, 'run-script') -var cache = path.resolve(pkg, 'cache') +var pkg = common.pkg +var cache = common.cache var tmp = path.resolve(pkg, 'tmp') var opts = { cwd: pkg } diff --git a/test/tap/save-optional.js b/test/tap/save-optional.js index 66e430dc78b71..9985451684d07 100644 --- a/test/tap/save-optional.js +++ b/test/tap/save-optional.js @@ -8,9 +8,9 @@ const File = Tacks.File const Dir = Tacks.Dir const common = require('../common-tap.js') -const basedir = path.join(__dirname, path.basename(__filename, '.js')) +const basedir = common.pkg const testdir = path.join(basedir, 'testdir') -const cachedir = path.join(basedir, 'cache') +const cachedir = common.cache const globaldir = path.join(basedir, 'global') const tmpdir = path.join(basedir, 'tmp') diff --git a/test/tap/scope-header.js b/test/tap/scope-header.js index 3eb70f132e669..f4eb7ae12c104 100644 --- a/test/tap/scope-header.js +++ b/test/tap/scope-header.js @@ -7,12 +7,12 @@ var File = Tacks.File var Dir = Tacks.Dir var common = require('../common-tap.js') -var basedir = path.join(__dirname, path.basename(__filename, '.js')) +var basedir = common.pkg var testdir = path.join(basedir, 'testdir') var withScope = path.join(testdir, 'with-scope') var withoutScope = path.join(testdir, 'without-scope') var onlyProjectScope = path.join(testdir, 'only-project-scope') -var cachedir = path.join(basedir, 'cache') +var cachedir = common.cache var globaldir = path.join(basedir, 'global') var tmpdir = path.join(basedir, 'tmp') diff --git a/test/tap/scripts-whitespace-windows.js b/test/tap/scripts-whitespace-windows.js index 4d1e53a8f5a5d..9a301dca526d0 100644 --- a/test/tap/scripts-whitespace-windows.js +++ b/test/tap/scripts-whitespace-windows.js @@ -2,15 +2,13 @@ var fs = require('graceful-fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap') -var pkg = path.resolve(__dirname, 'scripts-whitespace-windows') +var pkg = common.pkg var tmp = path.resolve(pkg, 'tmp') -var cache = path.resolve(pkg, 'cache') +var cache = common.cache var dep = path.resolve(pkg, 'dep') var EXEC_OPTS = { cwd: pkg } @@ -43,7 +41,6 @@ if (process.argv.length === 8) */ }.toString().split('\n').slice(1, -1).join('\n') test('setup', function (t) { - cleanup() mkdirp.sync(tmp) fs.writeFileSync( path.join(pkg, 'package.json'), @@ -89,13 +86,3 @@ test('test', function (t) { t.end() }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/search.all-package-search.js b/test/tap/search.all-package-search.js index c70f4f8e7ef07..9b3b989f9c726 100644 --- a/test/tap/search.all-package-search.js +++ b/test/tap/search.all-package-search.js @@ -1,23 +1,32 @@ -var path = require('path') -var mkdirp = require('mkdirp') -var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') -var cacheFile = require('npm-cache-filename') -var test = require('tap').test -var Tacks = require('tacks') -var File = 
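// --- Illustrative sketch, not part of the diff above ---
// Several of these tests build their fixture trees declaratively with
// Tacks instead of ad-hoc mkdirp/writeFileSync calls: Dir() maps names to
// entries, File() takes a string/Buffer or an object (written out as
// JSON, which the search cache fixtures in this diff rely on), and
// create() lays the whole tree down under a target directory. The package
// below is invented for illustration.
'use strict'
const Tacks = require('tacks')
const { Dir, File } = Tacks
const common = require('../common-tap.js')

const fixture = new Tacks(Dir({
  'package.json': File({
    name: 'tacks-example',
    version: '1.0.0'
  }),
  'index.js': File('module.exports = true\n')
}))

// Write the tree into the per-test scratch dir.
fixture.create(common.pkg)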
Tacks.File +'use strict' -var common = require('../common-tap.js') +const cacheFile = require('npm-cache-filename') +const mkdirp = require('mkdirp') +const mr = require('npm-registry-mock') +const path = require('path') +const qs = require('querystring') +const Tacks = require('tacks') +const test = require('tap').test -var PKG_DIR = path.resolve(__dirname, 'search') -var CACHE_DIR = path.resolve(PKG_DIR, 'cache') -var cacheBase = cacheFile(CACHE_DIR)(common.registry + '/-/all') -var cachePath = path.join(cacheBase, '.cache.json') +const {File} = Tacks -var server +const common = require('../common-tap.js') + +const CACHE_DIR = common.cache +const cacheBase = cacheFile(CACHE_DIR)(common.registry + '/-/all') +const cachePath = path.join(cacheBase, '.cache.json') +const chownr = require('chownr') +const fixOwner = ( + process.getuid && process.getuid() === 0 && + process.env.SUDO_UID && process.env.SUDO_GID +) ? (path) => chownr.sync(path, +process.env.SUDO_UID, +process.env.SUDO_GID) + : () => {} + +let server test('setup', function (t) { + mkdirp.sync(cacheBase) + fixOwner(CACHE_DIR) mr({port: common.port, throwOnUnmatched: true}, function (err, s) { t.ifError(err, 'registry mocked successfully') server = s @@ -26,7 +35,7 @@ test('setup', function (t) { }) }) -var searches = [ +const searches = [ { term: 'cool', description: 'non-regex search', @@ -138,21 +147,29 @@ var searches = [ // These test classic hand-matched searches searches.forEach(function (search) { test(search.description, function (t) { - setup() - server.get('/-/v1/search?text=' + encodeURIComponent(search.term) + '&size=20').once().reply(404, {}) - var now = Date.now() - var cacheContents = { + const query = qs.stringify({ + text: search.term, + size: 20, + from: 0, + quality: 0.65, + popularity: 0.98, + maintenance: 0.5 + }) + server.get(`/-/v1/search?${query}`).once().reply(404, {}) + const now = Date.now() + const cacheContents = { '_updated': now, bar: { name: 'bar', version: '1.0.0' }, cool: { name: 'cool', version: '5.0.0' }, foo: { name: 'foo', version: '2.0.0' }, other: { name: 'other', version: '1.0.0' } } - for (var k in search.inject) { + for (let k in search.inject) { cacheContents[k] = search.inject[k] } - var fixture = new Tacks(File(cacheContents)) + const fixture = new Tacks(File(cacheContents)) fixture.create(cachePath) + fixOwner(cachePath) common.npm([ 'search', search.term, '--registry', common.registry, @@ -167,12 +184,12 @@ searches.forEach(function (search) { t.equal(code, 0, 'search finished successfully') t.ifErr(err, 'search finished successfully') // \033 == \u001B - var markStart = '\u001B\\[[0-9][0-9]m' - var markEnd = '\u001B\\[0m' + const markStart = '\u001B\\[[0-9][0-9]m' + const markEnd = '\u001B\\[0m' - var re = new RegExp(markStart + '.*?' + markEnd) + const re = new RegExp(markStart + '.*?' 
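// --- Illustrative sketch, not part of the diff above ---
// The search tests guard against leaving root-owned files behind when the
// suite runs under sudo: if the process is root but SUDO_UID/SUDO_GID are
// set, anything written into the cache is chown'd back to the invoking
// user. A standalone version of that helper; the subdirectory name is
// hypothetical.
'use strict'
const path = require('path')
const mkdirp = require('mkdirp')
const chownr = require('chownr')
const common = require('../common-tap.js')

const fixOwner = (
  process.getuid && process.getuid() === 0 &&
  process.env.SUDO_UID && process.env.SUDO_GID
) ? (p) => chownr.sync(p, +process.env.SUDO_UID, +process.env.SUDO_GID)
  : () => {} // no-op when not running as root via sudo

const cacheSubdir = path.join(common.cache, 'example-subdir') // hypothetical
mkdirp.sync(cacheSubdir)
fixOwner(cacheSubdir) // hand ownership back to the user who ran sudo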
+ markEnd) - var cnt = stdout.search(re) + const cnt = stdout.search(re) t.equal( cnt, search.location, @@ -184,18 +201,7 @@ searches.forEach(function (search) { }) test('cleanup', function (t) { - cleanup() + server.done() server.close() t.end() }) - -function setup () { - cleanup() - mkdirp.sync(cacheBase) -} - -function cleanup () { - server.done() - process.chdir(osenv.tmpdir()) - rimraf.sync(PKG_DIR) -} diff --git a/test/tap/search.esearch.js b/test/tap/search.esearch.js deleted file mode 100644 index d892aec95759c..0000000000000 --- a/test/tap/search.esearch.js +++ /dev/null @@ -1,192 +0,0 @@ -var common = require('../common-tap.js') -var finished = require('mississippi').finished -var mkdirp = require('mkdirp') -var mr = require('npm-registry-mock') -var npm = require('../../') -var osenv = require('osenv') -var path = require('path') -var rimraf = require('rimraf') -var test = require('tap').test - -var SEARCH = '/-/v1/search' -var PKG_DIR = path.resolve(__dirname, 'create-entry-update-stream') -var CACHE_DIR = path.resolve(PKG_DIR, 'cache') - -var esearch = require('../../lib/search/esearch') - -var server - -function setup () { - cleanup() - mkdirp.sync(CACHE_DIR) - process.chdir(CACHE_DIR) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(PKG_DIR) -} - -test('setup', function (t) { - mr({port: common.port, throwOnUnmatched: true}, function (err, s) { - t.ifError(err, 'registry mocked successfully') - npm.load({ cache: CACHE_DIR, registry: common.registry }, function (err) { - t.ifError(err, 'npm loaded successfully') - server = s - t.pass('all set up') - t.done() - }) - }) -}) - -test('basic test', function (t) { - setup() - server.get(SEARCH + '?text=oo&size=1').once().reply(200, { - objects: [ - { package: { name: 'cool', version: '1.0.0' } }, - { package: { name: 'foo', version: '2.0.0' } } - ] - }) - var results = [] - var s = esearch({ - include: ['oo'], - limit: 1 - }) - t.ok(s, 'got a stream') - s.on('data', function (d) { - results.push(d) - }) - finished(s, function (err) { - if (err) { throw err } - t.ok(true, 'stream finished without error') - t.deepEquals(results, [{ - name: 'cool', - version: '1.0.0', - description: null, - maintainers: null, - keywords: null, - date: null - }, { - name: 'foo', - version: '2.0.0', - description: null, - maintainers: null, - keywords: null, - date: null - }]) - server.done() - t.done() - }) -}) - -test('only returns certain fields for each package', function (t) { - setup() - var date = new Date() - server.get(SEARCH + '?text=oo&size=1').once().reply(200, { - objects: [{ - package: { - name: 'cool', - version: '1.0.0', - description: 'desc', - maintainers: [ - {username: 'x', email: 'a@b.c'}, - {username: 'y', email: 'c@b.a'} - ], - keywords: ['a', 'b', 'c'], - date: date.toISOString(), - extra: 'lol' - } - }] - }) - var results = [] - var s = esearch({ - include: ['oo'], - limit: 1 - }) - t.ok(s, 'got a stream') - s.on('data', function (d) { - results.push(d) - }) - finished(s, function (err) { - if (err) { throw err } - t.ok(true, 'stream finished without error') - t.deepEquals(results, [{ - name: 'cool', - version: '1.0.0', - description: 'desc', - maintainers: [ - {username: 'x', email: 'a@b.c'}, - {username: 'y', email: 'c@b.a'} - ], - keywords: ['a', 'b', 'c'], - date: date - }]) - server.done() - t.done() - }) -}) - -test('accepts a limit option', function (t) { - setup() - server.get(SEARCH + '?text=oo&size=3').once().reply(200, { - objects: [ - { package: { name: 'cool', version: '1.0.0' } }, - { 
package: { name: 'cool', version: '1.0.0' } } - ] - }) - var results = 0 - var s = esearch({ - include: ['oo'], - limit: 3 - }) - s.on('data', function () { results++ }) - finished(s, function (err) { - if (err) { throw err } - t.ok(true, 'request sent with correct size') - t.equal(results, 2, 'behaves fine with fewer results than size') - server.done() - t.done() - }) -}) - -test('passes foo:bar syntax params directly', function (t) { - setup() - server.get(SEARCH + '?text=foo%3Abar&size=1').once().reply(200, { - objects: [] - }) - var s = esearch({ - include: ['foo:bar'], - limit: 1 - }) - s.on('data', function () {}) - finished(s, function (err) { - if (err) { throw err } - t.ok(true, 'request sent with correct params') - server.done() - t.done() - }) -}) - -test('space-separates and URI-encodes multiple search params', function (t) { - setup() - server.get(SEARCH + '?text=foo%20bar%3Abaz%20quux%3F%3D&size=1').once().reply(200, { - objects: [] - }) - var s = esearch({ - include: ['foo', 'bar:baz', 'quux?='], - limit: 1 - }) - s.on('data', function () {}) - finished(s, function (err) { - if (err) { throw err } - t.ok(true, 'request sent with correct params') - server.done() - t.done() - }) -}) - -test('cleanup', function (t) { - server.close() - cleanup() - t.done() -}) diff --git a/test/tap/search.js b/test/tap/search.js index df7ff0fe375b7..049706cc0892d 100644 --- a/test/tap/search.js +++ b/test/tap/search.js @@ -1,21 +1,40 @@ -var path = require('path') -var mkdirp = require('mkdirp') -var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') -var cacheFile = require('npm-cache-filename') -var test = require('tap').test -var Tacks = require('tacks') -var File = Tacks.File +'use strict' -var common = require('../common-tap.js') +const cacheFile = require('npm-cache-filename') +const mkdirp = require('mkdirp') +const mr = require('npm-registry-mock') +const path = require('path') +const qs = require('querystring') +const test = require('tap').test -var PKG_DIR = path.resolve(__dirname, 'search') -var CACHE_DIR = path.resolve(PKG_DIR, 'cache') -var cacheBase = cacheFile(CACHE_DIR)(common.registry + '/-/all') -var cachePath = path.join(cacheBase, '.cache.json') +const Tacks = require('tacks') +const File = Tacks.File -var server +const common = require('../common-tap.js') + +// this test uses a fresh cache for each test block +// create them all in common.cache so that we can verify +// them for root-owned files in sudotest +let CACHE_DIR +let cacheBase +let cachePath +let cacheCounter = 1 +function setup () { + CACHE_DIR = common.cache + '/' + cacheCounter++ + cacheBase = cacheFile(CACHE_DIR)(common.registry + '/-/all') + cachePath = path.join(cacheBase, '.cache.json') + mkdirp.sync(cacheBase) + fixOwner(CACHE_DIR) +} + +const chownr = require('chownr') +const fixOwner = ( + process.getuid && process.getuid() === 0 && + process.env.SUDO_UID && process.env.SUDO_GID +) ? 
(path) => chownr.sync(path, +process.env.SUDO_UID, +process.env.SUDO_GID) + : () => {} + +let server test('setup', function (t) { mr({port: common.port, throwOnUnmatched: true}, function (err, s) { @@ -28,13 +47,22 @@ test('setup', function (t) { test('notifies when there are no results', function (t) { setup() - server.get('/-/v1/search?text=none&size=20').once().reply(200, { + const query = qs.stringify({ + text: 'none', + size: 20, + from: 0, + quality: 0.65, + popularity: 0.98, + maintenance: 0.5 + }) + server.get(`/-/v1/search?${query}`).once().reply(200, { objects: [] }) common.npm([ 'search', 'none', '--registry', common.registry, - '--loglevel', 'error' + '--loglevel', 'error', + '--cache', CACHE_DIR ], {}, function (err, code, stdout, stderr) { if (err) throw err t.equal(stderr, '', 'no error output') @@ -46,11 +74,20 @@ test('notifies when there are no results', function (t) { test('spits out a useful error when no cache nor network', function (t) { setup() - server.get('/-/v1/search?text=foo&size=20').once().reply(404, {}) + const query = qs.stringify({ + text: 'foo', + size: 20, + from: 0, + quality: 0.65, + popularity: 0.98, + maintenance: 0.5 + }) + server.get(`/-/v1/search?${query}`).once().reply(404, {}) server.get('/-/all').many().reply(404, {}) - var cacheContents = {} - var fixture = new Tacks(File(cacheContents)) + const cacheContents = {} + const fixture = new Tacks(File(cacheContents)) fixture.create(cachePath) + fixOwner(cachePath) common.npm([ 'search', 'foo', '--registry', common.registry, @@ -70,7 +107,15 @@ test('spits out a useful error when no cache nor network', function (t) { test('can switch to JSON mode', function (t) { setup() - server.get('/-/v1/search?text=oo&size=20').once().reply(200, { + const query = qs.stringify({ + text: 'oo', + size: 20, + from: 0, + quality: 0.65, + popularity: 0.98, + maintenance: 0.5 + }) + server.get(`/-/v1/search?${query}`).once().reply(200, { objects: [ { package: { name: 'cool', version: '1.0.0' } }, { package: { name: 'foo', version: '2.0.0' } } @@ -86,9 +131,15 @@ test('can switch to JSON mode', function (t) { if (err) throw err t.equal(stderr, '', 'no error output') t.equal(code, 0, 'search gives 0 error code even if no matches') - t.deepEquals(JSON.parse(stdout), [ - { name: 'cool', version: '1.0.0', date: null }, - { name: 'foo', version: '2.0.0', date: null } + t.similar(JSON.parse(stdout), [ + { + name: 'cool', + version: '1.0.0' + }, + { + name: 'foo', + version: '2.0.0' + } ], 'results returned as valid json') t.done() }) @@ -96,7 +147,15 @@ test('can switch to JSON mode', function (t) { test('JSON mode does not notify on empty', function (t) { setup() - server.get('/-/v1/search?text=oo&size=20').once().reply(200, { + const query = qs.stringify({ + text: 'oo', + size: 20, + from: 0, + quality: 0.65, + popularity: 0.98, + maintenance: 0.5 + }) + server.get(`/-/v1/search?${query}`).once().reply(200, { objects: [] }) common.npm([ @@ -116,7 +175,15 @@ test('JSON mode does not notify on empty', function (t) { test('can switch to tab separated mode', function (t) { setup() - server.get('/-/v1/search?text=oo&size=20').once().reply(200, { + const query = qs.stringify({ + text: 'oo', + size: 20, + from: 0, + quality: 0.65, + popularity: 0.98, + maintenance: 0.5 + }) + server.get(`/-/v1/search?${query}`).once().reply(200, { objects: [ { package: { name: 'cool', version: '1.0.0' } }, { package: { name: 'foo', description: 'this\thas\ttabs', version: '2.0.0' } } @@ -139,7 +206,15 @@ test('can switch to tab separated mode', 
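// --- Illustrative sketch, not part of the diff above ---
// Every mocked search route in the updated tests is assembled the same
// way: the CLI now sends from/quality/popularity/maintenance alongside
// text and size, so the expected URL is built with querystring instead of
// being hard-coded (the mock is started with throwOnUnmatched, so an URL
// mismatch surfaces as a failure). A small helper capturing that
// repetition; the helper name is invented, the values mirror the diff.
'use strict'
const qs = require('querystring')

function searchPath (term) {
  return '/-/v1/search?' + qs.stringify({
    text: term,
    size: 20,
    from: 0,
    quality: 0.65,
    popularity: 0.98,
    maintenance: 0.5
  })
}

// Usage with an npm-registry-mock server, as in the tests above:
//   server.get(searchPath('none')).once().reply(200, { objects: [] })
console.log(searchPath('none'))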
function (t) { test('tab mode does not notify on empty', function (t) { setup() - server.get('/-/v1/search?text=oo&size=20').once().reply(200, { + const query = qs.stringify({ + text: 'oo', + size: 20, + from: 0, + quality: 0.65, + popularity: 0.98, + maintenance: 0.5 + }) + server.get(`/-/v1/search?${query}`).once().reply(200, { objects: [] }) common.npm([ @@ -158,8 +233,8 @@ test('tab mode does not notify on empty', function (t) { }) test('no arguments provided should error', function (t) { - cleanup() - common.npm(['search'], {}, function (err, code, stdout, stderr) { + setup() + common.npm(['search', '--cache', CACHE_DIR], {}, function (err, code, stdout, stderr) { if (err) throw err t.equal(code, 1, 'search finished unsuccessfully') @@ -173,18 +248,7 @@ test('no arguments provided should error', function (t) { }) test('cleanup', function (t) { - cleanup() + server.done() server.close() t.end() }) - -function setup () { - cleanup() - mkdirp.sync(cacheBase) -} - -function cleanup () { - server.done() - process.chdir(osenv.tmpdir()) - rimraf.sync(PKG_DIR) -} diff --git a/test/tap/semver-doc.js b/test/tap/semver-doc.js index 31c75fffd8ad7..1cc978201c7a4 100644 --- a/test/tap/semver-doc.js +++ b/test/tap/semver-doc.js @@ -3,7 +3,7 @@ var test = require('tap').test test('semver doc is up to date', function (t) { var path = require('path') var moddoc = path.join(__dirname, '../../node_modules/semver/README.md') - var mydoc = path.join(__dirname, '../../doc/misc/semver.md') + var mydoc = path.join(__dirname, '../../docs/content/using-npm/semver.md') var fs = require('fs') var mod = fs.readFileSync(moddoc, 'utf8').replace(/semver\(1\)/, 'semver(7)') var my = fs.readFileSync(mydoc, 'utf8') diff --git a/test/tap/shared-linked.js b/test/tap/shared-linked.js index 3ee00e063d4c8..517be4699d973 100644 --- a/test/tap/shared-linked.js +++ b/test/tap/shared-linked.js @@ -8,7 +8,7 @@ var Dir = Tacks.Dir var common = require('../common-tap.js') var mr = require('npm-registry-mock') -var testdir = path.join(__dirname, path.basename(__filename, '.js')) +var testdir = common.pkg var bugdir = path.join(testdir, 'modules', 'bug') // This is an absolutely minimal version of the optimist included with @@ -44,7 +44,6 @@ var optimist = Dir({ var fixture = new Tacks( Dir({ - cache: Dir({}), global: Dir({ lib: Dir({ node_modules: Dir({ @@ -126,7 +125,7 @@ test('shared-linked', function (t) { }) } var config = [ - '--cache', path.join(testdir, 'cache'), + '--cache', common.cache, '--registry', common.registry, '--unicode', 'false' ] @@ -134,7 +133,7 @@ test('shared-linked', function (t) { common.npm(config.concat(['install', '--dry-run', '--parseable']), options, function (err, code, stdout, stderr) { if (err) throw err t.is(code, 0) - var got = stdout.trim().replace(/\s+\n/g, '\n') + var got = stdout.trim().replace(/\s+\n/g, '\n').replace(/\\/g, '/') var expected = 'add\tminimist\t0.0.5\tnode_modules/minimist\n' + 'add\twordwrap\t0.0.2\tnode_modules/wordwrap\n' + diff --git a/test/tap/shrinkwrap-_auth.js b/test/tap/shrinkwrap-_auth.js index 5aff86fb08f80..2987e3eec0ace 100644 --- a/test/tap/shrinkwrap-_auth.js +++ b/test/tap/shrinkwrap-_auth.js @@ -5,15 +5,14 @@ var path = require('path') var writeFileSync = require('graceful-fs').writeFileSync var mkdirp = require('mkdirp') -var osenv = require('osenv') var http = require('http') -var rimraf = require('rimraf') var ssri = require('ssri') -var test = require('tap').test +var t = require('tap') +var test = t.test var common = require('../common-tap.js') -var pkg = 
path.resolve(__dirname, path.basename(__filename, '.js')) +var pkg = common.pkg var outfile = path.resolve(pkg, '_npmrc') var modules = path.resolve(pkg, 'node_modules') @@ -23,7 +22,34 @@ var tarball = path.resolve(__dirname, '../fixtures/scoped-underscore-1.3.1.tgz') var tarballIntegrity = ssri.fromData(fs.readFileSync(tarball)).toString() var _auth = '0xabad1dea' + +var contents = '_auth=' + _auth + '\n' + + '\'always-auth\'=true\n' + +var json = { + name: 'test-package-install', + version: '1.0.0', + dependencies: { + '@scoped/underscore': '1.0.0' + } +} + +var shrinkwrap = { + name: 'test-package-install', + version: '1.0.0', + lockfileVersion: 1, + dependencies: { + '@scoped/underscore': { + resolved: tarballURL, + integrity: tarballIntegrity, + version: '1.3.1' + } + } +} + var server = http.createServer() +t.teardown(() => server.close()) + const errors = [] server.on('request', (req, res) => { const auth = 'Basic ' + _auth @@ -44,10 +70,14 @@ server.on('request', (req, res) => { }) test('setup', function (t) { - server.listen(common.port, () => { - setup() - t.done() - }) + mkdirp.sync(modules) + writeFileSync(path.resolve(pkg, 'package.json'), JSON.stringify(json, null, 2) + '\n') + writeFileSync(outfile, contents) + writeFileSync( + path.resolve(pkg, 'npm-shrinkwrap.json'), + JSON.stringify(shrinkwrap, null, 2) + '\n' + ) + server.listen(common.port, t.end) }) test('authed npm install with shrinkwrapped global package using _auth', function (t) { @@ -77,50 +107,3 @@ test('authed npm install with shrinkwrapped global package using _auth', functio } ) }) - -test('cleanup', function (t) { - server.close(() => { - cleanup() - t.end() - }) -}) - -var contents = '_auth=' + _auth + '\n' + - '\'always-auth\'=true\n' - -var json = { - name: 'test-package-install', - version: '1.0.0', - dependencies: { - '@scoped/underscore': '1.0.0' - } -} - -var shrinkwrap = { - name: 'test-package-install', - version: '1.0.0', - lockfileVersion: 1, - dependencies: { - '@scoped/underscore': { - resolved: tarballURL, - integrity: tarballIntegrity, - version: '1.3.1' - } - } -} - -function setup () { - cleanup() - mkdirp.sync(modules) - writeFileSync(path.resolve(pkg, 'package.json'), JSON.stringify(json, null, 2) + '\n') - writeFileSync(outfile, contents) - writeFileSync( - path.resolve(pkg, 'npm-shrinkwrap.json'), - JSON.stringify(shrinkwrap, null, 2) + '\n' - ) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/shrinkwrap-default-dev.js b/test/tap/shrinkwrap-default-dev.js index 48f9f2a90af90..cd101092d4e92 100644 --- a/test/tap/shrinkwrap-default-dev.js +++ b/test/tap/shrinkwrap-default-dev.js @@ -7,9 +7,9 @@ var File = Tacks.File var Dir = Tacks.Dir var common = require('../common-tap.js') -var basedir = path.join(__dirname, path.basename(__filename, '.js')) +var basedir = common.pkg var testdir = path.join(basedir, 'testdir') -var cachedir = path.join(basedir, 'cache') +var cachedir = common.cache var globaldir = path.join(basedir, 'global') var tmpdir = path.join(basedir, 'tmp') diff --git a/test/tap/shrinkwrap-dev-dependency.js b/test/tap/shrinkwrap-dev-dependency.js index 79d3b7cd0fe46..973cb5435b8f2 100644 --- a/test/tap/shrinkwrap-dev-dependency.js +++ b/test/tap/shrinkwrap-dev-dependency.js @@ -1,18 +1,15 @@ var fs = require('fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = 
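// --- Illustrative sketch, not part of the diff above ---
// shrinkwrap-_auth.js now keeps one hand-rolled registry server alive for
// the whole file and closes it from tap's teardown instead of a trailing
// "cleanup" test. The essential shape of that pattern, with a placeholder
// response body (the _auth value is the same dummy credential the test
// uses):
'use strict'
const http = require('http')
const t = require('tap')
const common = require('../common-tap.js')

const _auth = '0xabad1dea' // dummy credential

const server = http.createServer((req, res) => {
  // Reject any request that does not carry the expected _auth header.
  if (req.headers.authorization !== 'Basic ' + _auth) {
    res.statusCode = 401
    return res.end(JSON.stringify({ error: 'unauthorized' }))
  }
  res.statusCode = 200
  res.end(JSON.stringify({ ok: true }))
})

// Closing in a teardown lets node exit even if an assertion fails early.
t.teardown(() => server.close())

t.test('setup', (t) => {
  server.listen(common.port, t.end)
})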
require('../common-tap.js') -var pkg = path.resolve(__dirname, 'shrinkwrap-dev-dependency') +var pkg = common.pkg var opts = [ - '--cache', path.resolve(pkg, 'cache'), + '--cache', common.cache, '--registry', common.registry ] @@ -46,26 +43,11 @@ var json = { } } -function setup () { - cleanup() - mkdirp.sync(pkg) - fs.writeFileSync(path.join(pkg, 'package.json'), JSON.stringify(json, null, 2)) - process.chdir(pkg) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} - -test('setup', function (t) { - setup() - t.end() -}) - test("shrinkwrap doesn't strip out the dependency", function (t) { t.plan(3) - setup() + + fs.writeFileSync(path.join(pkg, 'package.json'), JSON.stringify(json, null, 2)) + process.chdir(pkg) mr({port: common.port}, function (er, s) { common.npm(opts.concat(['install', '.']), {stdio: [0, 'pipe', 2]}, function (err, code) { @@ -86,8 +68,3 @@ test("shrinkwrap doesn't strip out the dependency", function (t) { }) }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) diff --git a/test/tap/shrinkwrap-empty-deps.js b/test/tap/shrinkwrap-empty-deps.js index eeb5e656ef20c..3c68845745a94 100644 --- a/test/tap/shrinkwrap-empty-deps.js +++ b/test/tap/shrinkwrap-empty-deps.js @@ -2,14 +2,11 @@ const common = require('../common-tap.js') const fs = require('fs') -const mkdirp = require('mkdirp') const mr = require('npm-registry-mock') -const osenv = require('osenv') const path = require('path') -const rimraf = require('rimraf') const test = require('tap').test -const pkg = path.resolve(__dirname, 'shrinkwrap-empty-deps') +const pkg = common.pkg const EXEC_OPTS = { cwd: pkg } @@ -22,8 +19,6 @@ const json = { } test('setup', function (t) { - cleanup() - mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) @@ -61,14 +56,3 @@ test('returns a list of removed items', function (t) { ) }) }) - -test('cleanup', function (t) { - cleanup() - - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/shrinkwrap-extra-metadata.js b/test/tap/shrinkwrap-extra-metadata.js index dd7f85ee827ed..6e652e2db0714 100644 --- a/test/tap/shrinkwrap-extra-metadata.js +++ b/test/tap/shrinkwrap-extra-metadata.js @@ -2,15 +2,12 @@ const common = require('../common-tap.js') const fs = require('fs') -const mkdirp = require('mkdirp') const mr = require('npm-registry-mock') const npm = require('../../lib/npm.js') -const osenv = require('osenv') const path = require('path') -const rimraf = require('rimraf') const test = require('tap').test -const pkg = path.join(__dirname, path.basename(__filename, '.js')) +const pkg = common.pkg const json = { author: 'Rockbert', @@ -19,8 +16,6 @@ const json = { } test('setup', function (t) { - cleanup() - mkdirp.sync(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) @@ -32,6 +27,7 @@ test('setup', function (t) { test('adds additional metadata fields from the pkglock spec', function (t) { mr({ port: common.port }, function (er, s) { + t.teardown(() => s.close()) common.npm( [ '--registry', common.registry, @@ -56,21 +52,9 @@ test('adds additional metadata fields from the pkglock spec', function (t) { 'shrinkwrap wrote the expected metadata fields' ) - s.close() t.end() }) } ) }) }) - -test('cleanup', function (t) { - cleanup() - - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/shrinkwrap-global-auth.js b/test/tap/shrinkwrap-global-auth.js index 
76fa0a4662bf5..e14a328a43a51 100644 --- a/test/tap/shrinkwrap-global-auth.js +++ b/test/tap/shrinkwrap-global-auth.js @@ -6,14 +6,12 @@ var writeFileSync = require('graceful-fs').writeFileSync var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var ssri = require('ssri') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, path.basename(__filename, '.js')) +var pkg = common.pkg var outfile = path.resolve(pkg, '_npmrc') var modules = path.resolve(pkg, 'node_modules') @@ -22,7 +20,30 @@ var tarballURL = common.registry + tarballPath var tarball = path.resolve(__dirname, '../fixtures/scoped-underscore-1.3.1.tgz') var tarballIntegrity = ssri.fromData(fs.readFileSync(tarball)).toString() -var server +var contents = 'registry=' + common.registry + '\n' + + '_authToken=0xabad1dea\n' + + '\'always-auth\'=true\n' + +var json = { + name: 'test-package-install', + version: '1.0.0', + dependencies: { + '@scoped/underscore': '1.0.0' + } +} + +var shrinkwrap = { + name: 'test-package-install', + version: '1.0.0', + lockfileVersion: 1, + dependencies: { + '@scoped/underscore': { + resolved: tarballURL, + integrity: tarballIntegrity, + version: '1.3.1' + } + } +} function mocks (server) { var auth = 'Bearer 0xabad1dea' @@ -34,10 +55,16 @@ function mocks (server) { } test('setup', function (t) { + mkdirp.sync(modules) + writeFileSync(path.resolve(pkg, 'package.json'), JSON.stringify(json, null, 2) + '\n') + writeFileSync(outfile, contents) + writeFileSync( + path.resolve(pkg, 'npm-shrinkwrap.json'), + JSON.stringify(shrinkwrap, null, 2) + '\n' + ) mr({ port: common.port, plugin: mocks }, function (er, s) { - server = s + t.parent.teardown(() => s.close()) t.ok(s, 'set up mock registry') - setup() t.end() }) }) @@ -68,50 +95,3 @@ test('authed npm install with shrinkwrapped global package', function (t) { } ) }) - -test('cleanup', function (t) { - server.close() - cleanup() - t.end() -}) - -var contents = 'registry=' + common.registry + '\n' + - '_authToken=0xabad1dea\n' + - '\'always-auth\'=true\n' - -var json = { - name: 'test-package-install', - version: '1.0.0', - dependencies: { - '@scoped/underscore': '1.0.0' - } -} - -var shrinkwrap = { - name: 'test-package-install', - version: '1.0.0', - lockfileVersion: 1, - dependencies: { - '@scoped/underscore': { - resolved: tarballURL, - integrity: tarballIntegrity, - version: '1.3.1' - } - } -} - -function setup () { - cleanup() - mkdirp.sync(modules) - writeFileSync(path.resolve(pkg, 'package.json'), JSON.stringify(json, null, 2) + '\n') - writeFileSync(outfile, contents) - writeFileSync( - path.resolve(pkg, 'npm-shrinkwrap.json'), - JSON.stringify(shrinkwrap, null, 2) + '\n' - ) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/shrinkwrap-lifecycle-cwd.js b/test/tap/shrinkwrap-lifecycle-cwd.js index bc9ab9cf11d5b..78f40f5297628 100644 --- a/test/tap/shrinkwrap-lifecycle-cwd.js +++ b/test/tap/shrinkwrap-lifecycle-cwd.js @@ -7,12 +7,11 @@ var File = Tacks.File var Dir = Tacks.Dir var common = require('../common-tap.js') -var basedir = path.join(__dirname, path.basename(__filename, '.js')) +var basedir = common.pkg var testdir = path.join(basedir, 'testdir') -var cachedir = path.join(basedir, 'cache') +var cachedir = common.cache var globaldir = path.join(basedir, 'global') var tmpdir = path.join(basedir, 'tmp') -var escapeArg = require('../../lib/utils/escape-arg.js') var conf 
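// --- Illustrative sketch, not part of the diff above ---
// The global-auth test shows the other registry-mock lifecycle used in
// this diff: npm-registry-mock is started once in a "setup" test with a
// plugin that only answers requests carrying the expected bearer token,
// and the server is closed from t.parent.teardown() rather than a
// dedicated cleanup test. The route and payload below are placeholders.
'use strict'
const mr = require('npm-registry-mock')
const test = require('tap').test
const common = require('../common-tap.js')

function mocks (server) {
  const auth = 'Bearer 0xabad1dea'
  // Only reply when the expected Authorization header is presented.
  server.get('/example-package', { authorization: auth }).reply(200, {
    name: 'example-package',
    'dist-tags': { latest: '1.0.0' },
    versions: {}
  })
}

test('setup', function (t) {
  mr({ port: common.port, plugin: mocks }, function (er, s) {
    if (er) throw er
    // Tear the mock down when the whole file finishes, pass or fail.
    t.parent.teardown(() => s.close())
    t.ok(s, 'mock registry listening on common.port')
    t.end()
  })
})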
= { cwd: testdir, @@ -39,8 +38,8 @@ var fixture = new Tacks(Dir({ // add this to the end of the command to preserve the debug log: // || mv npm-debug.log real-debug.log // removed for windows compat reasons - abc: escapeArg(common.nodeBin) + ' ' + escapeArg(common.bin) + ' shrinkwrap', - shrinkwrap: escapeArg(common.nodeBin) + ' scripts/shrinkwrap.js' + abc: 'node ' + JSON.stringify(common.bin) + ' shrinkwrap', + shrinkwrap: 'node scripts/shrinkwrap.js' } }), scripts: Dir({ diff --git a/test/tap/shrinkwrap-lifecycle.js b/test/tap/shrinkwrap-lifecycle.js index 8c0f36e3bb84f..6e6bc4c2872b2 100644 --- a/test/tap/shrinkwrap-lifecycle.js +++ b/test/tap/shrinkwrap-lifecycle.js @@ -1,16 +1,10 @@ var fs = require('graceful-fs') var path = require('path') - -var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test - var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'shrinkwrap-lifecycle') +var pkg = common.pkg test('npm shrinkwrap execution order', function (t) { - setup() fs.writeFileSync(path.resolve(pkg, 'package.json'), JSON.stringify({ author: 'Simen Bekkhus', name: 'shrinkwrap-lifecycle', @@ -22,7 +16,7 @@ test('npm shrinkwrap execution order', function (t) { postshrinkwrap: 'echo this happens third' } }), 'utf8') - common.npm(['shrinkwrap', '--loglevel=error'], [], function (err, code, stdout, stderr) { + common.npm(['shrinkwrap', '--loglevel=error'], { cwd: pkg }, function (err, code, stdout, stderr) { if (err) throw err t.comment(stdout) @@ -41,14 +35,3 @@ test('npm shrinkwrap execution order', function (t) { t.end() }) }) - -test('cleanup', function (t) { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) - t.end() -}) - -function setup () { - mkdirp.sync(pkg) - process.chdir(pkg) -} diff --git a/test/tap/shrinkwrap-local-dependency.js b/test/tap/shrinkwrap-local-dependency.js index 58974ad72431a..5e6d278d30c25 100644 --- a/test/tap/shrinkwrap-local-dependency.js +++ b/test/tap/shrinkwrap-local-dependency.js @@ -8,8 +8,8 @@ var unixFormatPath = require('../../lib/utils/unix-format-path.js') var File = Tacks.File var Dir = Tacks.Dir -var testdir = path.resolve(__dirname, path.basename(__filename, '.js')) -var cachedir = path.resolve(testdir, 'cache') +var testdir = common.pkg +var cachedir = common.cache var config = ['--cache=' + cachedir, '--loglevel=error'] var shrinkwrap = { @@ -20,8 +20,7 @@ var shrinkwrap = { version: 'file:' + unixFormatPath(path.join('mods', 'mod2')), dependencies: { mod1: { - version: 'file:' + unixFormatPath(path.join('mods', 'mod1')), - bundled: true + version: 'file:' + unixFormatPath(path.join('mods', 'mod1')) } } } diff --git a/test/tap/shrinkwrap-nested.js b/test/tap/shrinkwrap-nested.js index 44d0cb0b2fb34..bace44d01aa42 100644 --- a/test/tap/shrinkwrap-nested.js +++ b/test/tap/shrinkwrap-nested.js @@ -8,7 +8,7 @@ var fs = require('fs') var path = require('path') var common = require('../common-tap.js') -var testdir = path.resolve(__dirname, path.basename(__filename, '.js')) +var testdir = common.pkg var modAtgz = path.resolve(testdir, 'modA') + '-1.0.0.tgz' var modB1tgz = path.resolve(testdir, 'modB') + '-1.0.0.tgz' var modB2tgz = path.resolve(testdir, 'modB') + '-2.0.0.tgz' diff --git a/test/tap/shrinkwrap-optional-dependency.js b/test/tap/shrinkwrap-optional-dependency.js index 5085dd0cbb49c..a08d1538490db 100644 --- a/test/tap/shrinkwrap-optional-dependency.js +++ b/test/tap/shrinkwrap-optional-dependency.js @@ -3,14 +3,12 @@ var path = require('path') var 
mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') var npm = require('../../') -var pkg = path.resolve(__dirname, 'shrinkwrap-optional-dependency') +var pkg = common.pkg test('shrinkwrap does not fail on missing optional dependency', function (t) { t.plan(1) @@ -22,17 +20,17 @@ test('shrinkwrap does not fail on missing optional dependency', function (t) { } mr({port: common.port, mocks: mocks}, function (er, s) { - function fail (err) { - s.close() // Close on failure to allow node to exit - t.fail(err) - } - + t.parent.teardown(() => s.close()) setup(function (err) { - if (err) return fail(err) + if (err) { + throw err + } // Install without the optional dependency npm.install('.', function (err) { - if (err) return fail(err) + if (err) { + throw err + } // Pretend the optional dependency was specified, but somehow failed to load: json.optionalDependencies = { @@ -41,7 +39,9 @@ test('shrinkwrap does not fail on missing optional dependency', function (t) { writePackage() npm.commands.shrinkwrap([], true, function (err, results) { - if (err) return fail(err) + if (err) { + throw err + } t.deepEqual(results.dependencies, desired.dependencies) s.close() @@ -52,11 +52,6 @@ test('shrinkwrap does not fail on missing optional dependency', function (t) { }) }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - var desired = { name: 'npm-test-shrinkwrap-optional-dependency', version: '0.0.0', @@ -83,19 +78,14 @@ function writePackage () { } function setup (cb) { - cleanup() mkdirp.sync(pkg) writePackage() process.chdir(pkg) var opts = { - cache: path.resolve(pkg, 'cache'), - registry: common.registry + cache: common.cache, + registry: common.registry, + cwd: pkg } npm.load(opts, cb) } - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/shrinkwrap-optional-platform.js b/test/tap/shrinkwrap-optional-platform.js index b109d89578150..f5b87d2f8d445 100644 --- a/test/tap/shrinkwrap-optional-platform.js +++ b/test/tap/shrinkwrap-optional-platform.js @@ -6,9 +6,9 @@ var File = Tacks.File var Dir = Tacks.Dir var common = require('../common-tap.js') -var basedir = path.join(__dirname, path.basename(__filename, '.js')) +var basedir = common.pkg var testdir = path.join(basedir, 'testdir') -var cachedir = path.join(basedir, 'cache') +var cachedir = common.cache var globaldir = path.join(basedir, 'global') var tmpdir = path.join(basedir, 'tmp') diff --git a/test/tap/shrinkwrap-optional-property.js b/test/tap/shrinkwrap-optional-property.js index ccff5a93e2e38..c2dc8585aec1c 100644 --- a/test/tap/shrinkwrap-optional-property.js +++ b/test/tap/shrinkwrap-optional-property.js @@ -1,37 +1,36 @@ var fs = require('fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') var npm = require('../../') -var pkg = path.resolve(__dirname, 'shrinkwrap-optional-dependency') +var pkg = common.pkg test('shrinkwrap adds optional property when optional dependency', function (t) { t.plan(1) mr({port: common.port}, function (er, s) { - function fail (err) { - s.close() // Close on failure to allow node to exit - t.fail(err) - } - + t.parent.teardown(() => s.close()) setup(function (err) { - if (err) return fail(err) + if (err) { + throw err + } // Install 
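// --- Illustrative sketch, not part of the diff above ---
// The shrinkwrap-optional-* tests drive npm programmatically instead of
// spawning the CLI: load the config, install, then call the shrinkwrap
// command directly and inspect the data passed to its callback. Stripped
// down, and assuming a mock registry is already listening on
// common.registry as those tests arrange, the flow is roughly:
'use strict'
const npm = require('../../')
const common = require('../common-tap.js')

process.chdir(common.pkg) // operate inside the scratch dir, as the tests do

npm.load({ cache: common.cache, registry: common.registry }, function (err) {
  if (err) throw err
  npm.install('.', function (err) {
    if (err) throw err
    // Passing `true` as the tests above do, and reading the generated
    // shrinkwrap data back from the callback for assertions.
    npm.commands.shrinkwrap([], true, function (err, results) {
      if (err) throw err
      console.log(Object.keys(results.dependencies || {}))
    })
  })
})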
with the optional dependency npm.install('.', function (err) { - if (err) return fail(err) + if (err) { + throw err + } writePackage() npm.commands.shrinkwrap([], true, function (err, results) { - if (err) return fail(err) + if (err) { + throw err + } t.deepEqual(results.dependencies, desired.dependencies) s.close() @@ -42,11 +41,6 @@ test('shrinkwrap adds optional property when optional dependency', function (t) }) }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - var desired = { name: 'npm-test-shrinkwrap-optional-dependency', version: '0.0.0', @@ -58,7 +52,7 @@ var desired = { }, 'underscore': { version: '1.3.3', - resolved: 'http://localhost:1337/underscore/-/underscore-1.3.3.tgz', + resolved: 'http://localhost:' + common.port + '/underscore/-/underscore-1.3.3.tgz', optional: true, integrity: 'sha1-R6xTaD2vgyv6lS4XdEF9pHgXrkI=' } @@ -82,19 +76,12 @@ function writePackage () { } function setup (cb) { - cleanup() - mkdirp.sync(pkg) writePackage() process.chdir(pkg) var opts = { - cache: path.resolve(pkg, 'cache'), + cache: common.cache, registry: common.registry } npm.load(opts, cb) } - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/shrinkwrap-prod-dependency-also.js b/test/tap/shrinkwrap-prod-dependency-also.js index 6cc388946e406..1e4e9d7438a2c 100644 --- a/test/tap/shrinkwrap-prod-dependency-also.js +++ b/test/tap/shrinkwrap-prod-dependency-also.js @@ -1,16 +1,13 @@ var fs = require('fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, path.basename(__filename, '.js')) +var pkg = common.pkg var opts = [ - '--cache=' + path.resolve(pkg, 'cache'), + '--cache=' + common.cache, '--registry=' + common.registry ] @@ -21,13 +18,42 @@ function reportOutput (t, fh, out) { t.comment(prefix + trimmed.split(/\n/).join('\n' + prefix)) } -var server +var desired = { + name: 'npm-test-shrinkwrap-prod-dependency', + version: '0.0.0', + dependencies: { + request: { + version: '0.9.0', + resolved: common.registry + '/request/-/request-0.9.0.tgz', + integrity: 'sha1-EEn1mm9GWI5tAwkh+7hMovDCcU4=' + }, + underscore: { + dev: true, + version: '1.5.1', + resolved: common.registry + '/underscore/-/underscore-1.5.1.tgz', + integrity: 'sha1-0r3oF9F2/63olKtxRY5oKhS4bck=' + } + } +} + +var json = { + author: 'Domenic Denicola', + name: 'npm-test-shrinkwrap-prod-dependency', + version: '0.0.0', + dependencies: { + request: '0.9.0' + }, + devDependencies: { + underscore: '1.5.1' + } +} + test("shrinkwrap --also=development doesn't strip out prod dependencies", function (t) { t.plan(4) mr({port: common.port}, function (er, s) { - server = s - setup() + t.parent.teardown(() => s.close()) + fs.writeFileSync(path.join(pkg, 'package.json'), JSON.stringify(json, null, 2)) common.npm(['install', '.'].concat(opts), {cwd: pkg}, function (err, code, stdout, stderr) { if (err) return t.fail(err) t.is(code, 0, 'install') @@ -57,50 +83,3 @@ test("shrinkwrap --also=development doesn't strip out prod dependencies", functi }) }) }) - -test('cleanup', function (t) { - server.close() - cleanup() - t.end() -}) - -var desired = { - name: 'npm-test-shrinkwrap-prod-dependency', - version: '0.0.0', - dependencies: { - request: { - version: '0.9.0', - resolved: common.registry + '/request/-/request-0.9.0.tgz', - integrity: 'sha1-EEn1mm9GWI5tAwkh+7hMovDCcU4=' - 
}, - underscore: { - dev: true, - version: '1.5.1', - resolved: common.registry + '/underscore/-/underscore-1.5.1.tgz', - integrity: 'sha1-0r3oF9F2/63olKtxRY5oKhS4bck=' - } - } -} - -var json = { - author: 'Domenic Denicola', - name: 'npm-test-shrinkwrap-prod-dependency', - version: '0.0.0', - dependencies: { - request: '0.9.0' - }, - devDependencies: { - underscore: '1.5.1' - } -} - -function setup (opts) { - cleanup() - mkdirp.sync(pkg) - fs.writeFileSync(path.join(pkg, 'package.json'), JSON.stringify(json, null, 2)) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/shrinkwrap-prod-dependency.js b/test/tap/shrinkwrap-prod-dependency.js index 34e30d7c7a987..bf6d98bf80152 100644 --- a/test/tap/shrinkwrap-prod-dependency.js +++ b/test/tap/shrinkwrap-prod-dependency.js @@ -3,42 +3,12 @@ var path = require('path') var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var npm = require('../../') var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'shrinkwrap-prod-dependency') - -test("shrinkwrap --dev doesn't strip out prod dependencies", function (t) { - t.plan(1) - - mr({port: common.port}, function (er, s) { - setup({}, function (err) { - if (err) return t.fail(err) - - npm.install('.', function (err) { - if (err) return t.fail(err) - - npm.config.set('dev', true) - npm.commands.shrinkwrap([], true, function (err, results) { - if (err) return t.fail(err) - - t.deepEqual(results.dependencies, desired.dependencies) - s.close() - t.end() - }) - }) - }) - }) -}) - -test('cleanup', function (t) { - cleanup() - t.end() -}) +var pkg = common.pkg var desired = { name: 'npm-test-shrinkwrap-prod-dependency', @@ -70,25 +40,37 @@ var json = { } } -function setup (opts, cb) { - cleanup() +test('setup', function (t) { mkdirp.sync(pkg) fs.writeFileSync(path.join(pkg, 'package.json'), JSON.stringify(json, null, 2)) process.chdir(pkg) var allOpts = { - cache: path.resolve(pkg, 'cache'), + cache: common.cache, registry: common.registry } - for (var key in opts) { - allOpts[key] = opts[key] - } + npm.load(allOpts, t.end) +}) - npm.load(allOpts, cb) -} +test('mock registry', t => { + mr({port: common.port}, function (er, s) { + t.parent.teardown(() => s.close()) + t.end() + }) +}) -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} +test("shrinkwrap --dev doesn't strip out prod dependencies", t => { + t.plan(1) + npm.install('.', function (err) { + if (err) return t.fail(err) + + npm.config.set('dev', true) + npm.commands.shrinkwrap([], true, function (err, results) { + if (err) return t.fail(err) + + t.deepEqual(results.dependencies, desired.dependencies) + t.end() + }) + }) +}) diff --git a/test/tap/shrinkwrap-resolve-conflict.js b/test/tap/shrinkwrap-resolve-conflict.js index 146d1191bf5ae..d1f63f7e62661 100644 --- a/test/tap/shrinkwrap-resolve-conflict.js +++ b/test/tap/shrinkwrap-resolve-conflict.js @@ -12,7 +12,7 @@ const Tacks = require('tacks') const File = Tacks.File const Dir = Tacks.Dir -const testDir = path.resolve(__dirname, path.basename(__filename, '.js')) +const testDir = common.pkg const modAdir = path.resolve(testDir, 'modA') const modBdir = path.resolve(testDir, 'modB') const modCdir = path.resolve(testDir, 'modC') diff --git a/test/tap/shrinkwrap-save-dev-with-existing-deps.js b/test/tap/shrinkwrap-save-dev-with-existing-deps.js index 5934a758da27a..1c35bf226b913 100644 --- 
a/test/tap/shrinkwrap-save-dev-with-existing-deps.js +++ b/test/tap/shrinkwrap-save-dev-with-existing-deps.js @@ -1,15 +1,11 @@ /* eslint-disable camelcase */ var fs = require('fs') var path = require('path') - var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test - var common = require('../common-tap.js') -var base = path.resolve(__dirname, path.basename(__filename, '.js')) +var base = common.pkg var installme = path.join(base, 'installme') var installme_pkg = path.join(installme, 'package.json') var example = path.join(base, 'example') @@ -74,7 +70,6 @@ function writeJson (filename, obj) { } test('setup', function (t) { - cleanup() writeJson(installme_pkg, installme_pkg_json) writeJson(example_pkg, example_pkg_json) writeJson(example_shrinkwrap, example_shrinkwrap_json) @@ -95,13 +90,3 @@ test('install --save-dev leaves prod deps alone', function (t) { t.end() }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(base) -} diff --git a/test/tap/shrinkwrap-save-with-existing-dev-deps.js b/test/tap/shrinkwrap-save-with-existing-dev-deps.js index 5f0eb07962276..acf6ac21014d1 100644 --- a/test/tap/shrinkwrap-save-with-existing-dev-deps.js +++ b/test/tap/shrinkwrap-save-with-existing-dev-deps.js @@ -3,13 +3,11 @@ var fs = require('fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var base = path.resolve(__dirname, path.basename(__filename, '.js')) +var base = common.pkg var installme = path.join(base, 'installme') var installme_pkg = path.join(installme, 'package.json') var example = path.join(base, 'example') @@ -18,7 +16,13 @@ var example_pkg = path.join(example, 'package.json') var installed = path.join(example, 'node_modules', 'installed') var installed_pkg = path.join(installed, 'package.json') -var EXEC_OPTS = { cwd: example } +// Ignore max listeners warnings until that gets fixed +var env = Object.keys(process.env).reduce((set, key) => { + if (!set[key]) set[key] = process.env[key] + return set +}, { NODE_NO_WARNINGS: '1' }) + +var EXEC_OPTS = { cwd: example, env: env } var installme_pkg_json = { name: 'installme', @@ -57,7 +61,6 @@ function writeJson (filename, obj) { } test('setup', function (t) { - cleanup() writeJson(installme_pkg, installme_pkg_json) writeJson(example_pkg, example_pkg_json) writeJson(example_shrinkwrap, example_shrinkwrap_json) @@ -65,24 +68,12 @@ test('setup', function (t) { t.end() }) -test('install --save leaves dev deps alone', function (t) { - common.npm(['install', '--save', 'file://' + installme], EXEC_OPTS, function (er, code, stdout, stderr) { - t.ifError(er, "spawn didn't catch fire") - t.is(code, 0, 'install completed ok') - t.is(stderr, '', 'install completed without error output') - var shrinkwrap = JSON.parse(fs.readFileSync(example_shrinkwrap)) - t.ok(shrinkwrap.dependencies.installed, "save new install didn't remove dev dep") - t.ok(shrinkwrap.dependencies.installme, 'save new install DID add new dep') - t.end() - }) -}) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(base) -} +test('install --save leaves dev deps alone', t => + common.npm(['install', '--save', 'file://' + installme], EXEC_OPTS) + .then(([code, stdout, stderr]) => { + t.is(code, 0, 'install completed ok') 
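// --- Illustrative sketch, not part of the diff above ---
// shrinkwrap-save-with-existing-dev-deps.js shows two smaller idioms from
// this refactor: building a child environment that sets NODE_NO_WARNINGS=1
// (to silence the max-listeners warnings the comment mentions) while still
// inheriting everything else, and calling common.npm() without a callback
// to get a promise of [code, stdout, stderr]. The command below is just a
// harmless example.
'use strict'
const common = require('../common-tap.js')

// Start from the override, then copy each process.env entry that is not
// already set, so NODE_NO_WARNINGS stays '1' -- the same reduce() the
// diff uses.
const env = Object.keys(process.env).reduce((set, key) => {
  if (!set[key]) set[key] = process.env[key]
  return set
}, { NODE_NO_WARNINGS: '1' })

common.npm(['--version'], { cwd: common.pkg, env: env })
  .then(([code, stdout]) => {
    console.log('exit code', code, 'version', stdout.trim())
  })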
+ t.is(stderr, '', 'install completed without error output') + var shrinkwrap = JSON.parse(fs.readFileSync(example_shrinkwrap)) + t.ok(shrinkwrap.dependencies.installed, "save new install didn't remove dev dep") + t.ok(shrinkwrap.dependencies.installme, 'save new install DID add new dep') + })) diff --git a/test/tap/shrinkwrap-scoped-auth.js b/test/tap/shrinkwrap-scoped-auth.js index 72c3b5119853a..cc1b2270012ec 100644 --- a/test/tap/shrinkwrap-scoped-auth.js +++ b/test/tap/shrinkwrap-scoped-auth.js @@ -6,15 +6,13 @@ var writeFileSync = require('graceful-fs').writeFileSync var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var ssri = require('ssri') var test = require('tap').test var common = require('../common-tap.js') var toNerfDart = require('../../lib/config/nerf-dart.js') -var pkg = path.resolve(__dirname, path.basename(__filename, '.js')) +var pkg = common.pkg var outfile = path.resolve(pkg, '_npmrc') var modules = path.resolve(pkg, 'node_modules') var tarballPath = '/scoped-underscore/-/scoped-underscore-1.3.1.tgz' @@ -22,8 +20,6 @@ var tarballURL = common.registry + tarballPath var tarball = path.resolve(__dirname, '../fixtures/scoped-underscore-1.3.1.tgz') var tarballIntegrity = ssri.fromData(fs.readFileSync(tarball)).toString() -var server - function mocks (server) { var auth = 'Bearer 0xabad1dea' server.get(tarballPath, { authorization: auth }).replyWithFile(200, tarball) @@ -33,11 +29,41 @@ function mocks (server) { }) } +var contents = '@scoped:registry=' + common.registry + '\n' + + toNerfDart(common.registry) + ':_authToken=0xabad1dea\n' + +var json = { + name: 'test-package-install', + version: '1.0.0', + dependencies: { + '@scoped/underscore': '1.0.0' + } +} + +var shrinkwrap = { + name: 'test-package-install', + version: '1.0.0', + lockfileVersion: 1, + dependencies: { + '@scoped/underscore': { + resolved: tarballURL, + integrity: tarballIntegrity, + version: '1.3.1' + } + } +} + test('setup', function (t) { + mkdirp.sync(modules) + writeFileSync(path.resolve(pkg, 'package.json'), JSON.stringify(json, null, 2) + '\n') + writeFileSync(outfile, contents) + writeFileSync( + path.resolve(pkg, 'npm-shrinkwrap.json'), + JSON.stringify(shrinkwrap, null, 2) + '\n' + ) mr({ port: common.port, plugin: mocks }, function (er, s) { - server = s + t.parent.teardown(() => s.close()) t.ok(s, 'set up mock registry') - setup() t.end() }) }) @@ -68,49 +94,3 @@ test('authed npm install with shrinkwrapped scoped package', function (t) { } ) }) - -test('cleanup', function (t) { - server.close() - cleanup() - t.end() -}) - -var contents = '@scoped:registry=' + common.registry + '\n' + - toNerfDart(common.registry) + ':_authToken=0xabad1dea\n' - -var json = { - name: 'test-package-install', - version: '1.0.0', - dependencies: { - '@scoped/underscore': '1.0.0' - } -} - -var shrinkwrap = { - name: 'test-package-install', - version: '1.0.0', - lockfileVersion: 1, - dependencies: { - '@scoped/underscore': { - resolved: tarballURL, - integrity: tarballIntegrity, - version: '1.3.1' - } - } -} - -function setup () { - cleanup() - mkdirp.sync(modules) - writeFileSync(path.resolve(pkg, 'package.json'), JSON.stringify(json, null, 2) + '\n') - writeFileSync(outfile, contents) - writeFileSync( - path.resolve(pkg, 'npm-shrinkwrap.json'), - JSON.stringify(shrinkwrap, null, 2) + '\n' - ) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/shrinkwrap-shared-dev-dependency.js 
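// --- Illustrative sketch, not part of the diff above ---
// The scoped-auth test writes a project .npmrc that points a scope at the
// mock registry and attaches the auth token to the registry's "nerf dart"
// (its protocol-less //host:port/ form, the same shape as the
// //localhost:<port>/: lines earlier in this diff). A standalone version,
// with a placeholder token:
'use strict'
const fs = require('fs')
const path = require('path')
const common = require('../common-tap.js')
const toNerfDart = require('../../lib/config/nerf-dart.js')

const token = '0xabad1dea' // placeholder credential, as in the tests

const npmrc = [
  '@scoped:registry=' + common.registry,
  // e.g. //localhost:<port>/:_authToken=0xabad1dea
  toNerfDart(common.registry) + ':_authToken=' + token,
  ''
].join('\n')

fs.writeFileSync(path.resolve(common.pkg, '_npmrc'), npmrc)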
b/test/tap/shrinkwrap-shared-dev-dependency.js index 8960c0926154c..5aa331f34cc46 100644 --- a/test/tap/shrinkwrap-shared-dev-dependency.js +++ b/test/tap/shrinkwrap-shared-dev-dependency.js @@ -8,11 +8,11 @@ var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, path.basename(__filename, '.js')) +var pkg = common.pkg var opts = { env: common.newEnv().extend({ - npm_config_cache: path.resolve(pkg, 'cache'), + npm_config_cache: common.cache, npm_config_registry: common.registry }), stdio: [0, 1, 2], diff --git a/test/tap/shrinkwrap-version-match.js b/test/tap/shrinkwrap-version-match.js index 57906528f7be7..e579f84d4b3bd 100644 --- a/test/tap/shrinkwrap-version-match.js +++ b/test/tap/shrinkwrap-version-match.js @@ -7,7 +7,7 @@ var fs = require('fs') var path = require('path') var common = require('../common-tap.js') -var testdir = path.resolve(__dirname, path.basename(__filename, '.js')) +var testdir = common.pkg var modAdir = path.resolve(testdir, 'modA') var modB1dir = path.resolve(testdir, 'modB@1') var modB2dir = path.resolve(testdir, 'modB@2') diff --git a/test/tap/sorted-package-json.js b/test/tap/sorted-package-json.js index 557f3dc53d272..f802bfd9e5605 100644 --- a/test/tap/sorted-package-json.js +++ b/test/tap/sorted-package-json.js @@ -1,21 +1,28 @@ var test = require('tap').test var path = require('path') -var rimraf = require('rimraf') -var mkdirp = require('mkdirp') -var pkg = path.resolve(__dirname, 'sorted-package-json') +var common = require('../common-tap.js') +var pkg = common.pkg var tmp = path.join(pkg, 'tmp') -var cache = path.join(pkg, 'cache') +var cache = common.cache var fs = require('fs') -var common = require('../common-tap.js') var mr = require('npm-registry-mock') -var osenv = require('osenv') var packageJson = path.resolve(pkg, 'package.json') -test('setup', function (t) { - setup() - t.pass('setup success') - t.done() -}) +fs.writeFileSync(packageJson, JSON.stringify({ + 'name': 'sorted-package-json', + 'version': '0.0.0', + 'description': '', + 'main': 'index.js', + 'scripts': { + 'test': 'echo \'Error: no test specified\' && exit 1' + }, + 'author': 'Rocko Artischocko', + 'license': 'ISC', + 'dependencies': { + 'underscore': '^1.3.3', + 'request': '^0.9.0' + } +}, null, 2), 'utf8') test('sorting dependencies', function (t) { var before = JSON.parse(fs.readFileSync(packageJson).toString()) @@ -52,38 +59,3 @@ test('sorting dependencies', function (t) { }) }) }) - -test('cleanup', function (t) { - cleanup() - t.pass('cleaned up') - t.end() -}) - -function setup () { - cleanup() - mkdirp.sync(pkg) - - fs.writeFileSync(packageJson, JSON.stringify({ - 'name': 'sorted-package-json', - 'version': '0.0.0', - 'description': '', - 'main': 'index.js', - 'scripts': { - 'test': 'echo \'Error: no test specified\' && exit 1' - }, - 'author': 'Rocko Artischocko', - 'license': 'ISC', - 'dependencies': { - 'underscore': '^1.3.3', - 'request': '^0.9.0' - } - }, null, 2), 'utf8') -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(cache) - rimraf.sync(pkg) - mkdirp.sync(cache) - mkdirp.sync(tmp) -} diff --git a/test/tap/spawn-enoent-help.js b/test/tap/spawn-enoent-help.js index d4a6fcdd832ad..cb7c942be8e7b 100644 --- a/test/tap/spawn-enoent-help.js +++ b/test/tap/spawn-enoent-help.js @@ -1,10 +1,9 @@ -var path = require('path') var test = require('tap').test var rimraf = require('rimraf') var mkdirp = require('mkdirp') var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 
'spawn-enoent-help') +var pkg = common.pkg common.pendIfWindows('man pages are not built on Windows') diff --git a/test/tap/spawn-enoent.js b/test/tap/spawn-enoent.js index c81460fdcff82..78153572eeb70 100644 --- a/test/tap/spawn-enoent.js +++ b/test/tap/spawn-enoent.js @@ -1,11 +1,10 @@ -var path = require('path') var test = require('tap').test var fs = require('fs') var rimraf = require('rimraf') var mkdirp = require('mkdirp') var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'spawn-enoent') +var pkg = common.pkg var pj = JSON.stringify({ name: 'x', version: '1.2.3', diff --git a/test/tap/spec-local-specifiers.js b/test/tap/spec-local-specifiers.js index d149b7ea0ea3a..6ea65278cda26 100644 --- a/test/tap/spec-local-specifiers.js +++ b/test/tap/spec-local-specifiers.js @@ -11,9 +11,9 @@ var Dir = Tacks.Dir var common = require('../common-tap.js') var isWindows = require('../../lib/utils/is-windows.js') -var basedir = path.join(__dirname, path.basename(__filename, '.js')) +var basedir = common.pkg var testdir = path.join(basedir, 'testdir') -var cachedir = path.join(basedir, 'cache') +var cachedir = common.cache var globaldir = path.join(basedir, 'global') var tmpdir = path.join(basedir, 'tmp') @@ -586,7 +586,7 @@ test('save behavior', function (t) { t.is(deps['sb-transitive'], 'file:../sb-transitive', 'package.json') var sdep = shrinkwrap.dependencies['sb-transitive'] || {} var tdep = sdep.dependencies.sbta - t.like(tdep, {bundled: true, version: 'file:../sb-transitive/sbta'}, 'npm-shrinkwrap.json transitive dep') + t.like(tdep, {bundled: null, version: 'file:../sb-transitive/sbta'}, 'npm-shrinkwrap.json transitive dep') t.like(sdep, {bundled: null, version: 'file:../sb-transitive'}, 'npm-shrinkwrap.json direct dep') t.done() }) diff --git a/test/tap/splat-with-only-prerelease-to-latest.js b/test/tap/splat-with-only-prerelease-to-latest.js index cf5aac110bd94..ac2c58b5b9de9 100644 --- a/test/tap/splat-with-only-prerelease-to-latest.js +++ b/test/tap/splat-with-only-prerelease-to-latest.js @@ -3,12 +3,9 @@ const common = require('../common-tap.js') const mr = require('npm-registry-mock') const npm = require('../../lib/npm') -const osenv = require('osenv') -const path = require('path') -const rimraf = require('rimraf') const test = require('tap').test -const testdir = path.join(__dirname, path.basename(__filename, '.js')) +const testdir = common.pkg const moduleName = 'xyzzy-wibble' const testModule = { @@ -62,7 +59,7 @@ test('setup', (t) => { npm.load({ loglevel: 'silent', registry: common.registry, - cache: path.join(testdir, 'cache') + cache: common.cache }, (err) => { if (err) { throw err } t.ok(true, 'npm loaded') @@ -96,11 +93,3 @@ test('splat', (t) => { server.close() }) }) - -test('cleanup', (t) => { - process.chdir(osenv.tmpdir()) - rimraf(testdir, () => { - t.ok(true, 'cleaned up test dir') - t.done() - }) -}) diff --git a/test/tap/startstop.js b/test/tap/startstop.js index 0e9d2d94020b2..dd07352e0dc81 100644 --- a/test/tap/startstop.js +++ b/test/tap/startstop.js @@ -2,13 +2,11 @@ var fs = require('graceful-fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap') -var pkg = path.resolve(__dirname, 'startstop') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg } @@ -33,7 +31,6 @@ function testOutput (t, command, er, code, stdout, stderr) { } test('setup', function (t) { - cleanup() mkdirp.sync(pkg) fs.writeFileSync( 
path.join(pkg, 'package.json'), @@ -62,13 +59,3 @@ test('npm restart', function (t) { t.end() }) }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/symlink-cycle.js b/test/tap/symlink-cycle.js index 5bee2c5569ab6..ea0c0f5359443 100644 --- a/test/tap/symlink-cycle.js +++ b/test/tap/symlink-cycle.js @@ -3,12 +3,10 @@ var fs = require('fs') var path = require('path') var test = require('tap').test var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var writeFileSync = require('fs').writeFileSync var common = require('../common-tap.js') -var base = path.join(__dirname, path.basename(__filename, '.js')) +var base = common.pkg var cycle = path.join(base, 'cycle') var cycleJSON = { @@ -27,7 +25,12 @@ var cycleJSON = { } test('setup', function (t) { - setup() + mkdirp.sync(path.join(cycle, 'node_modules')) + writeFileSync( + path.join(cycle, 'package.json'), + JSON.stringify(cycleJSON, null, 2) + ) + fs.symlinkSync(cycle, path.join(cycle, 'node_modules', 'cycle'), 'junction') t.end() }) @@ -39,23 +42,3 @@ test('ls', function (t) { t.end() }) }) - -test('cleanup', function (t) { - process.chdir(osenv.tmpdir()) - cleanup() - t.end() -}) - -function cleanup () { - rimraf.sync(base) -} - -function setup () { - cleanup() - mkdirp.sync(path.join(cycle, 'node_modules')) - writeFileSync( - path.join(cycle, 'package.json'), - JSON.stringify(cycleJSON, null, 2) - ) - fs.symlinkSync(cycle, path.join(cycle, 'node_modules', 'cycle'), 'junction') -} diff --git a/test/tap/tag-version-prefix.js b/test/tap/tag-version-prefix.js index 555de1af16518..70c968705ab07 100644 --- a/test/tap/tag-version-prefix.js +++ b/test/tap/tag-version-prefix.js @@ -2,15 +2,11 @@ var common = require('../common-tap.js') var fs = require('fs') var path = require('path') -var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var npm = require('../../lib/npm.js') -var pkg = path.resolve(__dirname, 'version-message-config') -var cache = path.resolve(pkg, 'cache') +var pkg = common.pkg var npmrc = path.resolve(pkg, '.npmrc') var packagePath = path.resolve(pkg, 'package.json') @@ -70,23 +66,8 @@ test('npm version <semver> with message config', function (t) { }) }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - // windows fix for locked files - process.chdir(osenv.tmpdir()) - - rimraf.sync(pkg) -} - function setup () { - cleanup() - mkdirp.sync(cache) process.chdir(pkg) - fs.writeFileSync(packagePath, JSON.stringify(json), 'utf8') fs.writeFileSync(npmrc, configContents, 'ascii') } diff --git a/test/tap/tagged-version-matching.js b/test/tap/tagged-version-matching.js index 55dfb7b7c4b1c..a939c21c0d749 100644 --- a/test/tap/tagged-version-matching.js +++ b/test/tap/tagged-version-matching.js @@ -7,9 +7,9 @@ var Dir = Tacks.Dir var Symlink = Tacks.Symlink var common = require('../common-tap.js') -var basedir = path.join(__dirname, path.basename(__filename, '.js')) +var basedir = common.pkg var testdir = path.join(basedir, 'testdir') -var cachedir = path.join(basedir, 'cache') +var cachedir = common.cache var globaldir = path.join(basedir, 'global') var tmpdir = path.join(basedir, 'tmp') diff --git a/test/tap/team.js b/test/tap/team.js index 38caadde53870..17acf82f4b231 100644 --- a/test/tap/team.js +++ b/test/tap/team.js @@ -22,17 +22,19 @@ test('team create basic', function (t) { 
deleted: null } server.put('/-/org/myorg/team', JSON.stringify({ - name: teamData.name + name: teamData.name, + description: null })).reply(200, teamData) common.npm([ 'team', 'create', 'myorg:' + teamData.name, '--registry', common.registry, - '--loglevel', 'silent' + '--loglevel', 'error', + '--json' ], {}, function (err, code, stdout, stderr) { t.ifError(err, 'npm team') t.equal(code, 0, 'exited OK') t.equal(stderr, '', 'no error output') - t.same(JSON.parse(stdout), teamData) + t.same(JSON.parse(stdout), {created: true, team: `myorg:${teamData.name}`}) t.end() }) }) @@ -46,17 +48,19 @@ test('team create (allow optional @ prefix on scope)', function (t) { deleted: null } server.put('/-/org/myorg/team', JSON.stringify({ - name: teamData.name + name: teamData.name, + description: null })).reply(200, teamData) common.npm([ 'team', 'create', '@myorg:' + teamData.name, '--registry', common.registry, - '--loglevel', 'silent' + '--loglevel', 'silent', + '--json' ], {}, function (err, code, stdout, stderr) { t.ifError(err, 'npm team') t.equal(code, 0, 'exited OK') t.equal(stderr, '', 'no error output') - t.same(JSON.parse(stdout), teamData) + t.same(JSON.parse(stdout), {created: true, team: `myorg:${teamData.name}`}) t.end() }) }) @@ -73,12 +77,36 @@ test('team destroy', function (t) { common.npm([ 'team', 'destroy', 'myorg:' + teamData.name, '--registry', common.registry, - '--loglevel', 'silent' + '--loglevel', 'silent', + '--json' ], {}, function (err, code, stdout, stderr) { t.ifError(err, 'npm team') t.equal(code, 0, 'exited OK') t.equal(stderr, '', 'no error output') - t.same(JSON.parse(stdout), teamData) + t.same(JSON.parse(stdout), {deleted: true, team: `myorg:${teamData.name}`}) + t.end() + }) +}) + +test('team destroy is not allowed for the default developers team', (t) => { + const teamData = { + name: 'developers', + scope_id: 1234, + created: '2015-07-23T18:07:49.959Z', + updated: '2015-07-23T18:07:49.959Z', + deleted: '2015-07-23T18:27:27.178Z' + } + server.delete('/-/team/myorg/' + teamData.name).reply(405, teamData) + common.npm([ + 'team', 'destroy', 'myorg:' + teamData.name, + '--registry', common.registry, + '--loglevel', 'silent', + '--json' + ], {}, function (err, code, stdout, stderr) { + t.ifError(err, 'npm team') + t.equal(code, 1, 'exited with code 1') + t.equal(stderr, '', 'no error output') + t.match(JSON.parse(stdout), {error: {code: 'E405'}}) t.end() }) }) @@ -87,11 +115,12 @@ test('team add', function (t) { var user = 'zkat' server.put('/-/team/myorg/myteam/user', JSON.stringify({ user: user - })).reply(200) + })).reply(200, {}) common.npm([ 'team', 'add', 'myorg:myteam', user, '--registry', common.registry, - '--loglevel', 'silent' + '--loglevel', 'error', + '--json' ], {}, function (err, code, stdout, stderr) { t.ifError(err, 'npm team') t.equal(code, 0, 'exited OK') @@ -104,11 +133,12 @@ test('team rm', function (t) { var user = 'zkat' server.delete('/-/team/myorg/myteam/user', JSON.stringify({ user: user - })).reply(200) + })).reply(200, {}) common.npm([ 'team', 'rm', 'myorg:myteam', user, '--registry', common.registry, - '--loglevel', 'silent' + '--loglevel', 'silent', + '--json' ], {}, function (err, code, stdout, stderr) { t.ifError(err, 'npm team') t.equal(code, 0, 'exited OK') @@ -123,7 +153,8 @@ test('team ls (on org)', function (t) { common.npm([ 'team', 'ls', 'myorg', '--registry', common.registry, - '--loglevel', 'silent' + '--loglevel', 'silent', + '--json' ], {}, function (err, code, stdout, stderr) { t.ifError(err, 'npm team') t.equal(code, 0, 
'exited OK') @@ -139,12 +170,13 @@ test('team ls (on team)', function (t) { common.npm([ 'team', 'ls', 'myorg:myteam', '--registry', common.registry, - '--loglevel', 'silent' + '--loglevel', 'silent', + '--json' ], {}, function (err, code, stdout, stderr) { t.ifError(err, 'npm team') t.equal(code, 0, 'exited OK') t.equal(stderr, '', 'no error output') - t.same(JSON.parse(stdout), users) + t.same(JSON.parse(stdout).sort(), users.sort()) t.end() }) }) diff --git a/test/tap/tree-style.js b/test/tap/tree-style.js index 12fdb0bbd9ac4..5d8abef3698b2 100644 --- a/test/tap/tree-style.js +++ b/test/tap/tree-style.js @@ -7,7 +7,7 @@ var rimraf = require('rimraf') var fs = require('graceful-fs') var common = require('../common-tap') -var base = path.resolve(__dirname, path.basename(__filename, '.js')) +var base = common.pkg var modA = path.resolve(base, 'modA') var modB = path.resolve(base, 'modB') var modC = path.resolve(base, 'modC') diff --git a/test/tap/umask-lifecycle.js b/test/tap/umask-lifecycle.js index c4c323363775e..dc365c94abbd5 100644 --- a/test/tap/umask-lifecycle.js +++ b/test/tap/umask-lifecycle.js @@ -1,5 +1,4 @@ var fs = require('fs') -var path = require('path') var mkdirp = require('mkdirp') var rimraf = require('rimraf') @@ -9,7 +8,7 @@ var sprintf = require('sprintf-js').sprintf var escapeExecPath = require('../../lib/utils/escape-exec-path.js') var escapeArg = require('../../lib/utils/escape-arg.js') var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'umask-lifecycle') +var pkg = common.pkg var nodeCmd = escapeExecPath(common.nodeBin) var npmCmd = nodeCmd + ' ' + escapeArg(common.bin) @@ -24,7 +23,7 @@ var pj = JSON.stringify({ var umask = process.umask() var expected = [ '', - '> x@1.2.3 umask ' + path.join(__dirname, 'umask-lifecycle'), + '> x@1.2.3 umask ' + pkg, '> ' + umaskScript, '', sprintf('%04o', umask), @@ -41,12 +40,13 @@ test('setup', function (t) { }) test('umask script', function (t) { - common.npm(['run', 'umask'], { + common.npm(['run', 'umask', '--scripts-prepend-node-path'], { cwd: pkg, env: { PATH: process.env.PATH, Path: process.env.Path, - 'npm_config_loglevel': 'warn' + 'npm_config_loglevel': 'warn', + nodeExecPath: process.execPath } }, function (er, code, sout, serr) { t.equal(sout, expected) diff --git a/test/tap/uninstall-link-clean.js b/test/tap/uninstall-link-clean.js index 2b1d244d00fde..e21c370f603b7 100644 --- a/test/tap/uninstall-link-clean.js +++ b/test/tap/uninstall-link-clean.js @@ -8,7 +8,7 @@ var test = require('tap').test var common = require('../common-tap.js') -var testdir = path.join(__dirname, path.basename(__filename, '.js')) +var testdir = common.pkg var pkg = path.join(testdir, 'pkg') var dep = path.join(testdir, 'dep') var work = path.join(testdir, 'uninstall-link-clean-TEST') diff --git a/test/tap/uninstall-package.js b/test/tap/uninstall-package.js index 7cc7b1da1f8a8..3e0b404b6c457 100644 --- a/test/tap/uninstall-package.js +++ b/test/tap/uninstall-package.js @@ -1,15 +1,12 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var pkg = path.join(__dirname, 'uninstall-package') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg, stdio: [0, 'pipe', 2] } @@ -18,19 +15,16 @@ var json = { version: '0.0.0', dependencies: { underscore: '~1.3.1', - request: '~0.9.0' + request: '~0.9.0', + 
'@isaacs/namespace-test': '1.x' } } test('setup', function (t) { - cleanup() - mkdirp.sync(pkg) - process.chdir(pkg) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) ) - t.end() }) @@ -69,12 +63,23 @@ test('returns a list of removed items', function (t) { }) }) -test('cleanup', function (t) { - cleanup() - t.end() +test('does not fail if installed package lacks a name somehow', function (t) { + const scope = path.resolve(pkg, 'node_modules/@isaacs') + const scopePkg = path.resolve(scope, 'namespace-test') + const pj = path.resolve(scopePkg, 'package.json') + fs.writeFileSync(pj, JSON.stringify({ + lol: 'yolo', + name: 99 + })) + common.npm( + ['uninstall', '@isaacs/namespace-test'], + EXEC_OPTS, + function (err, code, stdout, stderr) { + if (err) throw err + t.equal(code, 0, 'should exit successfully') + t.has(stdout, /removed 1 package in/) + t.notOk(fs.existsSync(scope), 'scoped package removed') + t.end() + } + ) }) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} diff --git a/test/tap/uninstall-save.js b/test/tap/uninstall-save.js index 9bf342d7cf955..bf1683edcab2c 100644 --- a/test/tap/uninstall-save.js +++ b/test/tap/uninstall-save.js @@ -3,14 +3,12 @@ var path = require('path') var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') -var server -var pkg = path.join(__dirname, path.basename(__filename, '.js')) +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg, stdio: [0, 'ignore', 2] } @@ -20,10 +18,14 @@ var json = { } test('setup', function (t) { - setup() + mkdirp.sync(path.resolve(pkg, 'node_modules')) + fs.writeFileSync( + path.join(pkg, 'package.json'), + JSON.stringify(json, null, 2) + ) mr({ port: common.port }, function (er, s) { t.ifError(er, 'started mock registry') - server = s + t.parent.teardown(() => s.close()) t.end() }) }) @@ -68,25 +70,3 @@ test('uninstall --save removes rm-ed package from package.json', function (t) { ) }) }) - -test('cleanup', function (t) { - server.close() - cleanup() - t.end() -}) - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} - -function setup () { - cleanup() - mkdirp.sync(path.resolve(pkg, 'node_modules')) - fs.writeFileSync( - path.join(pkg, 'package.json'), - JSON.stringify(json, null, 2) - ) - - process.chdir(pkg) -} diff --git a/test/tap/unit-token-validate-cidr.js b/test/tap/unit-token-validate-cidr.js index db963c31f386d..cda91b8f1e0da 100644 --- a/test/tap/unit-token-validate-cidr.js +++ b/test/tap/unit-token-validate-cidr.js @@ -1,7 +1,6 @@ 'use strict' const test = require('tap').test -const requireInject = require('require-inject') -const validateCIDRList = requireInject('../../lib/token.js', {'../../lib/npm.js': {}})._validateCIDRList +const validateCIDRList = require('../../lib/token.js')._validateCIDRList test('validateCIDRList', (t) => { t.plan(10) diff --git a/test/tap/unpack-foreign-tarball.js b/test/tap/unpack-foreign-tarball.js index d128e94d8c37d..b3a9026f84ab2 100644 --- a/test/tap/unpack-foreign-tarball.js +++ b/test/tap/unpack-foreign-tarball.js @@ -1,19 +1,18 @@ var fs = require('graceful-fs') var path = require('path') -var test = require('tap').test +var t = require('tap') var mkdirp = require('mkdirp') -var osenv = require('osenv') var rimraf = require('rimraf') var common = require('../common-tap.js') var fixtures = path.resolve(__dirname, '..', 'fixtures') -var pkg = 
path.resolve(__dirname, 'unpack-foreign-tarball') +var pkg = common.pkg var nm = path.resolve(pkg, 'node_modules') var target = path.resolve(nm, 'npm-test-gitignore') -var cache = path.resolve(pkg, 'cache') +var cache = common.cache var tmp = path.resolve(pkg, 'tmp') var EXEC_OPTS = { @@ -24,64 +23,62 @@ var EXEC_OPTS = { cwd: pkg } -function verify (t, files, err, code) { +function verify (t, files, code) { if (code) { - t.fail('exited with failure: ' + code) - return t.end() + return t.fail('exited with failure: ' + code) } var actual = fs.readdirSync(target).sort() var expect = files.concat(['.npmignore', 'package.json']).sort() t.same(actual, expect) - t.end() } -test('setup', function (t) { - setup() - t.comment('test for https://github.com/npm/npm/issues/5658') - t.end() -}) +t.comment('test for https://github.com/npm/npm/issues/5658') -test('npmignore only', function (t) { +t.test('npmignore only', function (t) { + t.test('setup', setup) var file = path.resolve(fixtures, 'npmignore.tgz') - common.npm(['install', file], EXEC_OPTS, verify.bind(null, t, ['foo'])) + return t.test('test', t => common.npm(['install', file], EXEC_OPTS) + .then(([code]) => verify(t, ['foo'], code))) }) -test('gitignore only', function (t) { - setup() +t.test('gitignore only', function (t) { + t.test('setup', setup) var file = path.resolve(fixtures, 'gitignore.tgz') - common.npm(['install', file], EXEC_OPTS, verify.bind(null, t, ['foo'])) + return t.test('test', t => common.npm(['install', file], EXEC_OPTS) + .then(([code]) => verify(t, ['foo'], code))) }) -test('gitignore and npmignore', function (t) { - setup() +t.test('gitignore and npmignore', function (t) { + t.test('setup', setup) var file = path.resolve(fixtures, 'gitignore-and-npmignore.tgz') - common.npm(['install', file], EXEC_OPTS, verify.bind(null, t, ['foo', 'bar'])) + return t.test('test', t => common.npm(['install', file], EXEC_OPTS) + .then(([code]) => verify(t, ['foo', 'bar'], code))) }) -test('gitignore and npmignore, not gzipped 1/2', function (t) { - setup() +t.test('gitignore and npmignore, not gzipped 1/2', function (t) { + t.test('setup', setup) var file = path.resolve(fixtures, 'gitignore-and-npmignore.tar') - common.npm(['install', file], EXEC_OPTS, verify.bind(null, t, ['foo', 'bar'])) + return t.test('test', t => common.npm(['install', file], EXEC_OPTS) + .then(([code]) => verify(t, ['foo', 'bar'], code))) }) -test('gitignore and npmignore, not gzipped 2/2', function (t) { - setup() +t.test('gitignore and npmignore, not gzipped 2/2', function (t) { + t.test('setup', setup) var file = path.resolve(fixtures, 'gitignore-and-npmignore-2.tar') - common.npm(['install', file], EXEC_OPTS, verify.bind(null, t, ['foo', 'bar'])) + return t.test('test', t => common.npm(['install', file], EXEC_OPTS) + .then(([code]) => verify(t, ['foo', 'bar'], code))) }) -test('cleanup', function (t) { - cleanup() +function setup (t) { + t.test('destroy', t => { + t.plan(2) + t.test('node_modules', t => rimraf(nm, t.end)) + t.test('tmp', t => rimraf(tmp, t.end)) + }) + t.test('create', t => { + mkdirp.sync(nm) + mkdirp.sync(tmp) + t.end() + }) t.end() -}) - -function setup () { - cleanup() - mkdirp.sync(nm) - mkdirp.sync(tmp) -} - -function cleanup () { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) } diff --git a/test/tap/unpublish-config.js b/test/tap/unpublish-config.js index f5d391d8c29b8..6d5c981d08d95 100644 --- a/test/tap/unpublish-config.js +++ b/test/tap/unpublish-config.js @@ -2,15 +2,11 @@ var fs = require('graceful-fs') var http = 
require('http') var path = require('path') -var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test -var pkg = path.join(__dirname, 'npm-test-unpublish-config') -var fixturePath = path.join(pkg, 'fixture_npmrc') - var common = require('../common-tap.js') +var pkg = common.pkg +var fixturePath = path.join(pkg, 'fixture_npmrc') var json = { name: 'npm-test-unpublish-config', @@ -19,15 +15,13 @@ var json = { } test('setup', function (t) { - mkdirp.sync(pkg) - fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json), 'utf8' ) fs.writeFileSync( fixturePath, - '//localhost:1337/:_authToken = beeeeeeeeeeeeef\n' + + '//localhost:' + common.port + '/:_authToken = beeeeeeeeeeeeef\n' + 'registry = http://lvh.me:4321/registry/path\n' ) @@ -64,7 +58,7 @@ test('cursory test of unpublishing with config', function (t) { HOME: process.env.HOME, Path: process.env.PATH, PATH: process.env.PATH, - USERPROFILE: osenv.home() + USERPROFILE: process.env.USERPROFILE } }, function (err, code) { @@ -74,9 +68,3 @@ test('cursory test of unpublishing with config', function (t) { ) }) }) - -test('cleanup', function (t) { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) - t.end() -}) diff --git a/test/tap/unsupported.js b/test/tap/unsupported.js index 7abc8da741441..2ebbd2d6542e2 100644 --- a/test/tap/unsupported.js +++ b/test/tap/unsupported.js @@ -21,8 +21,8 @@ var versions = [ ['v2.3.1', true, true], ['v3.0.0', true, true], ['v4.5.0', true, true], - ['v4.8.4', false, true], - ['v5.7.1', false, true], + ['v4.8.4', true, true], + ['v5.7.1', true, true], ['v6.8.1', false, false], ['v7.0.0-beta23', false, true], ['v7.2.3', false, true], @@ -30,7 +30,8 @@ var versions = [ ['v9.3.0', false, false], ['v10.0.0-0', false, false], ['v11.0.0-0', false, false], - ['v12.0.0-0', false, false] + ['v12.0.0-0', false, false], + ['v13.0.0-0', false, false] ] test('versions', function (t) { diff --git a/test/tap/update-examples.js b/test/tap/update-examples.js index 8369d002fab29..2217386de8d23 100644 --- a/test/tap/update-examples.js +++ b/test/tap/update-examples.js @@ -1,16 +1,11 @@ var common = require('../common-tap.js') var test = require('tap').test var mkdirp = require('mkdirp') -var rimraf = require('rimraf') -var path = require('path') var mr = require('npm-registry-mock') - -var osenv = require('osenv') - var requireInject = require('require-inject') -var PKG_DIR = path.resolve(__dirname, 'update-examples') -var CACHE_DIR = path.resolve(PKG_DIR, 'cache') +var PKG_DIR = common.pkg +var CACHE_DIR = common.cache // ** constant templates for mocks ** var DEFAULT_PKG = { @@ -65,7 +60,6 @@ var registryMocks = { } // ** dynamic mocks, cloned from templates and modified ** -var mockServer var mockDepJson = clone(DEP_PKG) var mockInstalled = clone(INSTALLED) var mockParentJson = clone(DEFAULT_PKG) @@ -84,10 +78,21 @@ function extend (a, b) { return a } +const path = require('path') +let cacheIteration = 0 +const isRoot = process.getuid && process.getuid() === 0 +const sudoUID = isRoot ? +process.env.SUDO_UID : null +const sudoGID = isRoot ? 
+process.env.SUDO_GID : null +const { chownSync } = require('fs') function resetPackage (options) { - rimraf.sync(CACHE_DIR) + CACHE_DIR = path.resolve(common.cache, '' + cacheIteration++) + npm.config.set('cache', CACHE_DIR) mkdirp.sync(CACHE_DIR) + if (isRoot && sudoUID && sudoGID) { + chownSync(CACHE_DIR, sudoUID, sudoGID) + } + installAskedFor = undefined mockParentJson = clone(DEFAULT_PKG) @@ -138,20 +143,18 @@ var npm = requireInject.installGlobally('../../lib/npm.js', { test('setup', function (t) { t.plan(5) - process.chdir(osenv.tmpdir()) - mkdirp.sync(PKG_DIR) process.chdir(PKG_DIR) t.pass('made ' + PKG_DIR) - resetPackage({}) - mr({ port: common.port, mocks: registryMocks }, function (er, server) { t.pass('mock registry active') - npm.load({ cache: CACHE_DIR, + npm.load({ + cache: CACHE_DIR, registry: common.registry, - cwd: PKG_DIR }, function (err) { + cwd: PKG_DIR + }, function (err) { t.ifError(err, 'started server') - mockServer = server + t.parent.teardown(() => server.close()) t.pass('npm.load complete') @@ -216,12 +219,3 @@ test('update old caret dependency with newer', function (t) { t.end() }) }) - -test('cleanup', function (t) { - mockServer.close() - - process.chdir(osenv.tmpdir()) - rimraf.sync(PKG_DIR) - - t.end() -}) diff --git a/test/tap/update-symlink.js b/test/tap/update-symlink.js new file mode 100644 index 0000000000000..eda07c56e1f69 --- /dev/null +++ b/test/tap/update-symlink.js @@ -0,0 +1,109 @@ +'use strict' +const path = require('path') +const test = require('tap').test +const mr = require('npm-registry-mock') +const Tacks = require('tacks') +const File = Tacks.File +const Symlink = Tacks.Symlink +const Dir = Tacks.Dir +const common = require('../common-tap.js') + +const basedir = common.pkg +const testdir = path.join(basedir, 'testdir') +const cachedir = common.cache +const globaldir = path.join(basedir, 'global') +const tmpdir = path.join(basedir, 'tmp') + +const conf = { + cwd: path.join(testdir, 'main'), + env: Object.assign({}, process.env, { + npm_config_cache: cachedir, + npm_config_tmp: tmpdir, + npm_config_prefix: globaldir, + npm_config_registry: common.registry, + npm_config_loglevel: 'warn' + }) +} + +let server +const fixture = new Tacks(Dir({ + cache: Dir(), + global: Dir(), + tmp: Dir(), + testdir: Dir({ + broken: Dir({ + 'package.json': File({ + name: 'broken', + version: '1.0.0' + }) + }), + main: Dir({ + node_modules: Dir({ + unbroken: Symlink('/testdir/unbroken') + }), + 'package-lock.json': File({ + name: 'main', + version: '1.0.0', + lockfileVersion: 1, + requires: true, + dependencies: { + broken: { + version: 'file:../broken' + }, + unbroken: { + version: 'file:../unbroken' + } + } + }), + 'package.json': File({ + name: 'main', + version: '1.0.0', + dependencies: { + broken: 'file:../broken', + unbroken: 'file:../unbroken' + } + }) + }), + unbroken: Dir({ + 'package.json': File({ + name: 'unbroken', + version: '1.0.0' + }) + }) + }) +})) + +function setup () { + cleanup() + fixture.create(basedir) +} + +function cleanup () { + fixture.remove(basedir) +} + +test('setup', function (t) { + setup() + mr({port: common.port, throwOnUnmatched: true}, function (err, s) { + if (err) throw err + server = s + t.done() + }) +}) + +test('update fixes broken links', function (t) { + common.npm(['update'], conf, function (err, code, stdout, stderr) { + if (err) throw err + t.is(code, 0, 'command ran ok') + t.comment(stdout.trim()) + t.comment(stderr.trim()) + t.match(stdout, '+ broken@1.0.0') + t.done() + }) +}) + +test('cleanup', function (t) { + 
server.close() + cleanup() + t.done() +}) diff --git a/test/tap/upgrade-lifecycles.js b/test/tap/upgrade-lifecycles.js index 0d0b8da616f67..0821cacba60cd 100644 --- a/test/tap/upgrade-lifecycles.js +++ b/test/tap/upgrade-lifecycles.js @@ -6,9 +6,9 @@ var File = Tacks.File var Dir = Tacks.Dir var common = require('../common-tap.js') -var basedir = path.join(__dirname, path.basename(__filename, '.js')) +var basedir = common.pkg var testdir = path.join(basedir, 'testdir') -var cachedir = path.join(basedir, 'cache') +var cachedir = common.cache var globaldir = path.join(basedir, 'global') var tmpdir = path.join(basedir, 'tmp') diff --git a/test/tap/url-dependencies.js b/test/tap/url-dependencies.js index 66b3e1a63b47c..14da5d1fc4fc7 100644 --- a/test/tap/url-dependencies.js +++ b/test/tap/url-dependencies.js @@ -1,16 +1,12 @@ var fs = require('graceful-fs') var path = require('path') -var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap') -var server -var pkg = path.resolve(__dirname, 'url-dependencies') +var pkg = common.pkg var json = { author: 'Steve Mason', @@ -27,87 +23,39 @@ var mockRoutes = { } } -test('setup', function (t) { - mr({ port: common.port, mocks: mockRoutes }, function (er, s) { - server = s - t.end() - }) -}) - -test('url-dependencies: download first time', function (t) { - setup() - - performInstall(t, function (output) { - if (!tarballWasFetched(output)) { - t.fail('Tarball was not fetched') - } else { - t.pass('Tarball was fetched') - } - t.end() - }) -}) - -test('url-dependencies: do not download subsequent times', function (t) { - setup() - - performInstall(t, function () { - performInstall(t, function (output) { - if (tarballWasFetched(output)) { - t.fail('Tarball was fetched second time around') - } else { - t.pass('Tarball was not fetched') - } - t.end() - }) - }) -}) - -test('cleanup', function (t) { - server.close() - cleanup() - t.end() +const tarballWasFetched = output => output.includes( + `GET 200 ${common.registry}/underscore/-/underscore-1.3.1.tgz`) + +const performInstall = () => common.npm(['install'], { + cwd: pkg, + env: { + npm_config_registry: common.registry, + npm_config_cache_lock_stale: 1000, + npm_config_cache_lock_wait: 1000, + npm_config_loglevel: 'http', + HOME: process.env.HOME, + Path: process.env.PATH, + PATH: process.env.PATH + } }) -function cleanup () { - // windows fix for locked files - process.chdir(osenv.tmpdir()) - rimraf.sync(path.resolve(pkg)) -} - -function setup () { - cleanup() - mkdirp.sync(pkg) +test('setup', function (t) { fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) ) -} - -function tarballWasFetched (output) { - return output.indexOf( - 'GET 200 ' + - common.registry + - '/underscore/-/underscore-1.3.1.tgz' - ) > -1 -} - -function performInstall (t, cb) { - var opts = { - cwd: pkg, - env: { - npm_config_registry: common.registry, - npm_config_cache_lock_stale: 1000, - npm_config_cache_lock_wait: 1000, - npm_config_loglevel: 'http', - HOME: process.env.HOME, - Path: process.env.PATH, - PATH: process.env.PATH - } - } - common.npm(['install'], opts, function (err, code, stdout, stderr) { - t.ifError(err, 'install success') - t.notOk(code, 'npm install exited with code 0') + mr({ port: common.port, mocks: mockRoutes }, function (er, s) { + t.parent.teardown(() => s.close()) + t.end() + }) +}) - cb(stderr) +test('url-dependencies: download first time', 
t => + performInstall().then(([code, _, output]) => { + t.equal(code, 0, 'exited successfully') + t.ok(tarballWasFetched(output), 'download first time') }) -} + .then(() => performInstall()).then(([code, _, output]) => { + t.equal(code, 0, 'exited successfully') + t.notOk(tarballWasFetched(output), 'do not download second time') + })) diff --git a/test/tap/utils.funding.js b/test/tap/utils.funding.js new file mode 100644 index 0000000000000..4276f3e43a5cf --- /dev/null +++ b/test/tap/utils.funding.js @@ -0,0 +1,657 @@ +'use strict' + +const { test } = require('tap') +const { retrieveFunding, getFundingInfo } = require('../../lib/utils/funding') + +test('empty tree', (t) => { + t.deepEqual( + getFundingInfo({}), + { + name: null, + dependencies: {}, + length: 0 + }, + 'should return empty list' + ) + t.end() +}) + +test('single item missing funding', (t) => { + t.deepEqual( + getFundingInfo({ name: 'project', + dependencies: { + 'single-item': { + name: 'single-item', + version: '1.0.0' + } + }}), + { + name: 'project', + dependencies: {}, + length: 0 + }, + 'should return empty list' + ) + t.end() +}) + +test('funding object missing url', (t) => { + t.deepEqual( + getFundingInfo({ name: 'project', + dependencies: { + 'single-item': { + name: 'single-item', + version: '1.0.0', + funding: { + type: 'Foo' + } + } + }}), + { + name: 'project', + dependencies: {}, + length: 0 + }, + 'should return empty list' + ) + t.end() +}) + +test('use path if name is missing', (t) => { + t.deepEqual( + getFundingInfo({ name: undefined, + path: '/tmp/foo', + children: { + 'single-item': { + name: 'single-item', + version: '1.0.0' + } + }}), + { + name: '/tmp/foo', + dependencies: {}, + length: 0 + }, + 'should use path as top level name' + ) + t.end() +}) + +test('single item tree', (t) => { + t.deepEqual( + getFundingInfo({ name: 'project', + dependencies: { + 'single-item': { + name: 'single-item', + version: '1.0.0', + funding: { + type: 'foo', + url: 'http://example.com' + } + } + }}), + { + name: 'project', + dependencies: { + 'single-item': { + version: '1.0.0', + funding: { + type: 'foo', + url: 'http://example.com' + } + } + }, + length: 1 + }, + 'should return list with a single item' + ) + t.end() +}) + +test('top-level funding info', (t) => { + t.deepEqual( + getFundingInfo({ name: 'project', + funding: 'http://example.com' + }), + { + name: 'project', + funding: { + url: 'http://example.com' + }, + dependencies: {}, + length: 0 + }, + 'should return top-level item with normalized funding info' + ) + t.end() +}) + +test('use string shorthand', (t) => { + t.deepEqual( + getFundingInfo({ name: 'project', + dependencies: { + 'single-item': { + name: 'single-item', + version: '1.0.0', + funding: 'http://example.com' + } + }}), + { + name: 'project', + dependencies: { + 'single-item': { + version: '1.0.0', + funding: { + url: 'http://example.com' + } + } + }, + length: 1 + }, + 'should return item with normalized funding info' + ) + t.end() +}) + +test('duplicate items along the tree', (t) => { + t.deepEqual( + getFundingInfo({ name: 'project', + version: '2.3.4', + dependencies: { + 'single-item': { + name: 'single-item', + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + }, + dependencies: { + 'shared-top-first': { + name: 'shared-top-first', + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + } + }, + 'sub-dep': { + name: 'sub-dep', + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + }, + dependencies: { + 
'shared-nested-first': { + name: 'shared-nested-first', + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + }, + dependencies: { + 'shared-top-first': { + name: 'shared-top-first', + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + } + } + } + } + } + }, + 'shared-nested-first': { + name: 'shared-nested-first', + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + } + } + } + } + }}), + { + name: 'project', + version: '2.3.4', + dependencies: { + 'single-item': { + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + }, + dependencies: { + 'shared-top-first': { + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + } + }, + 'sub-dep': { + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + } + }, + 'shared-nested-first': { + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + } + } + } + } + }, + length: 4 + }, + 'should return list with a single item' + ) + t.end() +}) + +test('multi-level nested items tree', (t) => { + t.deepEqual( + getFundingInfo({ name: 'project', + dependencies: { + 'first-level-dep': { + name: 'first-level-dep', + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + }, + dependencies: { + 'sub-dep': { + name: 'sub-dep', + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + }, + dependencies: { + package: { + name: 'sub-sub-dep', + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + }, + dependencies: {} + } + } + } + } + } + }}), + { + name: 'project', + dependencies: { + 'first-level-dep': { + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + }, + dependencies: { + 'sub-dep': { + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + }, + dependencies: { + 'sub-sub-dep': { + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + } + } + } + } + } + } + }, + length: 3 + }, + 'should return list with all items' + ) + t.end() +}) + +test('missing fund nested items tree', (t) => { + t.deepEqual( + getFundingInfo({ name: 'project', + dependencies: { + 'first-level-dep': { + name: 'first-level-dep', + version: '1.0.0', + funding: { + type: 'foo' + }, + dependencies: { + 'sub-dep': { + name: 'sub-dep', + version: '1.0.0', + dependencies: { + 'sub-sub-dep-01': { + name: 'sub-sub-dep-01', + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + }, + dependencies: { + 'non-funding-child': { + name: 'non-funding-child', + version: '1.0.0', + dependencies: { + 'sub-sub-sub-dep': { + name: 'sub-sub-sub-dep', + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + } + } + } + } + } + }, + 'sub-sub-dep-02': { + name: 'sub-sub-dep-02', + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + }, + dependencies: {} + }, + 'sub-sub-dep-03': { + name: 'sub-sub-dep-03', + version: '1.0.0', + funding: { + type: 'foo', + url: 'git://example.git' + }, + dependencies: { + 'sub-sub-sub-dep-03': { + name: 'sub-sub-sub-dep-03', + version: '1.0.0', + dependencies: { + 'sub-sub-sub-sub-dep': { + name: 'sub-sub-sub-sub-dep', + version: '1.0.0', + funding: { + type: 'foo', + url: 'http://example.com' + } + } + } + } + } + } + } + } + } + } + }}), + { + name: 'project', + dependencies: { + 'sub-sub-dep-01': { + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + }, + dependencies: { + 
'sub-sub-sub-dep': { + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + } + } + } + }, + 'sub-sub-dep-02': { + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + } + }, + 'sub-sub-sub-sub-dep': { + version: '1.0.0', + funding: { + type: 'foo', + url: 'http://example.com' + } + } + }, + length: 4 + }, + 'should return list excluding missing funding items' + ) + t.end() +}) + +test('countOnly option', (t) => { + t.deepEqual( + getFundingInfo({ name: 'project', + dependencies: { + 'first-level-dep': { + name: 'first-level-dep', + version: '1.0.0', + funding: { + type: 'foo' + }, + dependencies: { + 'sub-dep': { + name: 'sub-dep', + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + }, + dependencies: { + 'sub-sub-dep': { + name: 'sub-sub-dep', + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + } + }, + dependencies: {} + } + }, + 'sub-sub-dep': { + name: 'sub-sub-dep', + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + } + } + } + } + }}, { countOnly: true }), + { + length: 2 + }, + 'should return only the length property' + ) + t.end() +}) + +test('handle different versions', (t) => { + t.deepEqual( + getFundingInfo({ name: 'project', + dependencies: { + foo: { + name: 'foo', + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + }, + dependencies: { + bar: { + name: 'bar', + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + } + } + } + }, + lorem: { + dependencies: { + fooo: { + name: 'foo', + version: '2.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + }, + dependencies: { + 'foo-bar': { + name: 'foo-bar', + version: '1.0.0', + funding: { + type: 'foo', + url: 'https://example.com' + } + } + } + } + } + } + } + }, { countOnly: true }), + { + length: 4 + }, + 'should treat different versions as diff packages' + ) + t.end() +}) + +test('retrieve funding info from valid objects', (t) => { + t.deepEqual( + retrieveFunding({ + url: 'http://example.com', + type: 'Foo' + }), + { + url: 'http://example.com', + type: 'Foo' + }, + 'should return standard object fields' + ) + t.deepEqual( + retrieveFunding({ + extra: 'Foo', + url: 'http://example.com', + type: 'Foo' + }), + { + extra: 'Foo', + url: 'http://example.com', + type: 'Foo' + }, + 'should leave untouched extra fields' + ) + t.deepEqual( + retrieveFunding({ + url: 'http://example.com' + }), + { + url: 'http://example.com' + }, + 'should accept url-only objects' + ) + t.end() +}) + +test('retrieve funding info from invalid objects', (t) => { + t.deepEqual( + retrieveFunding({}), + {}, + 'should passthrough empty objects' + ) + t.deepEqual( + retrieveFunding(), + undefined, + 'should not care about undefined' + ) + t.deepEqual( + retrieveFunding(), + null, + 'should not care about null' + ) + t.end() +}) + +test('retrieve funding info string shorthand', (t) => { + t.deepEqual( + retrieveFunding('http://example.com'), + { + url: 'http://example.com' + }, + 'should accept string shorthand' + ) + t.end() +}) + +test('retrieve funding info from an array', (t) => { + t.deepEqual( + retrieveFunding([ + 'http://example.com', + { + url: 'http://two.example.com' + }, + 'http://three.example.com', + { + url: 'http://three.example.com', + type: 'dos' + }, + { + url: 'http://three.example.com', + type: 'third copy!', + extra: 'extra metadata!' 
+ } + ]), + [ + { + url: 'http://example.com' + }, + { + url: 'http://two.example.com' + }, + { + url: 'http://three.example.com' + }, + { + url: 'http://three.example.com', + type: 'dos' + }, + { + url: 'http://three.example.com', + type: 'third copy!', + extra: 'extra metadata!' + } + ], + 'should accept and normalize multiple funding sources' + ) + t.end() +}) diff --git a/test/tap/verify-no-lifecycle-on-repo.js b/test/tap/verify-no-lifecycle-on-repo.js index babdfb7dace23..c9232715a7079 100644 --- a/test/tap/verify-no-lifecycle-on-repo.js +++ b/test/tap/verify-no-lifecycle-on-repo.js @@ -5,9 +5,9 @@ var mkdirp = require('mkdirp') var rimraf = require('rimraf') var test = require('tap').test var requireInject = require('require-inject') -require('../common-tap.js') +const common = require('../common-tap.js') -var base = path.join(__dirname, path.basename(__filename, '.js')) +var base = common.pkg var baseJSON = { name: 'base', diff --git a/test/tap/version-allow-same-version.js b/test/tap/version-allow-same-version.js index 66f568dec9721..14506aac7d928 100644 --- a/test/tap/version-allow-same-version.js +++ b/test/tap/version-allow-same-version.js @@ -1,65 +1,70 @@ -var fs = require('graceful-fs') -var path = require('path') +const fs = require('graceful-fs') +const path = require('path') +const t = require('tap') +const common = require('../common-tap.js') +const npm = require('../../') +const pkg = common.pkg +const cache = common.cache +const npmrc = path.resolve(pkg, './.npmrc') +const configContents = 'sign-git-tag=false\n' -var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') -var test = require('tap').test - -var common = require('../common-tap.js') -var npm = require('../../') -var pkg = path.resolve(__dirname, 'version-allow-same-version') -var cache = path.resolve(pkg, 'cache') -var npmrc = path.resolve(pkg, './.npmrc') -var configContents = 'sign-git-tag=false\n' - -test('npm version <semver> with same version without --allow-same-version', function (t) { - setup() +t.test('setup', t => { + process.chdir(pkg) + fs.writeFileSync(npmrc, configContents, 'ascii') fs.writeFileSync(path.resolve(pkg, 'package.json'), JSON.stringify({ author: 'Lucas Theisen', name: 'version-allow-same-version', version: '0.0.1', description: 'Test for npm version without --allow-same-version' }), 'utf8') - npm.load({cache: cache, 'allow-same-version': false, registry: common.registry}, function () { - var version = require('../../lib/version') - version(['0.0.1'], function (err) { - t.ok(err) - t.like(err.message, /Version not changed/) - t.end() - }) - }) + npm.load({cache: cache, 'allow-same-version': false, registry: common.registry}, t.end) }) -test('npm version <semver> with same version with --allow-same-version', function (t) { - setup() - fs.writeFileSync(path.resolve(pkg, 'package.json'), JSON.stringify({ - author: 'Lucas Theisen', - name: 'version-allow-same-version', - version: '0.0.1', - description: 'Test for npm version without --allow-same-version' - }), 'utf8') - npm.load({cache: cache, 'allow-same-version': true, registry: common.registry}, function () { - var version = require('../../lib/version') - version(['0.0.1'], - function (err) { t.ok(!err) }, - function () { t.end() }) + +t.test('without --allow-same-version', t => { + npm.config.set('allow-same-version', false) + + const version = require('../../lib/version') + + const commit1 = version.buildCommitArgs() + const commit2 = version.buildCommitArgs([ 'commit' ]) + const commit3 = 
version.buildCommitArgs([ 'commit', '-m', 'some commit message' ]) + + t.same(commit1, [ 'commit' ]) + t.same(commit2, [ 'commit' ]) + t.same(commit3, [ 'commit', '-m', 'some commit message' ]) + + const tag = version.buildTagFlags() + + t.same(tag, '-m') + + npm.commands.version(['0.0.1'], function (err) { + t.isa(err, Error, 'got an error') + t.like(err.message, /Version not changed/) + t.end() }) }) -test('cleanup', function (t) { - cleanup() - t.end() -}) -function cleanup () { - // windows fix for locked files - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} +t.test('with --allow-same-version', t => { + npm.config.set('allow-same-version', true) -function setup () { - mkdirp.sync(pkg) - mkdirp.sync(path.join(pkg, 'node_modules')) - mkdirp.sync(cache) - fs.writeFileSync(npmrc, configContents, 'ascii') - process.chdir(pkg) -} + const version = require('../../lib/version') + + const commit1 = version.buildCommitArgs() + const commit2 = version.buildCommitArgs([ 'commit' ]) + const commit3 = version.buildCommitArgs([ 'commit', '-m', 'some commit message' ]) + + t.same(commit1, [ 'commit', '--allow-empty' ]) + t.same(commit2, [ 'commit', '--allow-empty' ]) + t.same(commit3, [ 'commit', '--allow-empty', '-m', 'some commit message' ]) + + const tag = version.buildTagFlags() + + t.same(tag, '-fm') + + npm.commands.version(['0.0.1'], function (err) { + if (err) { + throw err + } + t.end() + }) +}) diff --git a/test/tap/version-commit-hooks.js b/test/tap/version-commit-hooks.js index 4791fc3f3c4d2..36694d7eae959 100644 --- a/test/tap/version-commit-hooks.js +++ b/test/tap/version-commit-hooks.js @@ -1,9 +1,7 @@ -var fs = require('graceful-fs') -var path = require('path') -var osenv = require('osenv') -var mkdirp = require('mkdirp') -var rimraf = require('rimraf') -var pkg = path.resolve(__dirname, 'version-commit-hooks') +const fs = require('graceful-fs') +const path = require('path') +const common = require('../common-tap.js') +const pkg = common.pkg var test = require('tap').test var npm = require('../../') @@ -11,7 +9,6 @@ var npm = require('../../') delete process.env['npm_config_commit_hooks'] test('npm version <semver> with commit-hooks disabled in .npmrc', function (t) { - mkdirp.sync(pkg) var npmrc = path.resolve(pkg, '.npmrc') fs.writeFileSync(npmrc, 'commit-hooks=false\n', 'ascii') process.chdir(pkg) @@ -36,7 +33,7 @@ test('npm version <semver> with commit-hooks disabled', function (t) { t.same(args1, [ 'commit', '-n' ]) t.same(args2, [ 'commit', '-n' ]) - t.same(args3, [ 'commit', '-m', 'some commit message', '-n' ]) + t.same(args3, [ 'commit', '-n', '-m', 'some commit message' ]) t.end() }) }) @@ -56,9 +53,3 @@ test('npm version <semver> with commit-hooks enabled (default)', function (t) { t.end() }) }) - -test('cleanup', function (t) { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) - t.end() -}) diff --git a/test/tap/version-consistent-newlines.js b/test/tap/version-consistent-newlines.js index 11020b2a7004e..583874db7a305 100644 --- a/test/tap/version-consistent-newlines.js +++ b/test/tap/version-consistent-newlines.js @@ -3,15 +3,13 @@ const common = require('../common-tap.js') const test = require('tap').test const npm = require('../../') -const osenv = require('osenv') const path = require('path') const fs = require('fs') const mkdirp = require('mkdirp') -const rimraf = require('rimraf') const requireInject = require('require-inject') -const pkg = path.resolve(__dirname, 'version-no-git') -const cache = path.resolve(pkg, 'cache') +const pkg = common.pkg +const cache = 
common.cache const gitDir = path.resolve(pkg, '.git') test('npm version does not alter the line endings in package.json (LF)', function (t) { @@ -66,16 +64,7 @@ test('npm version does not alter the line endings in package.json (CRLF)', funct }) }) -test('cleanup', function (t) { - process.chdir(osenv.tmpdir()) - - rimraf.sync(pkg) - t.end() -}) - function setup (lineEnding) { - mkdirp.sync(pkg) - mkdirp.sync(cache) mkdirp.sync(gitDir) fs.writeFileSync( path.resolve(pkg, 'package.json'), diff --git a/test/tap/version-from-git.js b/test/tap/version-from-git.js index 1dc649beb4212..e63865a73378a 100644 --- a/test/tap/version-from-git.js +++ b/test/tap/version-from-git.js @@ -3,15 +3,14 @@ var fs = require('fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') var rimraf = require('rimraf') var test = require('tap').test var npm = require('../../lib/npm.js') -var pkg = path.resolve(__dirname, 'version-from-git') +var pkg = common.pkg var packagePath = path.resolve(pkg, 'package.json') -var cache = path.resolve(pkg, 'cache') +var cache = common.cache var json = { name: 'cat', version: '0.1.2' } @@ -187,20 +186,10 @@ test('npm version from-git without any versions', function (t) { } }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - // windows fix for locked files - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} - function setup () { - cleanup() - mkdirp.sync(cache) + process.chdir(__dirname) + rimraf.sync(pkg) + mkdirp.sync(pkg) process.chdir(pkg) fs.writeFileSync(packagePath, JSON.stringify(json), 'utf8') } diff --git a/test/tap/version-git-not-clean.js b/test/tap/version-git-not-clean.js index 43e2549ceb280..486e2e0766181 100644 --- a/test/tap/version-git-not-clean.js +++ b/test/tap/version-git-not-clean.js @@ -1,19 +1,15 @@ var common = require('../common-tap.js') var test = require('tap').test var npm = require('../../') -var osenv = require('osenv') -var path = require('path') var fs = require('fs') -var rimraf = require('rimraf') -var mkdirp = require('mkdirp') var which = require('which') var spawn = require('child_process').spawn -var pkg = path.resolve(__dirname, 'version-git-not-clean') -var cache = path.resolve(pkg, 'cache') +var pkg = common.pkg +var cache = common.cache test('npm version <semver> with working directory not clean', function (t) { - setup() + process.chdir(pkg) npm.load({ cache: cache, registry: common.registry, prefix: pkg }, function () { which('git', function (err, git) { t.ifError(err, 'git found') @@ -81,17 +77,3 @@ test('npm version <semver> --force with working directory not clean', function ( t.end() }) }) - -test('cleanup', function (t) { - // windows fix for locked files - process.chdir(osenv.tmpdir()) - - rimraf.sync(pkg) - t.end() -}) - -function setup () { - mkdirp.sync(pkg) - mkdirp.sync(cache) - process.chdir(pkg) -} diff --git a/test/tap/version-lifecycle.js b/test/tap/version-lifecycle.js index e7a7793b44507..590ae86aa4b02 100644 --- a/test/tap/version-lifecycle.js +++ b/test/tap/version-lifecycle.js @@ -2,14 +2,13 @@ var fs = require('graceful-fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap.js') var npm = require('../../') -var pkg = path.resolve(__dirname, 'version-lifecycle') -var cache = path.resolve(pkg, 'cache') +var pkg = common.pkg +var cache = common.cache var npmrc = path.resolve(pkg, './.npmrc') var configContents = 
'sign-git-commit=false\nsign-git-tag=false\n' @@ -145,16 +144,11 @@ test('npm version <semver> execution order', function (t) { }) }) -test('cleanup', function (t) { - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) - t.end() -}) - function setup () { + process.chdir(__dirname) + rimraf.sync(pkg) mkdirp.sync(pkg) mkdirp.sync(path.join(pkg, 'node_modules')) - mkdirp.sync(cache) fs.writeFileSync(npmrc, configContents, 'ascii') process.chdir(pkg) } diff --git a/test/tap/version-message-config.js b/test/tap/version-message-config.js index 12cb6eb59912d..94e9e951e776f 100644 --- a/test/tap/version-message-config.js +++ b/test/tap/version-message-config.js @@ -2,15 +2,11 @@ var common = require('../common-tap.js') var fs = require('fs') var path = require('path') -var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var npm = require('../../lib/npm.js') -var pkg = path.resolve(__dirname, 'version-message-config') -var cache = path.resolve(pkg, 'cache') +var pkg = common.pkg var npmrc = path.resolve(pkg, '.npmrc') var packagePath = path.resolve(pkg, 'package.json') @@ -54,21 +50,7 @@ test('npm version <semver> with message config', function (t) { }) }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - // windows fix for locked files - process.chdir(osenv.tmpdir()) - - rimraf.sync(pkg) -} - function setup () { - cleanup() - mkdirp.sync(cache) process.chdir(pkg) fs.writeFileSync(packagePath, JSON.stringify(json), 'utf8') diff --git a/test/tap/version-no-git.js b/test/tap/version-no-git.js index 0a859c7af2170..cea8b55ddcd8c 100644 --- a/test/tap/version-no-git.js +++ b/test/tap/version-no-git.js @@ -1,15 +1,13 @@ var common = require('../common-tap.js') var test = require('tap').test var npm = require('../../') -var osenv = require('osenv') var path = require('path') var fs = require('fs') var mkdirp = require('mkdirp') -var rimraf = require('rimraf') var requireInject = require('require-inject') -var pkg = path.resolve(__dirname, 'version-no-git') -var cache = path.resolve(pkg, 'cache') +var pkg = common.pkg +var cache = common.cache var gitDir = path.resolve(pkg, '.git') test('npm version <semver> in a git repo without the git binary', function (t) { @@ -33,16 +31,7 @@ test('npm version <semver> in a git repo without the git binary', function (t) { }) }) -test('cleanup', function (t) { - process.chdir(osenv.tmpdir()) - - rimraf.sync(pkg) - t.end() -}) - function setup () { - mkdirp.sync(pkg) - mkdirp.sync(cache) mkdirp.sync(gitDir) fs.writeFileSync(path.resolve(pkg, 'package.json'), JSON.stringify({ author: 'Terin Stock', diff --git a/test/tap/version-no-package.js b/test/tap/version-no-package.js index e4eeda0eebda0..60c0e3be7adb9 100644 --- a/test/tap/version-no-package.js +++ b/test/tap/version-no-package.js @@ -1,22 +1,13 @@ var common = require('../common-tap.js') var test = require('tap').test -var osenv = require('osenv') -var path = require('path') -var mkdirp = require('mkdirp') -var rimraf = require('rimraf') -var pkg = path.resolve(__dirname, 'version-no-package') - -test('setup', function (t) { - setup() - t.end() -}) +var pkg = common.pkg test('npm version in a prefix with no package.json', function (t) { - setup() + process.chdir(pkg) common.npm( ['version', '--json', '--prefix', pkg], - { cwd: pkg }, + { cwd: pkg, nodeExecPath: process.execPath }, function (er, code, stdout, stderr) { t.ifError(er, "npm version doesn't care that there's no package.json") t.notOk(code, 'npm 
version ran without barfing') @@ -30,15 +21,3 @@ test('npm version in a prefix with no package.json', function (t) { } ) }) - -test('cleanup', function (t) { - process.chdir(osenv.tmpdir()) - - rimraf.sync(pkg) - t.end() -}) - -function setup () { - mkdirp.sync(pkg) - process.chdir(pkg) -} diff --git a/test/tap/version-no-tags.js b/test/tap/version-no-tags.js index 755e640c0b31e..c2c11d875b55a 100644 --- a/test/tap/version-no-tags.js +++ b/test/tap/version-no-tags.js @@ -1,16 +1,13 @@ var common = require('../common-tap.js') var test = require('tap').test var npm = require('../../') -var osenv = require('osenv') var path = require('path') var fs = require('fs') -var rimraf = require('rimraf') -var mkdirp = require('mkdirp') var which = require('which') var spawn = require('child_process').spawn -var pkg = path.resolve(__dirname, 'version-no-tags') -var cache = path.resolve(pkg, 'cache') +var pkg = common.pkg +var cache = common.cache test('npm version <semver> without git tag', function (t) { setup() @@ -50,17 +47,7 @@ test('npm version <semver> without git tag', function (t) { }) }) -test('cleanup', function (t) { - // windows fix for locked files - process.chdir(osenv.tmpdir()) - - rimraf.sync(pkg) - t.end() -}) - function setup () { - mkdirp.sync(pkg) - mkdirp.sync(cache) fs.writeFileSync(path.resolve(pkg, 'package.json'), JSON.stringify({ author: 'Evan Lucas', name: 'version-no-tags-test', diff --git a/test/tap/version-prerelease-id.js b/test/tap/version-prerelease-id.js index 1a206aa116649..0e248423cd052 100644 --- a/test/tap/version-prerelease-id.js +++ b/test/tap/version-prerelease-id.js @@ -1,24 +1,15 @@ var fs = require('fs') var path = require('path') -var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var npm = require('../../') var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'version-shrinkwrap') -var cache = path.resolve(pkg, 'cache') +var pkg = common.pkg var EXEC_OPTS = { cwd: pkg } -test('setup', function (t) { - setup() - t.end() -}) - test('npm version --preid=rc uses prerelease id', function (t) { setup() @@ -32,15 +23,7 @@ test('npm version --preid=rc uses prerelease id', function (t) { }) }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - function setup () { - cleanup() - mkdirp.sync(pkg) - mkdirp.sync(cache) var contents = { author: 'Daniel Wilches', name: 'version-prerelease-id', @@ -52,10 +35,3 @@ function setup () { fs.writeFileSync(path.resolve(pkg, 'npm-shrinkwrap.json'), JSON.stringify(contents), 'utf8') process.chdir(pkg) } - -function cleanup () { - // windows fix for locked files - process.chdir(osenv.tmpdir()) - rimraf.sync(cache) - rimraf.sync(pkg) -} diff --git a/test/tap/version-sub-directory-shrinkwrap.js b/test/tap/version-sub-directory-shrinkwrap.js index 0455b62ab7536..5f2d688f42ba5 100644 --- a/test/tap/version-sub-directory-shrinkwrap.js +++ b/test/tap/version-sub-directory-shrinkwrap.js @@ -3,17 +3,15 @@ var fs = require('fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var npm = require('../../lib/npm.js') -var pkg = path.resolve(__dirname, 'version-sub-directory') +var pkg = common.pkg var subDirectory = path.resolve(pkg, 'sub-directory') var packagePath = path.resolve(pkg, 'package.json') var shrinkwrapPath = path.resolve(pkg, 'npm-shrinkwrap.json') -var cache = path.resolve(pkg, 'cache') +var cache = common.cache var json = { 
name: 'cat', version: '0.1.2' } @@ -59,20 +57,7 @@ test('npm version <semver> from a subdirectory', function (t) { } }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - // windows fix for locked files - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} - function setup () { - cleanup() - mkdirp.sync(cache) mkdirp.sync(subDirectory) process.chdir(subDirectory) fs.writeFileSync(packagePath, JSON.stringify(json), 'utf8') diff --git a/test/tap/version-sub-directory.js b/test/tap/version-sub-directory.js index 71c96121ca7d7..fc4a41f36e583 100644 --- a/test/tap/version-sub-directory.js +++ b/test/tap/version-sub-directory.js @@ -3,21 +3,21 @@ var fs = require('fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') -var rimraf = require('rimraf') var test = require('tap').test var npm = require('../../lib/npm.js') -var pkg = path.resolve(__dirname, 'version-sub-directory') +var pkg = common.pkg var subDirectory = path.resolve(pkg, 'sub-directory') var packagePath = path.resolve(pkg, 'package.json') -var cache = path.resolve(pkg, 'cache') +var cache = common.cache var json = { name: 'cat', version: '0.1.2' } test('npm version <semver> from a subdirectory', function (t) { - setup() + mkdirp.sync(subDirectory) + process.chdir(subDirectory) + fs.writeFileSync(packagePath, JSON.stringify(json), 'utf8') npmLoad() function npmLoad () { @@ -54,22 +54,3 @@ test('npm version <semver> from a subdirectory', function (t) { t.end() } }) - -test('cleanup', function (t) { - cleanup() - t.end() -}) - -function cleanup () { - // windows fix for locked files - process.chdir(osenv.tmpdir()) - rimraf.sync(pkg) -} - -function setup () { - cleanup() - mkdirp.sync(cache) - mkdirp.sync(subDirectory) - process.chdir(subDirectory) - fs.writeFileSync(packagePath, JSON.stringify(json), 'utf8') -} diff --git a/test/tap/version-update-shrinkwrap.js b/test/tap/version-update-shrinkwrap.js index 58264e9926c03..e6ba5ee6182ba 100644 --- a/test/tap/version-update-shrinkwrap.js +++ b/test/tap/version-update-shrinkwrap.js @@ -2,19 +2,18 @@ var fs = require('fs') var path = require('path') var mkdirp = require('mkdirp') -var osenv = require('osenv') var rimraf = require('rimraf') var test = require('tap').test var npm = require('../../') var common = require('../common-tap.js') -var pkg = path.resolve(__dirname, 'version-shrinkwrap') -var cache = path.resolve(pkg, 'cache') +var pkg = common.pkg +var cache = common.cache test('npm version <semver> updates shrinkwrap - no git', function (t) { setup() - npm.load({ cache: pkg + '/cache', registry: common.registry }, function () { + npm.load({ cache: cache, registry: common.registry }, function () { npm.commands.version(['patch'], function (err) { if (err) return t.fail('Error perform version patch') var shrinkwrap = require(path.resolve(pkg, 'npm-shrinkwrap.json')) @@ -111,15 +110,10 @@ test('npm version <semver> updates shrinkwrap and updates git', function (t) { } }) -test('cleanup', function (t) { - cleanup() - t.end() -}) - function setup () { - cleanup() + process.chdir(__dirname) + rimraf.sync(pkg) mkdirp.sync(pkg) - mkdirp.sync(cache) var contents = { author: 'Nathan Bowser && Faiq Raza', name: 'version-with-shrinkwrap-test', @@ -131,10 +125,3 @@ function setup () { fs.writeFileSync(path.resolve(pkg, 'npm-shrinkwrap.json'), JSON.stringify(contents), 'utf8') process.chdir(pkg) } - -function cleanup () { - // windows fix for locked files - process.chdir(osenv.tmpdir()) - rimraf.sync(cache) - rimraf.sync(pkg) 
-}
diff --git a/test/tap/view.js b/test/tap/view.js
index 30ccdb471cf58..71d21487ae99c 100644
--- a/test/tap/view.js
+++ b/test/tap/view.js
@@ -1,10 +1,14 @@
 var common = require('../common-tap.js')
-var test = require('tap').test
+const t = require('tap')
+var test = t.test
 var osenv = require('osenv')
 var path = require('path')
 var fs = require('fs')
 var rimraf = require('rimraf')
 var mkdirp = require('mkdirp')
+
+// this test has to use a tmpdir so that it's outside of
+// the current package context of npm.
 var tmp = osenv.tmpdir()
 var t1dir = path.resolve(tmp, 'view-local-no-pkg')
 var t2dir = path.resolve(tmp, 'view-local-notmine')
@@ -13,6 +17,15 @@ var mr = require('npm-registry-mock')

 var server

+t.teardown(() => {
+  rimraf.sync(t1dir)
+  rimraf.sync(t2dir)
+  rimraf.sync(t3dir)
+  if (server) {
+    server.close()
+  }
+})
+
 test('setup', function (t) {
   mkdirp.sync(t1dir)
   mkdirp.sync(t2dir)
@@ -46,7 +59,6 @@ function plugin (server) {
 }

 test('npm view . in global mode', function (t) {
-  process.chdir(t1dir)
   common.npm([
     'view',
     '.',
@@ -61,7 +73,6 @@ test('npm view . in global mode', function (t) {
 })

 test('npm view --global', function (t) {
-  process.chdir(t1dir)
   common.npm([
     'view',
     '--registry=' + common.registry,
@@ -75,7 +86,6 @@ test('npm view --global', function (t) {
 })

 test('npm view . with no package.json', function (t) {
-  process.chdir(t1dir)
   common.npm([
     'view',
     '.',
@@ -89,7 +99,6 @@ test('npm view . with no package.json', function (t) {
 })

 test('npm view . with no published package', function (t) {
-  process.chdir(t3dir)
   common.npm([
     'view',
     '.',
@@ -97,13 +106,12 @@ test('npm view . with no published package', function (t) {
   ], { cwd: t3dir }, function (err, code, stdout, stderr) {
     t.ifError(err, 'view command finished successfully')
     t.equal(code, 1, 'exit not ok')
-    t.similar(stderr, /version not found/m)
+    t.similar(stderr, /not in the npm registry/m)
     t.end()
   })
 })

 test('npm view .', function (t) {
-  process.chdir(t2dir)
   common.npm([
     'view',
     '.',
@@ -117,7 +125,6 @@ test('npm view .', function (t) {
 })

 test('npm view . select fields', function (t) {
-  process.chdir(t2dir)
   common.npm([
     'view',
     '.',
@@ -132,7 +139,6 @@ test('npm view . select fields', function (t) {
 })

 test('npm view .@<version>', function (t) {
-  process.chdir(t2dir)
   common.npm([
     'view',
     '.@0.0.0',
@@ -147,7 +153,6 @@ test('npm view .@<version>', function (t) {
 })

 test('npm view .@<version> version --json', function (t) {
-  process.chdir(t2dir)
   common.npm([
     'view',
     '.@0.0.0',
@@ -163,7 +168,6 @@ test('npm view .@<version> version --json', function (t) {
 })

 test('npm view . --json author name version', function (t) {
-  process.chdir(t2dir)
   common.npm([
     'view',
     '.',
@@ -186,7 +190,6 @@ test('npm view . --json author name version', function (t) {
 })

 test('npm view .@<version> --json author name version', function (t) {
-  process.chdir(t2dir)
   common.npm([
     'view',
     '.@0.0.0',
@@ -376,13 +379,3 @@ test('npm view with valid but non existent package name', function (t) {
     t.end()
   })
 })
-
-test('cleanup', function (t) {
-  process.chdir(osenv.tmpdir())
-  rimraf.sync(t1dir)
-  rimraf.sync(t2dir)
-  rimraf.sync(t3dir)
-  t.pass('cleaned up')
-  server.close()
-  t.end()
-})
diff --git a/test/tap/whoami.js b/test/tap/whoami.js
new file mode 100644
index 0000000000000..9f4bf4266b74c
--- /dev/null
+++ b/test/tap/whoami.js
@@ -0,0 +1,76 @@
+var common = require('../common-tap.js')
+
+var fs = require('fs')
+var path = require('path')
+var createServer = require('http').createServer
+
+var test = require('tap').test
+var rimraf = require('rimraf')
+
+var opts = { cwd: __dirname }
+
+var FIXTURE_PATH = path.resolve(common.pkg, 'fixture_npmrc')
+
+test('npm whoami with basic auth', function (t) {
+  var s = '//registry.lvh.me/:username = wombat\n' +
+          '//registry.lvh.me/:_password = YmFkIHBhc3N3b3Jk\n' +
+          '//registry.lvh.me/:email = lindsay@wdu.org.au\n'
+  fs.writeFileSync(FIXTURE_PATH, s, 'ascii')
+  fs.chmodSync(FIXTURE_PATH, 0o644)
+
+  common.npm(
+    [
+      'whoami',
+      '--userconfig=' + FIXTURE_PATH,
+      '--registry=http://registry.lvh.me/'
+    ],
+    opts,
+    function (err, code, stdout, stderr) {
+      t.ifError(err)
+
+      t.equal(stderr, '', 'got nothing on stderr')
+      t.equal(code, 0, 'exit ok')
+      t.equal(stdout, 'wombat\n', 'got username')
+      t.end()
+    }
+  )
+})
+
+test('npm whoami with bearer auth', { timeout: 6000 }, function (t) {
+  var s = '//localhost:' + common.port +
+          '/:_authToken = wombat-developers-union\n'
+  fs.writeFileSync(FIXTURE_PATH, s, 'ascii')
+  fs.chmodSync(FIXTURE_PATH, 0o644)
+
+  function verify (req, res) {
+    t.equal(req.method, 'GET')
+    t.equal(req.url, '/-/whoami')
+
+    res.setHeader('content-type', 'application/json')
+    res.writeHead(200)
+    res.end(JSON.stringify({ username: 'wombat' }), 'utf8')
+  }
+
+  var server = createServer(verify)
+
+  server.listen(common.port, function () {
+    common.npm(
+      [
+        'whoami',
+        '--userconfig=' + FIXTURE_PATH,
+        '--registry=http://localhost:' + common.port + '/'
+      ],
+      opts,
+      function (err, code, stdout, stderr) {
+        t.ifError(err)
+
+        t.equal(stderr, '', 'got nothing on stderr')
+        t.equal(code, 0, 'exit ok')
+        t.equal(stdout, 'wombat\n', 'got username')
+        rimraf.sync(FIXTURE_PATH)
+        server.close()
+        t.end()
+      }
+    )
+  })
+})
diff --git a/test/tap/zz-cleanup.js b/test/tap/zz-cleanup.js
deleted file mode 100644
index e1020aa3b11b5..0000000000000
--- a/test/tap/zz-cleanup.js
+++ /dev/null
@@ -1,8 +0,0 @@
-var common = require('../common-tap')
-var test = require('tap').test
-var rimraf = require('rimraf')
-
-test('cleanup', function (t) {
-  rimraf.sync(common.npm_config_cache)
-  t.end()
-})
diff --git a/test/util/mock-tarball.js b/test/util/mock-tarball.js
new file mode 100644
index 0000000000000..f62eaf4e423d0
--- /dev/null
+++ b/test/util/mock-tarball.js
@@ -0,0 +1,47 @@
+'use strict'
+
+const BB = require('bluebird')
+
+const getStream = require('get-stream')
+const pipeline = require('mississippi').pipeline
+const tar = require('tar-stream')
+const zlib = require('zlib')
+
+module.exports = makeTarball
+function makeTarball (files, opts) {
+  opts = opts || {}
+  const pack = tar.pack()
+  Object.keys(files).forEach(function (filename) {
+    const entry = files[filename]
+    pack.entry({
+      name: (opts.noPrefix ? '' : 'package/') + filename,
+      type: entry.type,
+      size: entry.size,
+      mode: entry.mode,
+      mtime: entry.mtime || new Date(0),
+      linkname: entry.linkname,
+      uid: entry.uid,
+      gid: entry.gid,
+      uname: entry.uname,
+      gname: entry.gname
+    }, typeof files[filename] === 'string'
+      ? files[filename]
+      : files[filename].data)
+  })
+  pack.finalize()
+  return BB.try(() => {
+    if (opts.stream && opts.gzip) {
+      return pipeline(pack, zlib.createGzip())
+    } else if (opts.stream) {
+      return pack
+    } else {
+      return getStream.buffer(pack).then(ret => {
+        if (opts.gzip) {
+          return BB.fromNode(cb => zlib.gzip(ret, cb))
+        } else {
+          return ret
+        }
+      })
+    }
+  })
+}
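// Illustrative sketch only: one way a tap test might consume the makeTarball
// helper added above. The package contents, the { gzip: true } option, and the
// relative require path (assuming a caller under test/tap/) are assumptions
// made for this example, not taken from the patch itself.
const makeTarball = require('../util/mock-tarball.js')

makeTarball({
  'package.json': JSON.stringify({ name: 'fake', version: '1.0.0' }),
  'index.js': 'module.exports = "hello"'
}, { gzip: true }).then(function (tarData) {
  // With gzip set and no stream option, the helper resolves to a Buffer
  // containing a gzipped tarball whose entries sit under the conventional
  // "package/" prefix; a test could serve it from a mocked registry route
  // or write it to disk before installing from it.
})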